use crate::agent::AgentID;
use crate::asset::{Asset, AssetGroupID, AssetID, AssetRef};
use crate::commodity::CommodityID;
use crate::process::ProcessID;
use crate::region::RegionID;
use crate::simulation::CommodityPrices;
use crate::simulation::investment::appraisal::AppraisalOutput;
use crate::simulation::optimisation::{FlowMap, Solution};
use crate::time_slice::TimeSliceID;
use crate::units::{
Activity, Capacity, Flow, Money, MoneyPerActivity, MoneyPerCapacity, MoneyPerFlow,
};
use anyhow::{Context, Result, ensure};
use csv;
use indexmap::IndexMap;
use itertools::Itertools;
use serde::{Deserialize, Serialize};
use std::fs;
use std::fs::File;
use std::path::{Path, PathBuf};
pub mod metadata;
use metadata::write_metadata;
const COMMODITY_FLOWS_FILE_NAME: &str = "commodity_flows.csv";
const COMMODITY_PRICES_FILE_NAME: &str = "commodity_prices.csv";
const ASSETS_FILE_NAME: &str = "assets.csv";
const ACTIVITY_ASSET_DISPATCH: &str = "debug_dispatch_assets.csv";
const COMMODITY_BALANCE_DUALS_FILE_NAME: &str = "debug_commodity_balance_duals.csv";
const UNMET_DEMAND_FILE_NAME: &str = "debug_unmet_demand.csv";
const SOLVER_VALUES_FILE_NAME: &str = "debug_solver.csv";
const APPRAISAL_RESULTS_FILE_NAME: &str = "debug_appraisal_results.csv";
const APPRAISAL_RESULTS_TIME_SLICE_FILE_NAME: &str = "debug_appraisal_results_time_slices.csv";
/// Get the output directory for the given model.
///
/// The directory is `<results_root>/<model name>`, where the model name is the final
/// component of the canonicalised model path.
///
/// # Errors
///
/// Fails if the model path cannot be resolved, if the model is in the filesystem root
/// (so has no final component) or if the directory name is not valid UTF-8.
pub fn get_output_dir(model_dir: &Path, results_root: PathBuf) -> Result<PathBuf> {
    // Canonicalise so that e.g. a trailing `/.` still yields the real directory name
    let model_dir = model_dir
        .canonicalize()
        .context("Could not resolve path to model")?;
    let model_name = model_dir
        .file_name()
        .context("Model cannot be in root folder")?
        .to_str()
        .context("Invalid chars in model dir name")?;

    // Join directly rather than collecting from an iterator of borrowed `PathBuf`s,
    // which would clone both components
    Ok(results_root.join(model_name))
}
/// Get the graphs output directory for the given model.
///
/// The directory is `<graph_results_root>/<model name>`, where the model name is the
/// final component of the canonicalised model path.
///
/// # Errors
///
/// Fails if the model path cannot be resolved, if the model is in the filesystem root
/// (so has no final component) or if the directory name is not valid UTF-8.
pub fn get_graphs_dir(model_dir: &Path, graph_results_root: PathBuf) -> Result<PathBuf> {
    // Canonicalise so that e.g. a trailing `/.` still yields the real directory name
    let model_dir = model_dir
        .canonicalize()
        .context("Could not resolve path to model")?;
    let model_name = model_dir
        .file_name()
        .context("Model cannot be in root folder")?
        .to_str()
        .context("Invalid chars in model dir name")?;

    // Join directly rather than collecting from an iterator of borrowed `PathBuf`s,
    // which would clone both components
    Ok(graph_results_root.join(model_name))
}
/// Ensure that `output_dir` exists and is empty.
///
/// If the directory already exists and contains files, it is deleted and recreated,
/// but only when `allow_overwrite` is set; otherwise an error is returned.
///
/// Returns `true` if an existing, non-empty directory was wiped, `false` otherwise.
pub fn create_output_directory(output_dir: &Path, allow_overwrite: bool) -> Result<bool> {
    let mut wiped_existing = false;
    if let Ok(mut entries) = fs::read_dir(output_dir) {
        if entries.next().is_none() {
            // Directory already exists and is empty: nothing to do
            return Ok(false);
        }

        // Non-empty: refuse unless the caller explicitly allowed overwriting
        ensure!(
            allow_overwrite,
            "Output folder already exists and is not empty. \
            Please delete the folder or pass the --overwrite command-line option."
        );
        fs::remove_dir_all(output_dir).context("Could not delete folder")?;
        wiped_existing = true;
    }

    fs::create_dir_all(output_dir)?;
    Ok(wiped_existing)
}
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct AssetRow {
asset_id: AssetID,
process_id: ProcessID,
region_id: RegionID,
agent_id: AgentID,
group_id: Option<AssetGroupID>,
commission_year: u32,
decommission_year: Option<u32>,
capacity: Capacity,
}
impl AssetRow {
fn new(asset: &Asset) -> Self {
Self {
asset_id: asset.id().unwrap(),
process_id: asset.process_id().clone(),
region_id: asset.region_id().clone(),
agent_id: asset.agent_id().unwrap().clone(),
group_id: asset.group_id(),
commission_year: asset.commission_year(),
decommission_year: asset.decommission_year(),
capacity: asset.total_capacity(),
}
}
}
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct CommodityFlowRow {
milestone_year: u32,
asset_id: AssetID,
commodity_id: CommodityID,
time_slice: TimeSliceID,
flow: Flow,
}
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct CommodityPriceRow {
milestone_year: u32,
commodity_id: CommodityID,
region_id: RegionID,
time_slice: TimeSliceID,
price: MoneyPerFlow,
}
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct ActivityRow {
milestone_year: u32,
run_description: String,
asset_id: Option<AssetID>,
process_id: ProcessID,
region_id: RegionID,
time_slice: TimeSliceID,
activity: Option<Activity>,
activity_dual: Option<MoneyPerActivity>,
column_dual: Option<MoneyPerActivity>,
}
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct CommodityBalanceDualsRow {
milestone_year: u32,
run_description: String,
commodity_id: CommodityID,
region_id: RegionID,
time_slice: TimeSliceID,
value: MoneyPerFlow,
}
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct UnmetDemandRow {
milestone_year: u32,
run_description: String,
commodity_id: CommodityID,
region_id: RegionID,
time_slice: TimeSliceID,
value: Flow,
}
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct SolverValuesRow {
milestone_year: u32,
run_description: String,
objective_value: Money,
}
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct AppraisalResultsRow {
milestone_year: u32,
run_description: String,
asset_id: Option<AssetID>,
process_id: ProcessID,
region_id: RegionID,
capacity: Capacity,
capacity_coefficient: MoneyPerCapacity,
metric: Option<f64>,
}
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct AppraisalResultsTimeSliceRow {
milestone_year: u32,
run_description: String,
asset_id: Option<AssetID>,
process_id: ProcessID,
region_id: RegionID,
time_slice: TimeSliceID,
activity: Activity,
activity_coefficient: MoneyPerActivity,
demand: Flow,
unmet_demand: Flow,
}
struct DebugDataWriter {
context: Option<String>,
commodity_balance_duals_writer: csv::Writer<File>,
unmet_demand_writer: csv::Writer<File>,
solver_values_writer: csv::Writer<File>,
appraisal_results_writer: csv::Writer<File>,
appraisal_results_time_slice_writer: csv::Writer<File>,
dispatch_asset_writer: csv::Writer<File>,
}
impl DebugDataWriter {
    /// Open CSV files to write debug info to
    ///
    /// # Arguments
    ///
    /// * `output_path` - Folder in which the debug CSV files will be created
    fn create(output_path: &Path) -> Result<Self> {
        let new_writer = |file_name| {
            let file_path = output_path.join(file_name);
            csv::Writer::from_path(file_path)
        };

        Ok(Self {
            context: None,
            commodity_balance_duals_writer: new_writer(COMMODITY_BALANCE_DUALS_FILE_NAME)?,
            unmet_demand_writer: new_writer(UNMET_DEMAND_FILE_NAME)?,
            solver_values_writer: new_writer(SOLVER_VALUES_FILE_NAME)?,
            appraisal_results_writer: new_writer(APPRAISAL_RESULTS_FILE_NAME)?,
            appraisal_results_time_slice_writer: new_writer(
                APPRAISAL_RESULTS_TIME_SLICE_FILE_NAME,
            )?,
            dispatch_asset_writer: new_writer(ACTIVITY_ASSET_DISPATCH)?,
        })
    }

    /// Return `run_description` prefixed with the current context, if one is set.
    fn with_context(&self, run_description: &str) -> String {
        if let Some(context) = &self.context {
            format!("{context}; {run_description}")
        } else {
            run_description.to_string()
        }
    }

    /// Write all debug info for a single dispatch optimisation run.
    fn write_dispatch_debug_info(
        &mut self,
        milestone_year: u32,
        run_description: &str,
        solution: &Solution,
    ) -> Result<()> {
        self.write_activity(
            milestone_year,
            run_description,
            solution.iter_activity(),
            solution.iter_activity_duals(),
            solution.iter_column_duals(),
        )?;
        self.write_commodity_balance_duals(
            milestone_year,
            run_description,
            solution.iter_commodity_balance_duals(),
        )?;
        self.write_unmet_demand(
            milestone_year,
            run_description,
            solution.iter_unmet_demand(),
        )?;
        self.write_solver_values(milestone_year, run_description, solution.objective_value)?;
        Ok(())
    }

    /// Write activity values and duals for each asset and time slice.
    ///
    /// The three iterators may cover different sets of asset/time-slice keys, so they
    /// are first merged into a single map; any value absent for a given key is written
    /// as an empty field (`None`).
    fn write_activity<'a, I, J, K>(
        &mut self,
        milestone_year: u32,
        run_description: &str,
        iter_activity: I,
        iter_activity_duals: J,
        iter_column_duals: K,
    ) -> Result<()>
    where
        I: Iterator<Item = (&'a AssetRef, &'a TimeSliceID, Activity)>,
        J: Iterator<Item = (&'a AssetRef, &'a TimeSliceID, MoneyPerActivity)>,
        K: Iterator<Item = (&'a AssetRef, &'a TimeSliceID, MoneyPerActivity)>,
    {
        // Activity value plus the two duals; any may be absent for a given key
        type CompiledActivityData = (
            Option<Activity>,
            Option<MoneyPerActivity>,
            Option<MoneyPerActivity>,
        );

        // Merge the three input iterators into one map keyed by asset and time slice
        let mut map: IndexMap<(&AssetRef, &TimeSliceID), CompiledActivityData> = IndexMap::new();
        for (asset, time_slice, activity) in iter_activity {
            map.entry((asset, time_slice)).or_default().0 = Some(activity);
        }
        for (asset, time_slice, activity_dual) in iter_activity_duals {
            map.entry((asset, time_slice)).or_default().1 = Some(activity_dual);
        }
        for (asset, time_slice, column_dual) in iter_column_duals {
            map.entry((asset, time_slice)).or_default().2 = Some(column_dual);
        }

        // The run description is the same for every row, so compute it once rather
        // than allocating a fresh string per row
        let run_description = self.with_context(run_description);
        for (&(asset, time_slice), &(activity, activity_dual, column_dual)) in map.iter() {
            let row = ActivityRow {
                milestone_year,
                run_description: run_description.clone(),
                asset_id: asset.id(),
                process_id: asset.process_id().clone(),
                region_id: asset.region_id().clone(),
                time_slice: time_slice.clone(),
                activity,
                activity_dual,
                column_dual,
            };
            self.dispatch_asset_writer.serialize(row)?;
        }
        Ok(())
    }

    /// Write the dual values of the commodity balance constraints.
    fn write_commodity_balance_duals<'a, I>(
        &mut self,
        milestone_year: u32,
        run_description: &str,
        iter: I,
    ) -> Result<()>
    where
        I: Iterator<Item = (&'a CommodityID, &'a RegionID, &'a TimeSliceID, MoneyPerFlow)>,
    {
        // Loop-invariant: computed once, cloned per row
        let run_description = self.with_context(run_description);
        for (commodity_id, region_id, time_slice, value) in iter {
            let row = CommodityBalanceDualsRow {
                milestone_year,
                run_description: run_description.clone(),
                commodity_id: commodity_id.clone(),
                region_id: region_id.clone(),
                time_slice: time_slice.clone(),
                value,
            };
            self.commodity_balance_duals_writer.serialize(row)?;
        }
        Ok(())
    }

    /// Write unmet demand values.
    fn write_unmet_demand<'a, I>(
        &mut self,
        milestone_year: u32,
        run_description: &str,
        iter: I,
    ) -> Result<()>
    where
        I: Iterator<Item = (&'a CommodityID, &'a RegionID, &'a TimeSliceID, Flow)>,
    {
        // Loop-invariant: computed once, cloned per row
        let run_description = self.with_context(run_description);
        for (commodity_id, region_id, time_slice, value) in iter {
            let row = UnmetDemandRow {
                milestone_year,
                run_description: run_description.clone(),
                commodity_id: commodity_id.clone(),
                region_id: region_id.clone(),
                time_slice: time_slice.clone(),
                value,
            };
            self.unmet_demand_writer.serialize(row)?;
        }
        Ok(())
    }

    /// Write solver values (currently only the objective value).
    fn write_solver_values(
        &mut self,
        milestone_year: u32,
        run_description: &str,
        objective_value: Money,
    ) -> Result<()> {
        let row = SolverValuesRow {
            milestone_year,
            run_description: self.with_context(run_description),
            objective_value,
        };
        self.solver_values_writer.serialize(row)?;
        // Flushed immediately (single row per run) so the value reaches disk even if a
        // later step fails
        self.solver_values_writer.flush()?;
        Ok(())
    }

    /// Write per-candidate appraisal results.
    fn write_appraisal_results(
        &mut self,
        milestone_year: u32,
        run_description: &str,
        appraisal_results: &[AppraisalOutput],
    ) -> Result<()> {
        // Loop-invariant: computed once, cloned per row
        let run_description = self.with_context(run_description);
        for result in appraisal_results {
            let row = AppraisalResultsRow {
                milestone_year,
                run_description: run_description.clone(),
                asset_id: result.asset.id(),
                process_id: result.asset.process_id().clone(),
                region_id: result.asset.region_id().clone(),
                capacity: result.capacity.total_capacity(),
                capacity_coefficient: result.coefficients.capacity_coefficient,
                metric: result.metric.as_ref().map(|m| m.value()),
            };
            self.appraisal_results_writer.serialize(row)?;
        }
        Ok(())
    }

    /// Write per-candidate, per-time-slice appraisal results.
    fn write_appraisal_time_slice_results(
        &mut self,
        milestone_year: u32,
        run_description: &str,
        appraisal_results: &[AppraisalOutput],
        demand: &IndexMap<TimeSliceID, Flow>,
    ) -> Result<()> {
        // Loop-invariant: computed once, cloned per row
        let run_description = self.with_context(run_description);
        for result in appraisal_results {
            for (time_slice, activity) in &result.activity {
                // NOTE(review): these indexed lookups panic if a time slice present in
                // `result.activity` is missing from the coefficients/demand/unmet maps;
                // assumed to share the same key set — confirm upstream invariant
                let activity_coefficient = result.coefficients.activity_coefficients[time_slice];
                let demand = demand[time_slice];
                let unmet_demand = result.unmet_demand[time_slice];
                let row = AppraisalResultsTimeSliceRow {
                    milestone_year,
                    run_description: run_description.clone(),
                    asset_id: result.asset.id(),
                    process_id: result.asset.process_id().clone(),
                    region_id: result.asset.region_id().clone(),
                    time_slice: time_slice.clone(),
                    activity: *activity,
                    activity_coefficient,
                    demand,
                    unmet_demand,
                };
                self.appraisal_results_time_slice_writer.serialize(row)?;
            }
        }
        Ok(())
    }

    /// Flush all debug writers, forcing buffered rows to disk.
    fn flush(&mut self) -> Result<()> {
        self.commodity_balance_duals_writer.flush()?;
        self.unmet_demand_writer.flush()?;
        self.solver_values_writer.flush()?;
        self.appraisal_results_writer.flush()?;
        self.appraisal_results_time_slice_writer.flush()?;
        self.dispatch_asset_writer.flush()?;
        Ok(())
    }
}
pub struct DataWriter {
assets_path: PathBuf,
flows_writer: csv::Writer<File>,
prices_writer: csv::Writer<File>,
debug_writer: Option<DebugDataWriter>,
}
impl DataWriter {
    /// Open CSV files to write output data to
    ///
    /// # Arguments
    ///
    /// * `output_path` - Folder where output files will be saved
    /// * `model_path` - Folder containing the model configuration
    /// * `save_debug_info` - Whether to also create the debug CSV files
    pub fn create(output_path: &Path, model_path: &Path, save_debug_info: bool) -> Result<Self> {
        write_metadata(output_path, model_path).context("Failed to save metadata")?;

        // Only open the debug files when debug output was requested
        let debug_writer = save_debug_info
            .then(|| DebugDataWriter::create(output_path))
            .transpose()?;

        let make_writer = |file_name| csv::Writer::from_path(output_path.join(file_name));
        Ok(Self {
            assets_path: output_path.join(ASSETS_FILE_NAME),
            flows_writer: make_writer(COMMODITY_FLOWS_FILE_NAME)?,
            prices_writer: make_writer(COMMODITY_PRICES_FILE_NAME)?,
            debug_writer,
        })
    }

    /// Write debug info for a dispatch run; a no-op when debug output is disabled.
    pub fn write_dispatch_debug_info(
        &mut self,
        milestone_year: u32,
        run_description: &str,
        solution: &Solution,
    ) -> Result<()> {
        let Some(debug) = self.debug_writer.as_mut() else {
            return Ok(());
        };
        debug.write_dispatch_debug_info(milestone_year, run_description, solution)
    }

    /// Write debug info for an appraisal run; a no-op when debug output is disabled.
    pub fn write_appraisal_debug_info(
        &mut self,
        milestone_year: u32,
        run_description: &str,
        appraisal_results: &[AppraisalOutput],
        demand: &IndexMap<TimeSliceID, Flow>,
    ) -> Result<()> {
        let Some(debug) = self.debug_writer.as_mut() else {
            return Ok(());
        };
        debug.write_appraisal_results(milestone_year, run_description, appraisal_results)?;
        debug.write_appraisal_time_slice_results(
            milestone_year,
            run_description,
            appraisal_results,
            demand,
        )?;
        Ok(())
    }

    /// Write the full asset list, replacing the previous contents of the assets file.
    pub fn write_assets<'a, I>(&mut self, assets: I) -> Result<()>
    where
        I: Iterator<Item = &'a AssetRef>,
    {
        // A fresh writer is created each call so the file is rewritten from scratch
        let mut assets_writer = csv::Writer::from_path(&self.assets_path)?;
        for asset in assets.sorted() {
            assets_writer.serialize(AssetRow::new(asset))?;
        }
        assets_writer.flush()?;
        Ok(())
    }

    /// Append commodity flow rows for the given milestone year.
    pub fn write_flows(&mut self, milestone_year: u32, flow_map: &FlowMap) -> Result<()> {
        for ((asset, commodity_id, time_slice), flow) in flow_map {
            self.flows_writer.serialize(CommodityFlowRow {
                milestone_year,
                asset_id: asset.id().unwrap(),
                commodity_id: commodity_id.clone(),
                time_slice: time_slice.clone(),
                flow: *flow,
            })?;
        }
        Ok(())
    }

    /// Append commodity price rows for the given milestone year.
    pub fn write_prices(&mut self, milestone_year: u32, prices: &CommodityPrices) -> Result<()> {
        for (commodity_id, region_id, time_slice, price) in prices.iter() {
            self.prices_writer.serialize(CommodityPriceRow {
                milestone_year,
                commodity_id: commodity_id.clone(),
                region_id: region_id.clone(),
                time_slice: time_slice.clone(),
                price,
            })?;
        }
        Ok(())
    }

    /// Flush all writers (including debug writers, if any), forcing rows to disk.
    pub fn flush(&mut self) -> Result<()> {
        self.flows_writer.flush()?;
        self.prices_writer.flush()?;
        if let Some(debug) = self.debug_writer.as_mut() {
            debug.flush()?;
        }
        Ok(())
    }

    /// Set the context string prefixed to debug run descriptions.
    pub fn set_debug_context(&mut self, context: String) {
        if let Some(debug) = self.debug_writer.as_mut() {
            debug.context = Some(context);
        }
    }

    /// Clear the debug context string.
    pub fn clear_debug_context(&mut self) {
        if let Some(debug) = self.debug_writer.as_mut() {
            debug.context = None;
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::asset::AssetPool;
    use crate::fixture::{appraisal_output, asset, assets, commodity_id, region_id, time_slice};
    use crate::simulation::investment::appraisal::AppraisalOutput;
    use crate::time_slice::TimeSliceID;
    use indexmap::indexmap;
    use itertools::{Itertools, assert_equal};
    use rstest::rstest;
    use std::iter;
    use tempfile::tempdir;

    /// Assets written via `write_assets` can be read back verbatim.
    #[rstest]
    fn write_assets(assets: AssetPool) {
        let dir = tempdir().unwrap();
        {
            let mut writer = DataWriter::create(dir.path(), dir.path(), false).unwrap();
            writer.write_assets(assets.iter()).unwrap();
            writer.flush().unwrap();
        }

        let asset = assets.iter().next().unwrap();
        let expected = AssetRow::new(asset);
        let records: Vec<AssetRow> = csv::Reader::from_path(dir.path().join(ASSETS_FILE_NAME))
            .unwrap()
            .into_deserialize()
            .try_collect()
            .unwrap();
        assert_equal(records, iter::once(expected));
    }

    /// Commodity flows written via `write_flows` can be read back verbatim.
    #[rstest]
    fn write_flows(assets: AssetPool, commodity_id: CommodityID, time_slice: TimeSliceID) {
        let milestone_year = 2020;
        let asset = assets.iter().next().unwrap();
        let flow_map = indexmap! {
            (asset.clone(), commodity_id.clone(), time_slice.clone()) => Flow(42.0)
        };

        let dir = tempdir().unwrap();
        {
            let mut writer = DataWriter::create(dir.path(), dir.path(), false).unwrap();
            writer.write_flows(milestone_year, &flow_map).unwrap();
            writer.flush().unwrap();
        }

        let expected = CommodityFlowRow {
            milestone_year,
            asset_id: asset.id().unwrap(),
            commodity_id,
            time_slice,
            flow: Flow(42.0),
        };
        let records: Vec<CommodityFlowRow> =
            csv::Reader::from_path(dir.path().join(COMMODITY_FLOWS_FILE_NAME))
                .unwrap()
                .into_deserialize()
                .try_collect()
                .unwrap();
        assert_equal(records, iter::once(expected));
    }

    /// Commodity prices written via `write_prices` can be read back verbatim.
    #[rstest]
    fn write_prices(commodity_id: CommodityID, region_id: RegionID, time_slice: TimeSliceID) {
        let milestone_year = 2020;
        let price = MoneyPerFlow(42.0);
        let mut prices = CommodityPrices::default();
        // BUGFIX(review): `&region_id` had been mangled to `®ion_id` by an encoding
        // error (`&reg` -> `®`)
        prices.insert(&commodity_id, &region_id, &time_slice, price);

        let dir = tempdir().unwrap();
        {
            let mut writer = DataWriter::create(dir.path(), dir.path(), false).unwrap();
            writer.write_prices(milestone_year, &prices).unwrap();
            writer.flush().unwrap();
        }

        let expected = CommodityPriceRow {
            milestone_year,
            commodity_id,
            region_id,
            time_slice,
            price,
        };
        let records: Vec<CommodityPriceRow> =
            csv::Reader::from_path(dir.path().join(COMMODITY_PRICES_FILE_NAME))
                .unwrap()
                .into_deserialize()
                .try_collect()
                .unwrap();
        assert_equal(records, iter::once(expected));
    }

    /// Commodity balance duals written by the debug writer can be read back verbatim.
    #[rstest]
    fn write_commodity_balance_duals(
        commodity_id: CommodityID,
        region_id: RegionID,
        time_slice: TimeSliceID,
    ) {
        let milestone_year = 2020;
        let run_description = "test_run".to_string();
        let value = MoneyPerFlow(0.5);
        let dir = tempdir().unwrap();
        {
            let mut writer = DebugDataWriter::create(dir.path()).unwrap();
            writer
                .write_commodity_balance_duals(
                    milestone_year,
                    &run_description,
                    // BUGFIX(review): `&region_id` had been mangled to `®ion_id`
                    iter::once((&commodity_id, &region_id, &time_slice, value)),
                )
                .unwrap();
            writer.flush().unwrap();
        }

        let expected = CommodityBalanceDualsRow {
            milestone_year,
            run_description,
            commodity_id,
            region_id,
            time_slice,
            value,
        };
        let records: Vec<CommodityBalanceDualsRow> =
            csv::Reader::from_path(dir.path().join(COMMODITY_BALANCE_DUALS_FILE_NAME))
                .unwrap()
                .into_deserialize()
                .try_collect()
                .unwrap();
        assert_equal(records, iter::once(expected));
    }

    /// Unmet demand written by the debug writer can be read back verbatim.
    #[rstest]
    fn write_unmet_demand(commodity_id: CommodityID, region_id: RegionID, time_slice: TimeSliceID) {
        let milestone_year = 2020;
        let run_description = "test_run".to_string();
        let value = Flow(0.5);
        let dir = tempdir().unwrap();
        {
            let mut writer = DebugDataWriter::create(dir.path()).unwrap();
            writer
                .write_unmet_demand(
                    milestone_year,
                    &run_description,
                    // BUGFIX(review): `&region_id` had been mangled to `®ion_id`
                    iter::once((&commodity_id, &region_id, &time_slice, value)),
                )
                .unwrap();
            writer.flush().unwrap();
        }

        let expected = UnmetDemandRow {
            milestone_year,
            run_description,
            commodity_id,
            region_id,
            time_slice,
            value,
        };
        let records: Vec<UnmetDemandRow> =
            csv::Reader::from_path(dir.path().join(UNMET_DEMAND_FILE_NAME))
                .unwrap()
                .into_deserialize()
                .try_collect()
                .unwrap();
        assert_equal(records, iter::once(expected));
    }

    /// Activity plus duals merge into one row when all three iterators share a key.
    #[rstest]
    fn write_activity(assets: AssetPool, time_slice: TimeSliceID) {
        let milestone_year = 2020;
        let run_description = "test_run".to_string();
        let activity = Activity(100.5);
        let activity_dual = MoneyPerActivity(-1.5);
        let column_dual = MoneyPerActivity(5.0);
        let dir = tempdir().unwrap();
        let asset = assets.iter().next().unwrap();
        {
            let mut writer = DebugDataWriter::create(dir.path()).unwrap();
            writer
                .write_activity(
                    milestone_year,
                    &run_description,
                    iter::once((asset, &time_slice, activity)),
                    iter::once((asset, &time_slice, activity_dual)),
                    iter::once((asset, &time_slice, column_dual)),
                )
                .unwrap();
            writer.flush().unwrap();
        }

        let expected = ActivityRow {
            milestone_year,
            run_description,
            asset_id: asset.id(),
            process_id: asset.process_id().clone(),
            region_id: asset.region_id().clone(),
            time_slice,
            activity: Some(activity),
            activity_dual: Some(activity_dual),
            column_dual: Some(column_dual),
        };
        let records: Vec<ActivityRow> =
            csv::Reader::from_path(dir.path().join(ACTIVITY_ASSET_DISPATCH))
                .unwrap()
                .into_deserialize()
                .try_collect()
                .unwrap();
        assert_equal(records, iter::once(expected));
    }

    /// Keys absent from the dual iterators yield `None` fields, not missing rows.
    #[rstest]
    fn write_activity_with_missing_keys(assets: AssetPool, time_slice: TimeSliceID) {
        let milestone_year = 2020;
        let run_description = "test_run".to_string();
        let activity = Activity(100.5);
        let dir = tempdir().unwrap();
        let asset = assets.iter().next().unwrap();
        {
            let mut writer = DebugDataWriter::create(dir.path()).unwrap();
            writer
                .write_activity(
                    milestone_year,
                    &run_description,
                    iter::once((asset, &time_slice, activity)),
                    iter::empty::<(&AssetRef, &TimeSliceID, MoneyPerActivity)>(),
                    iter::empty::<(&AssetRef, &TimeSliceID, MoneyPerActivity)>(),
                )
                .unwrap();
            writer.flush().unwrap();
        }

        let expected = ActivityRow {
            milestone_year,
            run_description,
            asset_id: asset.id(),
            process_id: asset.process_id().clone(),
            region_id: asset.region_id().clone(),
            time_slice,
            activity: Some(activity),
            activity_dual: None,
            column_dual: None,
        };
        let records: Vec<ActivityRow> =
            csv::Reader::from_path(dir.path().join(ACTIVITY_ASSET_DISPATCH))
                .unwrap()
                .into_deserialize()
                .try_collect()
                .unwrap();
        assert_equal(records, iter::once(expected));
    }

    /// Solver values written by the debug writer can be read back verbatim.
    #[rstest]
    fn write_solver_values() {
        let milestone_year = 2020;
        let run_description = "test_run".to_string();
        let objective_value = Money(1234.56);
        let dir = tempdir().unwrap();
        {
            let mut writer = DebugDataWriter::create(dir.path()).unwrap();
            writer
                .write_solver_values(milestone_year, &run_description, objective_value)
                .unwrap();
            writer.flush().unwrap();
        }

        let expected = SolverValuesRow {
            milestone_year,
            run_description,
            objective_value,
        };
        let records: Vec<SolverValuesRow> =
            csv::Reader::from_path(dir.path().join(SOLVER_VALUES_FILE_NAME))
                .unwrap()
                .into_deserialize()
                .try_collect()
                .unwrap();
        assert_equal(records, iter::once(expected));
    }

    /// Per-candidate appraisal results can be read back verbatim.
    #[rstest]
    fn write_appraisal_results(asset: Asset, appraisal_output: AppraisalOutput) {
        let milestone_year = 2020;
        let run_description = "test_run".to_string();
        let dir = tempdir().unwrap();
        {
            let mut writer = DebugDataWriter::create(dir.path()).unwrap();
            writer
                .write_appraisal_results(milestone_year, &run_description, &[appraisal_output])
                .unwrap();
            writer.flush().unwrap();
        }

        let expected = AppraisalResultsRow {
            milestone_year,
            run_description,
            asset_id: None,
            process_id: asset.process_id().clone(),
            region_id: asset.region_id().clone(),
            capacity: Capacity(42.0),
            capacity_coefficient: MoneyPerCapacity(2.14),
            metric: Some(4.14),
        };
        let records: Vec<AppraisalResultsRow> =
            csv::Reader::from_path(dir.path().join(APPRAISAL_RESULTS_FILE_NAME))
                .unwrap()
                .into_deserialize()
                .try_collect()
                .unwrap();
        assert_equal(records, iter::once(expected));
    }

    /// Per-candidate, per-time-slice appraisal results can be read back verbatim.
    #[rstest]
    fn write_appraisal_time_slice_results(
        asset: Asset,
        appraisal_output: AppraisalOutput,
        time_slice: TimeSliceID,
    ) {
        let milestone_year = 2020;
        let run_description = "test_run".to_string();
        let dir = tempdir().unwrap();
        let demand = indexmap! {time_slice.clone() => Flow(100.0) };
        {
            let mut writer = DebugDataWriter::create(dir.path()).unwrap();
            writer
                .write_appraisal_time_slice_results(
                    milestone_year,
                    &run_description,
                    &[appraisal_output],
                    &demand,
                )
                .unwrap();
            writer.flush().unwrap();
        }

        let expected = AppraisalResultsTimeSliceRow {
            milestone_year,
            run_description,
            asset_id: None,
            process_id: asset.process_id().clone(),
            region_id: asset.region_id().clone(),
            time_slice: time_slice.clone(),
            activity: Activity(10.0),
            activity_coefficient: MoneyPerActivity(0.5),
            demand: Flow(100.0),
            unmet_demand: Flow(5.0),
        };
        let records: Vec<AppraisalResultsTimeSliceRow> =
            csv::Reader::from_path(dir.path().join(APPRAISAL_RESULTS_TIME_SLICE_FILE_NAME))
                .unwrap()
                .into_deserialize()
                .try_collect()
                .unwrap();
        assert_equal(records, iter::once(expected));
    }

    /// A brand-new directory is created and reported as not overwritten.
    #[test]
    fn create_output_directory_new_directory() {
        let temp_dir = tempdir().unwrap();
        let output_dir = temp_dir.path().join("new_output");
        let result = create_output_directory(&output_dir, false).unwrap();
        assert!(!result);
        assert!(output_dir.exists());
        assert!(output_dir.is_dir());
    }

    /// An existing empty directory is reused without being reported as overwritten.
    #[test]
    fn create_output_directory_existing_empty_directory() {
        let temp_dir = tempdir().unwrap();
        let output_dir = temp_dir.path().join("empty_output");
        fs::create_dir(&output_dir).unwrap();
        let result = create_output_directory(&output_dir, false).unwrap();
        assert!(!result);
        assert!(output_dir.exists());
        assert!(output_dir.is_dir());
    }

    /// A non-empty directory is rejected unless overwriting is allowed.
    #[test]
    fn create_output_directory_existing_with_files_no_overwrite() {
        let temp_dir = tempdir().unwrap();
        let output_dir = temp_dir.path().join("output_with_files");
        fs::create_dir(&output_dir).unwrap();
        fs::write(output_dir.join("existing_file.txt"), "some content").unwrap();
        let result = create_output_directory(&output_dir, false);
        assert!(result.is_err());
        assert!(
            result
                .unwrap_err()
                .to_string()
                .contains("Output folder already exists")
        );
    }

    /// A non-empty directory is wiped when overwriting is allowed.
    #[test]
    fn create_output_directory_existing_with_files_allow_overwrite() {
        let temp_dir = tempdir().unwrap();
        let output_dir = temp_dir.path().join("output_with_files");
        fs::create_dir(&output_dir).unwrap();
        let file_path = output_dir.join("existing_file.txt");
        fs::write(&file_path, "some content").unwrap();
        let result = create_output_directory(&output_dir, true).unwrap();
        assert!(result);
        assert!(output_dir.exists());
        assert!(output_dir.is_dir());
        assert!(!file_path.exists());
    }

    /// Intermediate directories are created as needed.
    #[test]
    fn create_output_directory_nested_path() {
        let temp_dir = tempdir().unwrap();
        let output_dir = temp_dir.path().join("nested").join("path").join("output");
        let result = create_output_directory(&output_dir, false).unwrap();
        assert!(!result);
        assert!(output_dir.exists());
        assert!(output_dir.is_dir());
    }

    /// Subdirectories are removed recursively when overwriting is allowed.
    #[test]
    fn create_output_directory_existing_subdirs_with_files_allow_overwrite() {
        let temp_dir = tempdir().unwrap();
        let output_dir = temp_dir.path().join("output_with_subdirs");
        fs::create_dir_all(output_dir.join("subdir")).unwrap();
        fs::write(output_dir.join("file1.txt"), "content1").unwrap();
        fs::write(output_dir.join("subdir").join("file2.txt"), "content2").unwrap();
        let result = create_output_directory(&output_dir, true).unwrap();
        assert!(result);
        assert!(output_dir.exists());
        assert!(output_dir.is_dir());
        assert!(!output_dir.join("file1.txt").exists());
        assert!(!output_dir.join("subdir").exists());
    }
}