use super::{input_err_msg, read_csv};
use crate::commodity::CommodityMap;
use crate::id::IDCollection;
use crate::process::{
Process, ProcessActivityLimitsMap, ProcessFlowsMap, ProcessID, ProcessInvestmentConstraintsMap,
ProcessMap, ProcessParameterMap,
};
use crate::region::{RegionID, parse_region_str};
use crate::time_slice::TimeSliceInfo;
use crate::units::{ActivityPerCapacity, Capacity};
use anyhow::{Context, Ok, Result, ensure};
use indexmap::IndexSet;
use log::warn;
use serde::Deserialize;
use std::path::Path;
use std::rc::Rc;
mod availability;
use availability::read_process_availabilities;
mod flow;
use flow::read_process_flows;
mod parameter;
use crate::id::define_id_getter;
use parameter::read_process_parameters;
mod investment_constraints;
use investment_constraints::read_process_investment_constraints;
/// Name of the CSV file from which process definitions are read.
const PROCESSES_FILE_NAME: &str = "processes.csv";

/// A single row of the processes CSV file, as deserialised by serde.
///
/// Optional fields are either defaulted (with a warning) or validated when the
/// row is converted into a `Process` in `read_processes_file_from_iter`.
#[derive(PartialEq, Debug, Deserialize)]
struct ProcessRaw {
    /// Unique ID of the process
    id: ProcessID,
    /// Free-text description of the process
    description: String,
    /// Regions in which the process operates, as an unparsed string
    /// (parsed later with `parse_region_str`)
    regions: String,
    /// ID of the primary output commodity, if any (looked up in the commodity map)
    primary_output: Option<String>,
    /// First year the process is available; defaults to the first milestone year
    start_year: Option<u32>,
    /// Last year the process is available; defaults to the last milestone year
    end_year: Option<u32>,
    /// Factor converting capacity to activity; defaults to 1.0, must be >= 0
    capacity_to_activity: Option<ActivityPerCapacity>,
    /// Capacity unit size; if given, must be > 0
    unit_size: Option<Capacity>,
}
define_id_getter! {ProcessRaw, ProcessID}
/// Read process information from the model directory's CSV files.
///
/// Reads the main `processes.csv` file plus the per-process availability, flow,
/// parameter and investment-constraint files, then merges the latter into the
/// `Process` structs.
///
/// # Arguments
///
/// * `model_dir` - Folder containing the model configuration files
/// * `commodities` - Map of known commodities
/// * `region_ids` - Set of known region IDs
/// * `time_slice_info` - Time-slice definitions for validating availabilities
/// * `milestone_years` - Milestone years of the simulation (used for defaults)
///
/// # Returns
///
/// A map of fully-populated processes, keyed by process ID, or an error if any
/// file is missing, malformed or inconsistent.
pub fn read_processes(
    model_dir: &Path,
    commodities: &CommodityMap,
    region_ids: &IndexSet<RegionID>,
    time_slice_info: &TimeSliceInfo,
    milestone_years: &[u32],
) -> Result<ProcessMap> {
    let mut processes = read_processes_file(model_dir, milestone_years, region_ids, commodities)?;
    let mut activity_limits = read_process_availabilities(model_dir, &processes, time_slice_info)?;
    let mut flows = read_process_flows(model_dir, &mut processes, commodities, milestone_years)?;
    let mut parameters = read_process_parameters(model_dir, &processes, milestone_years)?;
    let mut investment_constraints =
        read_process_investment_constraints(model_dir, &processes, milestone_years)?;

    // Move the per-file maps into the corresponding `Process` structs. Missing
    // entries indicate inconsistent input files, so report them as errors
    // rather than panicking.
    for (id, process) in &mut processes {
        // The `Rc`s should not be shared at this point: they were created in
        // `read_processes_file` and the sub-readers only borrow the map.
        let process = Rc::get_mut(process).expect("process Rc unexpectedly shared");
        process.activity_limits = activity_limits
            .remove(id)
            .with_context(|| format!("Missing availabilities for process {id}"))?;
        process.flows = flows
            .remove(id)
            .with_context(|| format!("Missing flows for process {id}"))?;
        process.parameters = parameters
            .remove(id)
            .with_context(|| format!("Missing parameters for process {id}"))?;
        // Investment constraints are optional; default to an empty map
        process.investment_constraints = investment_constraints.remove(id).unwrap_or_default();
    }

    Ok(processes)
}
/// Load and parse the raw process definitions from `processes.csv`.
///
/// Any error is annotated with the path of the offending file.
fn read_processes_file(
    model_dir: &Path,
    milestone_years: &[u32],
    region_ids: &IndexSet<RegionID>,
    commodities: &CommodityMap,
) -> Result<ProcessMap> {
    let path = model_dir.join(PROCESSES_FILE_NAME);
    let records = read_csv(&path)?;
    let result = read_processes_file_from_iter(records, milestone_years, region_ids, commodities);
    result.with_context(|| input_err_msg(&path))
}
/// Convert raw CSV process records into a validated [`ProcessMap`].
///
/// Validation performed on each record:
/// * `start_year`/`end_year` default to the first/last milestone year (with a
///   warning) and must satisfy `start_year <= end_year`
/// * `regions` must parse against the known region IDs
/// * `primary_output`, if given, must be a known commodity ID
/// * `unit_size`, if given, must be > 0
/// * `capacity_to_activity` defaults to 1.0 and must be >= 0
/// * process IDs must be unique
fn read_processes_file_from_iter<I>(
    iter: I,
    milestone_years: &[u32],
    region_ids: &IndexSet<RegionID>,
    commodities: &CommodityMap,
) -> Result<ProcessMap>
where
    I: Iterator<Item = ProcessRaw>,
{
    let mut processes = ProcessMap::new();
    for process_raw in iter {
        // NOTE(review): indexing assumes `milestone_years` is non-empty —
        // presumably guaranteed by the caller; confirm upstream validation.
        let start_year = process_raw.start_year.unwrap_or_else(|| {
            warn!(
                "Using default start year {} for process {}.",
                milestone_years[0], process_raw.id
            );
            milestone_years[0]
        });
        let end_year = process_raw.end_year.unwrap_or_else(|| {
            warn!(
                "Using default end year {} for process {}.",
                milestone_years.last().unwrap(),
                process_raw.id
            );
            *milestone_years.last().unwrap()
        });
        ensure!(
            start_year <= end_year,
            "Error in parameter for process {}: start_year > end_year",
            process_raw.id
        );
        let years = start_year..=end_year;

        let regions = parse_region_str(&process_raw.regions, region_ids)?;

        // Look up the primary output commodity, if one was specified
        let primary_output = process_raw
            .primary_output
            .map(|id| {
                let id = commodities.get_id(id.trim())?;
                Ok(id.clone())
            })
            .transpose()?;

        let capacity_to_activity = process_raw
            .capacity_to_activity
            .unwrap_or(ActivityPerCapacity(1.0));

        // Idiomatic `if let` instead of comparing `Option`s directly. A NaN
        // unit_size also fails this check, since `NaN > 0.0` is false.
        if let Some(unit_size) = process_raw.unit_size {
            ensure!(
                unit_size > Capacity(0.0),
                "Error in process {}: unit_size must be > 0 or None",
                process_raw.id
            );
        }
        ensure!(
            capacity_to_activity >= ActivityPerCapacity(0.0),
            "Error in process {}: capacity_to_activity must be >= 0",
            process_raw.id
        );

        let process = Process {
            id: process_raw.id.clone(),
            description: process_raw.description,
            years,
            // Sub-maps are filled in later by `read_processes`
            activity_limits: ProcessActivityLimitsMap::new(),
            flows: ProcessFlowsMap::new(),
            parameters: ProcessParameterMap::new(),
            regions,
            primary_output,
            capacity_to_activity,
            investment_constraints: ProcessInvestmentConstraintsMap::new(),
            unit_size: process_raw.unit_size,
        };
        // Clone the ID for insertion so the error message can name the duplicate
        ensure!(
            processes
                .insert(process_raw.id.clone(), process.into())
                .is_none(),
            "Duplicate process ID: {}",
            process_raw.id
        );
    }
    Ok(processes)
}