use super::super::{input_err_msg, read_csv_optional, try_insert};
use crate::agent::{Agent, AgentID, AgentMap, AgentSearchSpaceMap};
use crate::commodity::CommodityID;
use crate::id::IDCollection;
use crate::process::{Process, ProcessMap};
use crate::year::parse_year_str;
use anyhow::{Context, Result};
use itertools::Itertools;
use serde::Deserialize;
use std::collections::{HashMap, HashSet};
use std::path::Path;
use std::rc::Rc;
/// Name of the CSV file containing agent search-space definitions.
const AGENT_SEARCH_SPACE_FILE_NAME: &str = "agent_search_space.csv";
/// A single row of the agent search-space CSV file, prior to validation.
#[derive(PartialEq, Debug, Deserialize)]
struct AgentSearchSpaceRaw {
    /// ID of the agent this search space applies to (validated against known agents)
    agent_id: String,
    /// ID of the commodity this search space applies to (validated against known commodities)
    commodity_id: String,
    /// Year specification string; parsed against the milestone years by `parse_year_str`
    years: String,
    /// Semicolon-separated process IDs, or empty / "all" to select every process
    search_space: String,
}
/// A validated agent search-space entry.
#[derive(Debug)]
struct AgentSearchSpace {
    /// The agent this search space applies to
    agent_id: AgentID,
    /// The commodity this search space applies to
    commodity_id: CommodityID,
    /// The years this search space applies to
    years: Vec<u32>,
    /// The processes the agent may choose between (shared across all the years above)
    search_space: Rc<Vec<Rc<Process>>>,
}
impl AgentSearchSpaceRaw {
    /// Validate this raw CSV record and convert it into an [`AgentSearchSpace`].
    ///
    /// The process list, commodity ID, year specification and agent ID are checked (in that
    /// order) against the known model entities.
    ///
    /// # Errors
    ///
    /// Returns an error if any referenced ID is unknown or the year string is invalid.
    fn into_agent_search_space(
        self,
        agents: &AgentMap,
        processes: &ProcessMap,
        commodity_ids: &HashSet<CommodityID>,
        milestone_years: &[u32],
    ) -> Result<AgentSearchSpace> {
        // Validation order matches the original: processes first, then commodity,
        // years and finally agent, so error precedence is unchanged.
        let search_space = Rc::new(parse_search_space_str(&self.search_space, processes)?);
        let commodity_id = commodity_ids.get_id(&self.commodity_id)?.clone();
        let years = parse_year_str(&self.years, milestone_years)?;
        let agent_id = agents.get_id(&self.agent_id)?.clone();
        Ok(AgentSearchSpace {
            agent_id,
            commodity_id,
            years,
            search_space,
        })
    }
}
/// Parse a search-space string into the corresponding list of processes.
///
/// An empty string or "all" (case-insensitive) selects every known process; otherwise the
/// string is treated as a semicolon-separated list of process IDs (whitespace around each ID
/// is ignored).
///
/// # Errors
///
/// Returns an error if any listed ID does not correspond to a known process.
fn parse_search_space_str(search_space: &str, processes: &ProcessMap) -> Result<Vec<Rc<Process>>> {
    let search_space = search_space.trim();
    if search_space.is_empty() || search_space.eq_ignore_ascii_case("all") {
        Ok(processes.values().cloned().collect())
    } else {
        search_space
            .split(';')
            .map(|id| {
                // Trim once so the lookup and the error message agree on the ID; previously
                // the error interpolated the untrimmed token, e.g. "A; D" reported
                // "Invalid process ' D'" with stray whitespace.
                let id = id.trim();
                let process = processes
                    .get(id)
                    .with_context(|| format!("Invalid process '{id}'"))?;
                Ok(process.clone())
            })
            .try_collect()
    }
}
/// Read agent search-space entries from the model directory.
///
/// The file is optional: if it is absent, only the default entries derived from each agent's
/// commodity portions are produced.
///
/// # Errors
///
/// Returns an error (annotated with the file path) if the file cannot be read or any record
/// fails validation.
pub fn read_agent_search_space(
    model_dir: &Path,
    agents: &AgentMap,
    processes: &ProcessMap,
    commodity_ids: &HashSet<CommodityID>,
    milestone_years: &[u32],
) -> Result<HashMap<AgentID, AgentSearchSpaceMap>> {
    let file_path = model_dir.join(AGENT_SEARCH_SPACE_FILE_NAME);
    read_agent_search_space_from_iter(
        read_csv_optional::<AgentSearchSpaceRaw>(&file_path)?,
        agents,
        processes,
        commodity_ids,
        milestone_years,
    )
    .with_context(|| input_err_msg(&file_path))
}
fn read_agent_search_space_from_iter<I>(
iter: I,
agents: &AgentMap,
processes: &ProcessMap,
commodity_ids: &HashSet<CommodityID>,
milestone_years: &[u32],
) -> Result<HashMap<AgentID, AgentSearchSpaceMap>>
where
I: Iterator<Item = AgentSearchSpaceRaw>,
{
let mut search_spaces = HashMap::new();
for search_space_raw in iter {
let search_space = search_space_raw.into_agent_search_space(
agents,
processes,
commodity_ids,
milestone_years,
)?;
let map = search_spaces
.entry(search_space.agent_id)
.or_insert_with(AgentSearchSpaceMap::new);
for year in search_space.years {
try_insert(
map,
&(search_space.commodity_id.clone(), year),
search_space.search_space.clone(),
)?;
}
}
for (agent_id, agent) in agents {
let search_space = search_spaces
.entry(agent_id.clone())
.or_insert_with(AgentSearchSpaceMap::new);
fill_missing_search_space_entries(agent, processes, search_space);
}
Ok(search_spaces)
}
fn fill_missing_search_space_entries(
agent: &Agent,
processes: &ProcessMap,
search_space: &mut AgentSearchSpaceMap,
) {
assert!(!agent.commodity_portions.is_empty());
for (commodity_id, year) in agent.commodity_portions.keys() {
let key = (commodity_id.clone(), *year);
search_space.entry(key).or_insert_with(|| {
Rc::new(get_all_producers(processes, commodity_id, *year).collect())
});
}
}
/// Iterate over all processes producing the given commodity in the given year.
///
/// A process qualifies when it is active for `year` and its primary output is `commodity_id`.
fn get_all_producers<'a>(
    processes: &'a ProcessMap,
    commodity_id: &'a CommodityID,
    year: u32,
) -> impl Iterator<Item = Rc<Process>> + 'a {
    processes.values().filter_map(move |process| {
        // Check activity first so the output comparison is skipped for inactive processes,
        // matching the original short-circuit order.
        let is_producer =
            process.active_for_year(year) && process.primary_output.as_ref() == Some(commodity_id);
        is_producer.then(|| Rc::clone(process))
    })
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::fixture::{agents, assert_error, region_ids};
    use crate::process::{
        ProcessActivityLimitsMap, ProcessFlowsMap, ProcessID, ProcessInvestmentConstraintsMap,
        ProcessParameterMap,
    };
    use crate::region::RegionID;
    use crate::units::ActivityPerCapacity;
    use indexmap::IndexSet;
    use rstest::{fixture, rstest};
    use std::iter;

    /// Fixture: three processes "A", "B" and "C", each active for 2010..=2020 with empty
    /// parameter maps and no primary output.
    #[fixture]
    pub fn processes(region_ids: IndexSet<RegionID>) -> ProcessMap {
        ["A", "B", "C"]
            .map(|id| {
                let id: ProcessID = id.into();
                let process = Process {
                    id: id.clone(),
                    description: "Description".into(),
                    years: 2010..=2020,
                    activity_limits: ProcessActivityLimitsMap::new(),
                    flows: ProcessFlowsMap::new(),
                    parameters: ProcessParameterMap::new(),
                    regions: region_ids.clone(),
                    primary_output: None,
                    capacity_to_activity: ActivityPerCapacity(1.0),
                    investment_constraints: ProcessInvestmentConstraintsMap::new(),
                    unit_size: None,
                };
                (id, process.into())
            })
            .into_iter()
            .collect()
    }

    /// Fixture: a single known commodity ID, "commodity1".
    #[fixture]
    fn commodity_ids() -> HashSet<CommodityID> {
        iter::once("commodity1".into()).collect()
    }

    /// A record with valid agent, commodity, year and process IDs converts successfully.
    #[rstest]
    fn search_space_raw_into_search_space_valid(
        agents: AgentMap,
        processes: ProcessMap,
        commodity_ids: HashSet<CommodityID>,
    ) {
        let raw = AgentSearchSpaceRaw {
            agent_id: "agent1".into(),
            commodity_id: "commodity1".into(),
            years: "2020".into(),
            search_space: "A;B".into(),
        };
        raw.into_agent_search_space(&agents, &processes, &commodity_ids, &[2020])
            .unwrap();
    }

    /// An unknown commodity ID is rejected with an "Unknown ID" error.
    #[rstest]
    fn search_space_raw_into_search_space_invalid_commodity_id(
        agents: AgentMap,
        processes: ProcessMap,
        commodity_ids: HashSet<CommodityID>,
    ) {
        let raw = AgentSearchSpaceRaw {
            agent_id: "agent1".into(),
            commodity_id: "invalid_commodity".into(),
            years: "2020".into(),
            search_space: "A;B".into(),
        };
        assert_error!(
            raw.into_agent_search_space(&agents, &processes, &commodity_ids, &[2020]),
            "Unknown ID invalid_commodity found"
        );
    }

    /// A search-space list naming an unknown process ("D") is rejected.
    #[rstest]
    fn search_space_raw_into_search_space_invalid_process_id(
        agents: AgentMap,
        processes: ProcessMap,
        commodity_ids: HashSet<CommodityID>,
    ) {
        let raw = AgentSearchSpaceRaw {
            agent_id: "agent1".into(),
            commodity_id: "commodity1".into(),
            years: "2020".into(),
            search_space: "A;D".into(),
        };
        assert_error!(
            raw.into_agent_search_space(&agents, &processes, &commodity_ids, &[2020]),
            "Invalid process 'D'"
        );
    }
}