use super::*;
use crate::agent::{
    Agent, AgentCommodityPortionsMap, AgentCostLimitsMap, AgentID, AgentMap, DecisionRule,
};
use crate::commodity::CommodityMap;
use crate::process::ProcessMap;
use crate::region::{parse_region_str, RegionID};
use anyhow::{bail, ensure, Context, Result};
use serde::Deserialize;
use std::collections::HashSet;
use std::path::Path;

mod objective;
use objective::read_agent_objectives;
mod search_space;
use search_space::read_agent_search_space;
mod commodity_portion;
use commodity_portion::read_agent_commodity_portions;
mod cost_limit;
use cost_limit::read_agent_cost_limits;

const AGENT_FILE_NAME: &str = "agents.csv";

/// An agent record as it appears in the agents CSV file
#[derive(Debug, Deserialize, PartialEq, Clone)]
struct AgentRaw {
    /// A unique identifier for the agent
    id: String,
    /// A text description of the agent
    description: String,
    /// The region(s) in which the agent operates
    regions: String,
    /// The decision rule the agent uses ("single", "weighted" or "lexico")
    decision_rule: String,
    /// Tolerance for the "lexico" decision rule (required when that rule is used)
    decision_lexico_tolerance: Option<f64>,
}

/// Read agents from the CSV files in the model directory.
///
/// This reads the agents file itself along with the objectives, search space, commodity
/// portions and cost limits files, and combines them into a single map of agents.
///
/// # Arguments
///
/// * `model_dir` - Folder containing model configuration files
/// * `commodities` - Commodities for the model
/// * `processes` - Processes for the model
/// * `region_ids` - All possible region IDs
/// * `milestone_years` - The milestone years of the simulation
///
/// # Returns
///
/// A map of [`Agent`]s keyed by agent ID, or an error.
pub fn read_agents(
    model_dir: &Path,
    commodities: &CommodityMap,
    processes: &ProcessMap,
    region_ids: &HashSet<RegionID>,
    milestone_years: &[u32],
) -> Result<AgentMap> {
    let process_ids = processes.keys().cloned().collect();
    let mut agents = read_agents_file(model_dir, region_ids)?;
    let agent_ids = agents.keys().cloned().collect();

    // Read the per-agent input files
    let mut objectives = read_agent_objectives(model_dir, &agents, milestone_years)?;
    let mut search_spaces = read_agent_search_space(
        model_dir,
        &agents,
        &process_ids,
        commodities,
        milestone_years,
    )?;
    let mut agent_commodities = read_agent_commodity_portions(
        model_dir,
        &agents,
        commodities,
        region_ids,
        milestone_years,
    )?;
    let mut cost_limits = read_agent_cost_limits(model_dir, &agent_ids, milestone_years)?;

    // Attach the data from the other files to the corresponding agents
    for (id, agent) in agents.iter_mut() {
        agent.objectives = objectives.remove(id).unwrap();
        if let Some(search_space) = search_spaces.remove(id) {
            agent.search_space = search_space;
        }
        agent.commodity_portions = agent_commodities
            .remove(id)
            .with_context(|| format!("Missing commodity portions for agent {}", id))?;
        if let Some(cost_limits) = cost_limits.remove(id) {
            agent.cost_limits = cost_limits;
        }
    }

    Ok(agents)
}

/// Read agents from the agents CSV file in the model directory.
fn read_agents_file(model_dir: &Path, region_ids: &HashSet<RegionID>) -> Result<AgentMap> {
    let file_path = model_dir.join(AGENT_FILE_NAME);
    let agents_csv = read_csv(&file_path)?;
    read_agents_file_from_iter(agents_csv, region_ids).with_context(|| input_err_msg(&file_path))
}

/// Build an [`AgentMap`] from an iterator of raw agent records, validating regions and
/// decision rules.
fn read_agents_file_from_iter<I>(iter: I, region_ids: &HashSet<RegionID>) -> Result<AgentMap>
where
    I: Iterator<Item = AgentRaw>,
{
    let mut agents = AgentMap::new();
    for agent_raw in iter {
        let regions = parse_region_str(&agent_raw.regions, region_ids)?;

        // Parse the decision rule, which may require a tolerance parameter
        let decision_rule = match agent_raw.decision_rule.to_ascii_lowercase().as_str() {
            "single" => DecisionRule::Single,
            "weighted" => DecisionRule::Weighted,
            "lexico" => {
                let tolerance = agent_raw
                    .decision_lexico_tolerance
                    .with_context(|| "Missing tolerance for lexico decision rule")?;
                ensure!(
                    tolerance >= 0.0,
                    "Lexico tolerance must be non-negative, got {}",
                    tolerance
                );
                DecisionRule::Lexicographical { tolerance }
            }
            invalid_rule => bail!("Invalid decision rule: {}", invalid_rule),
        };

        let agent = Agent {
            id: AgentID(agent_raw.id.into()),
            description: agent_raw.description,
            commodity_portions: AgentCommodityPortionsMap::new(),
            search_space: Vec::new(),
            decision_rule,
            cost_limits: AgentCostLimitsMap::new(),
            regions,
            objectives: Vec::new(),
        };

        let id = agent.id.clone();
        ensure!(
            agents.insert(id.clone(), agent).is_none(),
            "Duplicate agent ID: {}",
            id
        );
    }

    Ok(agents)
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::agent::DecisionRule;
    use std::iter;

    #[test]
    fn test_read_agents_file_from_iter() {
        // Valid agent
        let region_ids = HashSet::from(["GBR".into()]);
        let agent = AgentRaw {
            id: "agent".into(),
            description: "".into(),
            decision_rule: "single".into(),
            decision_lexico_tolerance: None,
            regions: "GBR".into(),
        };
        let agent_out = Agent {
            id: "agent".into(),
            description: "".into(),
            commodity_portions: AgentCommodityPortionsMap::new(),
            search_space: Vec::new(),
            decision_rule: DecisionRule::Single,
            cost_limits: AgentCostLimitsMap::new(),
            regions: HashSet::from(["GBR".into()]),
            objectives: Vec::new(),
        };
        let expected = AgentMap::from_iter(iter::once(("agent".into(), agent_out)));
        let actual = read_agents_file_from_iter(iter::once(agent), &region_ids).unwrap();
        assert_eq!(actual, expected);

        // Duplicate agent IDs should be rejected
        let agents = [
            AgentRaw {
                id: "agent".into(),
                description: "".into(),
                decision_rule: "single".into(),
                decision_lexico_tolerance: None,
                regions: "GBR".into(),
            },
            AgentRaw {
                id: "agent".into(),
                description: "".into(),
                decision_rule: "single".into(),
                decision_lexico_tolerance: None,
                regions: "GBR".into(),
            },
        ];
        assert!(read_agents_file_from_iter(agents.into_iter(), &region_ids).is_err());

        // The "lexico" decision rule requires a tolerance to be provided
        let agent = AgentRaw {
            id: "agent".into(),
            description: "".into(),
            decision_rule: "lexico".into(),
            decision_lexico_tolerance: None,
            regions: "GBR".into(),
        };
        assert!(read_agents_file_from_iter(iter::once(agent), &region_ids).is_err());
    }
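
    // A minimal extra check, not in the original tests and sketched on the same setup as above:
    // a negative tolerance for the "lexico" rule should be rejected, since
    // read_agents_file_from_iter requires the tolerance to be non-negative.
    #[test]
    fn test_read_agents_file_from_iter_negative_lexico_tolerance() {
        let region_ids = HashSet::from(["GBR".into()]);
        let agent = AgentRaw {
            id: "agent".into(),
            description: "".into(),
            decision_rule: "lexico".into(),
            decision_lexico_tolerance: Some(-1.0),
            regions: "GBR".into(),
        };
        assert!(read_agents_file_from_iter(iter::once(agent), &region_ids).is_err());
    }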
}