1use super::*;
3use crate::commodity::CommodityMap;
4use crate::id::IDCollection;
5use crate::process::{
6 Process, ProcessActivityLimitsMap, ProcessFlowsMap, ProcessID, ProcessMap, ProcessParameterMap,
7};
8use crate::region::{RegionID, parse_region_str};
9use crate::time_slice::TimeSliceInfo;
10use anyhow::{Context, Ok, Result, ensure};
11use indexmap::IndexSet;
12use itertools::chain;
13use serde::Deserialize;
14use std::path::Path;
15use std::rc::Rc;
16
17mod availability;
18use availability::read_process_availabilities;
19mod flow;
20use flow::read_process_flows;
21mod parameter;
22use crate::id::define_id_getter;
23use parameter::read_process_parameters;
24
/// Name of the CSV file containing the process definitions
const PROCESSES_FILE_NAME: &str = "processes.csv";
26
/// Raw representation of one row of `processes.csv`, as deserialized by serde.
///
/// Optional fields fall back to defaults derived from the milestone years
/// when absent (see `read_processes_file_from_iter`).
#[derive(PartialEq, Debug, Deserialize)]
struct ProcessRaw {
    /// Unique identifier for the process
    id: ProcessID,
    /// Human-readable description of the process
    description: String,
    /// Unparsed region specifier; parsed later with `parse_region_str`
    regions: String,
    /// ID of the commodity this process primarily outputs, if any
    primary_output: Option<String>,
    /// First year the process is available; defaults to the first milestone year
    start_year: Option<u32>,
    /// Last year the process is available; defaults to the last milestone year
    end_year: Option<u32>,
}
define_id_getter! {ProcessRaw, ProcessID}
37
38pub fn read_processes(
52 model_dir: &Path,
53 commodities: &CommodityMap,
54 region_ids: &IndexSet<RegionID>,
55 time_slice_info: &TimeSliceInfo,
56 milestone_years: &[u32],
57) -> Result<ProcessMap> {
58 let mut processes = read_processes_file(model_dir, milestone_years, region_ids, commodities)?;
59 let mut activity_limits = read_process_availabilities(model_dir, &processes, time_slice_info)?;
60 let mut flows = read_process_flows(model_dir, &mut processes, commodities)?;
61 let mut parameters = read_process_parameters(model_dir, &processes, milestone_years[0])?;
62
63 for (id, process) in processes.iter_mut() {
65 let process = Rc::get_mut(process).unwrap();
67
68 process.activity_limits = activity_limits.remove(id).unwrap();
70 process.flows = flows.remove(id).unwrap();
71 process.parameters = parameters.remove(id).unwrap();
72 }
73
74 Ok(processes)
75}
76
77fn read_processes_file(
78 model_dir: &Path,
79 milestone_years: &[u32],
80 region_ids: &IndexSet<RegionID>,
81 commodities: &CommodityMap,
82) -> Result<ProcessMap> {
83 let file_path = model_dir.join(PROCESSES_FILE_NAME);
84 let processes_csv = read_csv(&file_path)?;
85 read_processes_file_from_iter(processes_csv, milestone_years, region_ids, commodities)
86 .with_context(|| input_err_msg(&file_path))
87}
88
89fn read_processes_file_from_iter<I>(
90 iter: I,
91 milestone_years: &[u32],
92 region_ids: &IndexSet<RegionID>,
93 commodities: &CommodityMap,
94) -> Result<ProcessMap>
95where
96 I: Iterator<Item = ProcessRaw>,
97{
98 let mut processes = ProcessMap::new();
99 for process_raw in iter {
100 let start_year = process_raw.start_year.unwrap_or(milestone_years[0]);
101 let end_year = process_raw
102 .end_year
103 .unwrap_or(*milestone_years.last().unwrap());
104
105 ensure!(
107 start_year <= end_year,
108 "Error in parameter for process {}: start_year > end_year",
109 process_raw.id
110 );
111
112 let years = chain(
116 start_year..milestone_years[0],
117 milestone_years
118 .iter()
119 .copied()
120 .filter(|year| (start_year..=end_year).contains(year)),
121 )
122 .collect();
123
124 let regions = parse_region_str(&process_raw.regions, region_ids)?;
126
127 let primary_output = process_raw
129 .primary_output
130 .map(|id| {
131 let id = commodities.get_id(id.trim())?;
132 Ok(id.clone())
133 })
134 .transpose()?;
135
136 let process = Process {
137 id: process_raw.id.clone(),
138 description: process_raw.description,
139 years,
140 activity_limits: ProcessActivityLimitsMap::new(),
141 flows: ProcessFlowsMap::new(),
142 parameters: ProcessParameterMap::new(),
143 regions,
144 primary_output,
145 };
146
147 ensure!(
148 processes.insert(process_raw.id, process.into()).is_none(),
149 "Duplicate process ID"
150 );
151 }
152
153 Ok(processes)
154}