//! Code for reading processes from CSV input files.
use super::{input_err_msg, read_csv};
use crate::commodity::CommodityMap;
use crate::id::{IDCollection, define_id_getter};
use crate::process::{
    Process, ProcessActivityLimitsMap, ProcessFlowsMap, ProcessID, ProcessMap, ProcessParameterMap,
};
use crate::region::{RegionID, parse_region_str};
use crate::time_slice::TimeSliceInfo;
use crate::units::ActivityPerCapacity;
use anyhow::{Context, Ok, Result, ensure};
use indexmap::IndexSet;
use itertools::chain;
use serde::Deserialize;
use std::path::Path;
use std::rc::Rc;

mod availability;
use availability::read_process_availabilities;
mod flow;
use flow::read_process_flows;
mod parameter;
use parameter::read_process_parameters;

/// The name of the processes CSV file within the model directory
const PROCESSES_FILE_NAME: &str = "processes.csv";

/// Represents a single row of the processes CSV file
#[derive(PartialEq, Debug, Deserialize)]
struct ProcessRaw {
    id: ProcessID,
    description: String,
    regions: String,
    primary_output: Option<String>,
    start_year: Option<u32>,
    end_year: Option<u32>,
    capacity_to_activity: Option<ActivityPerCapacity>,
}
define_id_getter! {ProcessRaw, ProcessID}
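
// An example `processes.csv` might look like the following sketch (the values
// are illustrative only; the region and commodity IDs must match those defined
// in the model's other input files):
//
//     id,description,regions,primary_output,start_year,end_year,capacity_to_activity
//     GASPLANT,Gas-fired power plant,GBR,electricity,2020,2040,1.0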

/// Read process information from the specified CSV files.
///
/// # Arguments
///
/// * `model_dir` - Folder containing model configuration files
/// * `commodities` - Commodities for the model
/// * `region_ids` - All possible region IDs
/// * `time_slice_info` - Information about seasons and times of day
/// * `milestone_years` - The milestone years of the simulation
///
/// # Returns
///
/// A map of processes, keyed by process ID, or an error.
pub fn read_processes(
    model_dir: &Path,
    commodities: &CommodityMap,
    region_ids: &IndexSet<RegionID>,
    time_slice_info: &TimeSliceInfo,
    milestone_years: &[u32],
) -> Result<ProcessMap> {
    // The first milestone year is treated as the base year
    let base_year = milestone_years[0];
    let mut processes = read_processes_file(model_dir, milestone_years, region_ids, commodities)?;
    let mut activity_limits =
        read_process_availabilities(model_dir, &processes, time_slice_info, base_year)?;
    let mut flows = read_process_flows(model_dir, &mut processes, commodities)?;
    let mut parameters = read_process_parameters(model_dir, &processes, base_year)?;

    // Add the data read from the other files to each process
    for (id, process) in &mut processes {
        // At this point each process should have no other references, so
        // `get_mut` will not fail
        let process = Rc::get_mut(process).unwrap();

        // These maps should contain an entry for every process
        process.activity_limits = activity_limits.remove(id).unwrap();
        process.flows = flows.remove(id).unwrap();
        process.parameters = parameters.remove(id).unwrap();
    }

    Ok(processes)
}
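
// A typical call might look like the following sketch, assuming the other
// input structures have already been loaded:
//
//     let processes = read_processes(
//         Path::new("model_dir"),
//         &commodities,
//         &region_ids,
//         &time_slice_info,
//         &[2020, 2025, 2030],
//     )?;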

/// Read the contents of the processes CSV file into a `ProcessMap`
fn read_processes_file(
    model_dir: &Path,
    milestone_years: &[u32],
    region_ids: &IndexSet<RegionID>,
    commodities: &CommodityMap,
) -> Result<ProcessMap> {
    let file_path = model_dir.join(PROCESSES_FILE_NAME);
    let processes_csv = read_csv(&file_path)?;
    read_processes_file_from_iter(processes_csv, milestone_years, region_ids, commodities)
        .with_context(|| input_err_msg(&file_path))
}

/// Read processes from an iterator over raw CSV rows
fn read_processes_file_from_iter<I>(
    iter: I,
    milestone_years: &[u32],
    region_ids: &IndexSet<RegionID>,
    commodities: &CommodityMap,
) -> Result<ProcessMap>
where
    I: Iterator<Item = ProcessRaw>,
{
    let mut processes = ProcessMap::new();
    for process_raw in iter {
        // If no years are given, the process is valid for all milestone years
        let start_year = process_raw.start_year.unwrap_or(milestone_years[0]);
        let end_year = process_raw
            .end_year
            .unwrap_or(*milestone_years.last().unwrap());

        ensure!(
            start_year <= end_year,
            "Error in process {}: start_year must not be after end_year",
            process_raw.id
        );

        // Collect the years for which this process is valid: years before the
        // base year are included individually, while from the base year
        // onwards only milestone years within the range are kept
        let years = chain(
            start_year..milestone_years[0],
            milestone_years
                .iter()
                .copied()
                .filter(|year| (start_year..=end_year).contains(year)),
        )
        .collect();
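        // For example, with milestone_years = [2020, 2025, 2030], a process
        // with start_year = 2018 and end_year = 2025 gets the years
        // [2018, 2019, 2020, 2025]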

        // Parse the regions string into a set of valid region IDs
        let regions = parse_region_str(&process_raw.regions, region_ids)?;

        // Look up the primary output commodity, if one is given
        let primary_output = process_raw
            .primary_output
            .map(|id| {
                let id = commodities.get_id(id.trim())?;
                Ok(id.clone())
            })
            .transpose()?;

        // Default to a capacity-to-activity ratio of 1 if not specified
        let capacity_to_activity = process_raw
            .capacity_to_activity
            .unwrap_or(ActivityPerCapacity(1.0));

        ensure!(
            capacity_to_activity >= ActivityPerCapacity(0.0),
            "Error in process {}: capacity_to_activity must be >= 0",
            process_raw.id
        );

        let process = Process {
            id: process_raw.id.clone(),
            description: process_raw.description,
            years,
            activity_limits: ProcessActivityLimitsMap::new(),
            flows: ProcessFlowsMap::new(),
            parameters: ProcessParameterMap::new(),
            regions,
            primary_output,
            capacity_to_activity,
        };

        // Insert the process into the map, checking the ID is unique
        let id = process_raw.id;
        ensure!(
            processes.insert(id.clone(), process.into()).is_none(),
            "Duplicate process ID: {id}"
        );
    }

    Ok(processes)
}
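
#[cfg(test)]
mod tests {
    use super::*;

    // A sketch of a unit test for the year-range check. It assumes that
    // `ProcessID` and `RegionID` implement `From<&str>` and that
    // `CommodityMap` has a `new` constructor; adjust to the real constructors
    // if these assumptions do not hold.
    #[test]
    fn read_processes_rejects_invalid_year_range() {
        let process_raw = ProcessRaw {
            id: "proc1".into(),
            description: "An example process".into(),
            regions: "GBR".into(),
            primary_output: None,
            start_year: Some(2030), // start after end: should be rejected
            end_year: Some(2020),
            capacity_to_activity: None,
        };

        // The maps can be empty as the year check fails before they are used
        let region_ids = IndexSet::from(["GBR".into()]);
        let commodities = CommodityMap::new();
        let result = read_processes_file_from_iter(
            std::iter::once(process_raw),
            &[2020, 2025, 2030],
            &region_ids,
            &commodities,
        );
        assert!(result.is_err());
    }
}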