1use super::{input_err_msg, read_csv};
3use crate::commodity::CommodityMap;
4use crate::id::IDCollection;
5use crate::process::{
6 Process, ProcessActivityLimitsMap, ProcessFlowsMap, ProcessID, ProcessMap, ProcessParameterMap,
7};
8use crate::region::{RegionID, parse_region_str};
9use crate::time_slice::TimeSliceInfo;
10use crate::units::ActivityPerCapacity;
11use anyhow::{Context, Ok, Result, ensure};
12use indexmap::IndexSet;
13use itertools::chain;
14use serde::Deserialize;
15use std::path::Path;
16use std::rc::Rc;
17
18mod availability;
19use availability::read_process_availabilities;
20mod flow;
21use flow::read_process_flows;
22mod parameter;
23use crate::id::define_id_getter;
24use parameter::read_process_parameters;
25
/// Name of the CSV file containing the top-level process definitions
const PROCESSES_FILE_NAME: &str = "processes.csv";
27
/// Raw representation of one row of the processes CSV file, before validation.
#[derive(PartialEq, Debug, Deserialize)]
struct ProcessRaw {
    // Unique identifier for the process
    id: ProcessID,
    // Human-readable description of the process
    description: String,
    // Regions in which the process operates, as an unparsed string
    // (parsed later with `parse_region_str`)
    regions: String,
    // Optional ID of the commodity that is the process's primary output;
    // looked up in the commodity map during parsing
    primary_output: Option<String>,
    // First year the process is available; defaults to the first milestone year
    start_year: Option<u32>,
    // Last year the process is available; defaults to the last milestone year
    end_year: Option<u32>,
    // Conversion factor from capacity to activity; defaults to 1.0 and must be >= 0
    capacity_to_activity: Option<ActivityPerCapacity>,
}
define_id_getter! {ProcessRaw, ProcessID}
39
40pub fn read_processes(
54 model_dir: &Path,
55 commodities: &CommodityMap,
56 region_ids: &IndexSet<RegionID>,
57 time_slice_info: &TimeSliceInfo,
58 milestone_years: &[u32],
59) -> Result<ProcessMap> {
60 let mut processes = read_processes_file(model_dir, milestone_years, region_ids, commodities)?;
61 let mut activity_limits = read_process_availabilities(model_dir, &processes, time_slice_info)?;
62 let mut flows = read_process_flows(model_dir, &mut processes, commodities)?;
63 let mut parameters = read_process_parameters(model_dir, &processes, milestone_years[0])?;
64
65 for (id, process) in &mut processes {
67 let process = Rc::get_mut(process).unwrap();
69
70 process.activity_limits = activity_limits.remove(id).unwrap();
72 process.flows = flows.remove(id).unwrap();
73 process.parameters = parameters.remove(id).unwrap();
74 }
75
76 Ok(processes)
77}
78
79fn read_processes_file(
80 model_dir: &Path,
81 milestone_years: &[u32],
82 region_ids: &IndexSet<RegionID>,
83 commodities: &CommodityMap,
84) -> Result<ProcessMap> {
85 let file_path = model_dir.join(PROCESSES_FILE_NAME);
86 let processes_csv = read_csv(&file_path)?;
87 read_processes_file_from_iter(processes_csv, milestone_years, region_ids, commodities)
88 .with_context(|| input_err_msg(&file_path))
89}
90
91fn read_processes_file_from_iter<I>(
92 iter: I,
93 milestone_years: &[u32],
94 region_ids: &IndexSet<RegionID>,
95 commodities: &CommodityMap,
96) -> Result<ProcessMap>
97where
98 I: Iterator<Item = ProcessRaw>,
99{
100 let mut processes = ProcessMap::new();
101 for process_raw in iter {
102 let start_year = process_raw.start_year.unwrap_or(milestone_years[0]);
103 let end_year = process_raw
104 .end_year
105 .unwrap_or(*milestone_years.last().unwrap());
106
107 ensure!(
109 start_year <= end_year,
110 "Error in parameter for process {}: start_year > end_year",
111 process_raw.id
112 );
113
114 let years = chain(
118 start_year..milestone_years[0],
119 milestone_years
120 .iter()
121 .copied()
122 .filter(|year| (start_year..=end_year).contains(year)),
123 )
124 .collect();
125
126 let regions = parse_region_str(&process_raw.regions, region_ids)?;
128
129 let primary_output = process_raw
131 .primary_output
132 .map(|id| {
133 let id = commodities.get_id(id.trim())?;
134 Ok(id.clone())
135 })
136 .transpose()?;
137
138 let capacity_to_activity = process_raw
139 .capacity_to_activity
140 .unwrap_or(ActivityPerCapacity(1.0));
141
142 ensure!(
144 capacity_to_activity >= ActivityPerCapacity(0.0),
145 "Error in process {}: capacity_to_activity must be >= 0",
146 process_raw.id
147 );
148
149 let process = Process {
150 id: process_raw.id.clone(),
151 description: process_raw.description,
152 years,
153 activity_limits: ProcessActivityLimitsMap::new(),
154 flows: ProcessFlowsMap::new(),
155 parameters: ProcessParameterMap::new(),
156 regions,
157 primary_output,
158 capacity_to_activity,
159 };
160
161 ensure!(
162 processes.insert(process_raw.id, process.into()).is_none(),
163 "Duplicate process ID"
164 );
165 }
166
167 Ok(processes)
168}