1use super::*;
3use crate::commodity::{Commodity, CommodityID, CommodityMap, CommodityType};
4use crate::process::{
5 Process, ProcessEnergyLimitsMap, ProcessFlow, ProcessID, ProcessMap, ProcessParameterMap,
6};
7use crate::region::{parse_region_str, RegionID};
8use crate::time_slice::TimeSliceInfo;
9use anyhow::{bail, ensure, Context, Ok, Result};
10use serde::Deserialize;
11use std::collections::{HashMap, HashSet};
12use std::ops::RangeInclusive;
13use std::path::Path;
14use std::rc::Rc;
15
16mod availability;
17use availability::read_process_availabilities;
18mod flow;
19use flow::read_process_flows;
20mod parameter;
21use crate::id::define_id_getter;
22use parameter::read_process_parameters;
23
/// Name of the CSV file (inside the model directory) that defines processes.
const PROCESSES_FILE_NAME: &str = "processes.csv";
25
/// Raw representation of a single row of the processes CSV file.
///
/// Converted into a [`Process`] by `read_processes_file_from_iter`.
#[derive(PartialEq, Debug, Deserialize)]
struct ProcessRaw {
    /// Unique identifier for the process
    id: ProcessID,
    /// Human-readable description of the process
    description: String,
    /// Region specifier string (parsed with `parse_region_str`)
    regions: String,
    /// First year the process is available (defaults to the first milestone year)
    start_year: Option<u32>,
    /// Last year the process is available (defaults to the last milestone year)
    end_year: Option<u32>,
}
define_id_getter! {ProcessRaw, ProcessID}
35
36pub fn read_processes(
50 model_dir: &Path,
51 commodities: &CommodityMap,
52 region_ids: &HashSet<RegionID>,
53 time_slice_info: &TimeSliceInfo,
54 milestone_years: &[u32],
55) -> Result<ProcessMap> {
56 let year_range = milestone_years[0]..=milestone_years[milestone_years.len() - 1];
57 let mut processes = read_processes_file(model_dir, &year_range, region_ids)?;
58 let process_ids = processes.keys().cloned().collect();
59
60 let mut energy_limits = read_process_availabilities(
61 model_dir,
62 &process_ids,
63 &processes,
64 time_slice_info,
65 milestone_years,
66 )?;
67 let mut flows = read_process_flows(model_dir, &process_ids, commodities)?;
68 let mut parameters =
69 read_process_parameters(model_dir, &process_ids, &processes, milestone_years)?;
70
71 validate_commodities(
73 commodities,
74 &flows,
75 region_ids,
76 milestone_years,
77 time_slice_info,
78 ¶meters,
79 &energy_limits,
80 )?;
81
82 for (id, process) in processes.iter_mut() {
84 process.energy_limits = energy_limits
85 .remove(id)
86 .with_context(|| format!("Missing availabilities for process {id}"))?;
87 process.flows = flows
88 .remove(id)
89 .with_context(|| format!("Missing flows for process {id}"))?;
90 process.parameters = parameters
91 .remove(id)
92 .with_context(|| format!("Missing parameters for process {id}"))?;
93 }
94
95 let mut process_map = ProcessMap::new();
97 for (id, process) in processes {
98 process_map.insert(id, process.into());
99 }
100
101 Ok(process_map)
102}
103
104fn read_processes_file(
105 model_dir: &Path,
106 year_range: &RangeInclusive<u32>,
107 region_ids: &HashSet<RegionID>,
108) -> Result<HashMap<ProcessID, Process>> {
109 let file_path = model_dir.join(PROCESSES_FILE_NAME);
110 let processes_csv = read_csv(&file_path)?;
111 read_processes_file_from_iter(processes_csv, year_range, region_ids)
112 .with_context(|| input_err_msg(&file_path))
113}
114
115fn read_processes_file_from_iter<I>(
116 iter: I,
117 year_range: &RangeInclusive<u32>,
118 region_ids: &HashSet<RegionID>,
119) -> Result<HashMap<ProcessID, Process>>
120where
121 I: Iterator<Item = ProcessRaw>,
122{
123 let mut processes = HashMap::new();
124 for process_raw in iter {
125 let start_year = process_raw.start_year.unwrap_or(*year_range.start());
126 let end_year = process_raw.end_year.unwrap_or(*year_range.end());
127
128 ensure!(
130 start_year <= end_year,
131 "Error in parameter for process {}: start_year > end_year",
132 process_raw.id
133 );
134
135 let regions = parse_region_str(&process_raw.regions, region_ids)?;
137
138 let process = Process {
139 id: process_raw.id.clone(),
140 description: process_raw.description,
141 years: start_year..=end_year,
142 energy_limits: ProcessEnergyLimitsMap::new(),
143 flows: Vec::new(),
144 parameters: ProcessParameterMap::new(),
145 regions,
146 };
147
148 ensure!(
149 processes.insert(process_raw.id, process).is_none(),
150 "Duplicate process ID"
151 );
152 }
153
154 Ok(processes)
155}
156
/// Bundle of borrowed inputs needed to validate commodity usage.
///
/// Groups the arguments of `validate_commodities` so they can be passed to
/// helper functions as a single value.
struct ValidationParams<'a> {
    /// Flows for each process
    flows: &'a HashMap<ProcessID, Vec<ProcessFlow>>,
    /// Valid region IDs
    region_ids: &'a HashSet<RegionID>,
    /// Milestone years of the simulation
    milestone_years: &'a [u32],
    /// Information about seasons and times of day
    time_slice_info: &'a TimeSliceInfo,
    /// Parameters for each process, keyed by (region, year)
    parameters: &'a HashMap<ProcessID, ProcessParameterMap>,
    /// Energy limits for each process, keyed by (region, year, time slice)
    availabilities: &'a HashMap<ProcessID, ProcessEnergyLimitsMap>,
}
165
166fn validate_commodities(
168 commodities: &CommodityMap,
169 flows: &HashMap<ProcessID, Vec<ProcessFlow>>,
170 region_ids: &HashSet<RegionID>,
171 milestone_years: &[u32],
172 time_slice_info: &TimeSliceInfo,
173 parameters: &HashMap<ProcessID, ProcessParameterMap>,
174 availabilities: &HashMap<ProcessID, ProcessEnergyLimitsMap>,
175) -> anyhow::Result<()> {
176 let params = ValidationParams {
177 flows,
178 region_ids,
179 milestone_years,
180 time_slice_info,
181 parameters,
182 availabilities,
183 };
184 for (commodity_id, commodity) in commodities {
185 match commodity.kind {
186 CommodityType::SupplyEqualsDemand => {
187 validate_sed_commodity(commodity_id, commodity, flows)?;
188 }
189 CommodityType::ServiceDemand => {
190 validate_svd_commodity(commodity_id, commodity, ¶ms)?;
191 }
192 _ => {}
193 }
194 }
195 Ok(())
196}
197
198fn validate_sed_commodity(
199 commodity_id: &CommodityID,
200 commodity: &Rc<Commodity>,
201 flows: &HashMap<ProcessID, Vec<ProcessFlow>>,
202) -> Result<()> {
203 let mut has_producer = false;
204 let mut has_consumer = false;
205
206 for flow in flows.values().flatten() {
207 if Rc::ptr_eq(&flow.commodity, commodity) {
208 if flow.flow > 0.0 {
209 has_producer = true;
210 } else if flow.flow < 0.0 {
211 has_consumer = true;
212 }
213
214 if has_producer && has_consumer {
215 return Ok(());
216 }
217 }
218 }
219
220 bail!(
221 "Commodity {} of 'SED' type must have both producer and consumer processes",
222 commodity_id
223 );
224}
225
226fn validate_svd_commodity(
227 commodity_id: &CommodityID,
228 commodity: &Rc<Commodity>,
229 params: &ValidationParams,
230) -> Result<()> {
231 for region_id in params.region_ids.iter() {
232 for year in params.milestone_years.iter().copied() {
233 for time_slice in params.time_slice_info.iter_ids() {
234 let demand = commodity
235 .demand
236 .get(&(region_id.clone(), year, time_slice.clone()))
237 .unwrap();
238 if demand > &0.0 {
239 let mut has_producer = false;
240
241 for flow in params.flows.values().flatten() {
245 if Rc::ptr_eq(&flow.commodity, commodity)
246 && flow.flow > 0.0
247 && params
248 .parameters
249 .get(&*flow.process_id)
250 .unwrap()
251 .keys()
252 .contains(&(region_id.clone(), year))
253 && params
254 .availabilities
255 .get(&*flow.process_id)
256 .unwrap()
257 .get(&(region_id.clone(), year, time_slice.clone()))
258 .unwrap()
259 .end()
260 > &0.0
261 {
262 has_producer = true;
263 break;
264 }
265 }
266
267 ensure!(
268 has_producer,
269 "Commodity {} of 'SVD' type must have producer processes for region {} in year {}",
270 commodity_id,
271 region_id,
272 year
273 );
274 }
275 }
276 }
277 }
278
279 Ok(())
280}
281
#[cfg(test)]
mod tests {
    use crate::commodity::{CommodityCostMap, DemandMap};
    use crate::process::{FlowType, ProcessParameter, ProcessParameterMap};
    use crate::time_slice::TimeSliceID;
    use crate::time_slice::TimeSliceLevel;
    use std::iter;

    use super::*;

    /// Fixture data shared by the validation tests.
    struct ProcessData {
        /// Energy limits for each process, keyed by (region, year, time slice)
        availabilities: HashMap<ProcessID, ProcessEnergyLimitsMap>,
        /// Parameters for each process, keyed by (region, year)
        parameters: HashMap<ProcessID, ProcessParameterMap>,
        /// The set of known region IDs
        region_ids: HashSet<RegionID>,
    }

    /// Build fixture data for two processes ("process1" and "process2") in a
    /// single region ("GBR"), year (2010) and time slice (winter/day).
    fn get_process_data() -> ProcessData {
        // Both processes get availability 0.1..=0.9 in the single time slice
        let availabilities = ["process1", "process2"]
            .into_iter()
            .map(|id| {
                let mut map = ProcessEnergyLimitsMap::new();

                map.insert(
                    (
                        "GBR".into(),
                        2010,
                        TimeSliceID {
                            season: "winter".into(),
                            time_of_day: "day".into(),
                        },
                    ),
                    0.1..=0.9,
                );
                (id.into(), map)
            })
            .collect();

        // One (zero-cost) parameter set shared by both processes via Rc
        let parameter = Rc::new(ProcessParameter {
            capital_cost: 0.0,
            fixed_operating_cost: 0.0,
            variable_operating_cost: 0.0,
            lifetime: 1,
            discount_rate: 1.0,
            capacity_to_activity: 0.0,
        });
        let parameters = ["process1", "process2"]
            .into_iter()
            .map(|id| {
                let mut parameter_map: ProcessParameterMap = HashMap::new();
                parameter_map.insert(("GBR".into(), 2010), parameter.clone());
                (id.into(), parameter_map)
            })
            .collect();

        let region_ids = HashSet::from_iter(iter::once("GBR".into()));

        ProcessData {
            availabilities,
            parameters,
            region_ids,
        }
    }

    #[test]
    fn test_validate_commodities() {
        let data = get_process_data();
        // SED commodity: needs both a producer and a consumer to be valid
        let commodity_sed = Rc::new(Commodity {
            id: "commodity_sed".into(),
            description: "SED commodity".into(),
            kind: CommodityType::SupplyEqualsDemand,
            time_slice_level: TimeSliceLevel::Annual,
            costs: CommodityCostMap::new(),
            demand: DemandMap::new(),
        });

        let milestone_years = [2010];

        // Single time slice covering the whole year (fraction 1.0)
        let id = TimeSliceID {
            season: "winter".into(),
            time_of_day: "day".into(),
        };
        let fractions: IndexMap<TimeSliceID, f64> = [(id.clone(), 1.0)].into_iter().collect();
        let time_slice_info = TimeSliceInfo {
            seasons: [id.season].into_iter().collect(),
            times_of_day: [id.time_of_day].into_iter().collect(),
            fractions,
        };
        let parameters = data.parameters;
        let availabilities = data.availabilities;

        // Positive demand everywhere, so the SVD commodity needs a producer
        let mut demand_map = DemandMap::new();
        for region in data.region_ids.iter() {
            for year in milestone_years {
                for time_slice in time_slice_info.iter_ids() {
                    demand_map.insert((region.clone(), year, time_slice.clone()), 0.5);
                }
            }
        }
        // SVD commodity with demand in every (region, year, time slice)
        let commodity_non_sed = Rc::new(Commodity {
            id: "commodity_non_sed".into(),
            description: "Non-SED commodity".into(),
            kind: CommodityType::ServiceDemand,
            time_slice_level: TimeSliceLevel::Annual,
            costs: CommodityCostMap::new(),
            demand: demand_map,
        });

        let commodities: CommodityMap = [
            (commodity_sed.id.clone(), Rc::clone(&commodity_sed)),
            (commodity_non_sed.id.clone(), Rc::clone(&commodity_non_sed)),
        ]
        .into_iter()
        .collect();

        // Valid setup: process1 produces both commodities, process2 consumes
        // the SED commodity, so SED has producer+consumer and SVD a producer
        let process_flows: HashMap<ProcessID, Vec<ProcessFlow>> = [
            (
                "process1".into(),
                vec![
                    ProcessFlow {
                        process_id: "process1".into(),
                        commodity: Rc::clone(&commodity_sed),
                        flow: 10.0,
                        flow_type: FlowType::Fixed,
                        flow_cost: 1.0,
                        is_pac: false,
                    },
                    ProcessFlow {
                        process_id: "process1".into(),
                        commodity: Rc::clone(&commodity_non_sed),
                        flow: 5.0,
                        flow_type: FlowType::Fixed,
                        flow_cost: 1.0,
                        is_pac: false,
                    },
                ],
            ),
            (
                "process2".into(),
                vec![ProcessFlow {
                    process_id: "process2".into(),
                    commodity: Rc::clone(&commodity_sed),
                    flow: -10.0,
                    flow_type: FlowType::Fixed,
                    flow_cost: 1.0,
                    is_pac: false,
                }],
            ),
        ]
        .into_iter()
        .collect();

        assert!(validate_commodities(
            &commodities,
            &process_flows,
            &data.region_ids,
            &milestone_years,
            &time_slice_info,
            &parameters,
            &availabilities,
        )
        .is_ok());

        // Invalid setup: the SED commodity now has a producer but no
        // consumer, so validation must fail
        let process_flows_invalid: HashMap<ProcessID, Vec<ProcessFlow>> = [(
            "process1".into(),
            vec![ProcessFlow {
                process_id: "process1".into(),
                commodity: Rc::clone(&commodity_sed),
                flow: 10.0,
                flow_type: FlowType::Fixed,
                flow_cost: 1.0,
                is_pac: false,
            }],
        )]
        .into_iter()
        .collect();

        assert!(validate_commodities(
            &commodities,
            &process_flows_invalid,
            &data.region_ids,
            &milestone_years,
            &time_slice_info,
            &parameters,
            &availabilities,
        )
        .is_err());
    }
}