muse2/input/process.rs

//! Code for reading process data from CSV files.
use super::{input_err_msg, read_csv};
use crate::commodity::CommodityMap;
use crate::id::IDCollection;
use crate::process::{
    Process, ProcessActivityLimitsMap, ProcessFlowsMap, ProcessID, ProcessInvestmentConstraintsMap,
    ProcessMap, ProcessParameterMap,
};
use crate::region::{RegionID, parse_region_str};
use crate::time_slice::TimeSliceInfo;
use crate::units::{ActivityPerCapacity, Capacity};
use anyhow::{Context, Ok, Result, ensure};
use indexmap::IndexSet;
use log::warn;
use serde::Deserialize;
use std::path::Path;
use std::rc::Rc;

mod availability;
use availability::read_process_availabilities;
mod flow;
use flow::read_process_flows;
mod parameter;
use crate::id::define_id_getter;
use parameter::read_process_parameters;
mod investment_constraints;
use investment_constraints::read_process_investment_constraints;

const PROCESSES_FILE_NAME: &str = "processes.csv";

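/// Raw process record, deserialized from one row of processes.csv.
///
/// The example row below is an illustrative sketch: the column values and the
/// region-list separator are assumptions made for documentation, not values taken
/// from any real model data.
///
/// ```text
/// id,description,regions,primary_output,start_year,end_year,capacity_to_activity,unit_size
/// GASCCGT,Gas combined-cycle turbine,GBR;FRA,electricity,2020,2050,8760,0.5
/// ```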
#[derive(PartialEq, Debug, Deserialize)]
struct ProcessRaw {
    id: ProcessID,
    description: String,
    regions: String,
    primary_output: Option<String>,
    start_year: Option<u32>,
    end_year: Option<u32>,
    capacity_to_activity: Option<ActivityPerCapacity>,
    unit_size: Option<Capacity>,
}
define_id_getter! {ProcessRaw, ProcessID}

/// Read process information from the specified CSV files.
///
/// # Arguments
///
/// * `model_dir` - Folder containing model configuration files
/// * `commodities` - Commodities for the model
/// * `region_ids` - All possible region IDs
/// * `time_slice_info` - Information about seasons and times of day
/// * `milestone_years` - All milestone years
///
/// # Returns
///
/// A `ProcessMap` mapping `ProcessID` to `Process`.
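///
/// # Example
///
/// A minimal usage sketch, not compiled as a doctest: the directory path, the
/// surrounding variables and the `len()` call on the returned map are assumptions
/// made for illustration.
///
/// ```ignore
/// let model_dir = Path::new("model");
/// let processes = read_processes(
///     model_dir,
///     &commodities,
///     &region_ids,
///     &time_slice_info,
///     &milestone_years,
/// )?;
/// println!("Loaded {} processes", processes.len());
/// ```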
pub fn read_processes(
    model_dir: &Path,
    commodities: &CommodityMap,
    region_ids: &IndexSet<RegionID>,
    time_slice_info: &TimeSliceInfo,
    milestone_years: &[u32],
) -> Result<ProcessMap> {
    let mut processes = read_processes_file(model_dir, milestone_years, region_ids, commodities)?;
    let mut activity_limits = read_process_availabilities(model_dir, &processes, time_slice_info)?;
    let mut flows = read_process_flows(model_dir, &mut processes, commodities, milestone_years)?;
    let mut parameters = read_process_parameters(model_dir, &processes, milestone_years)?;
    let mut investment_constraints =
        read_process_investment_constraints(model_dir, &processes, milestone_years)?;

    // Add data to Process objects
    for (id, process) in &mut processes {
        // This will always succeed as we know there will only be one reference to the process here
        let process = Rc::get_mut(process).unwrap();

        // We have already checked that there are maps for every process, so these will succeed
        process.activity_limits = activity_limits.remove(id).unwrap();
        process.flows = flows.remove(id).unwrap();
        process.parameters = parameters.remove(id).unwrap();

        // Investment constraints are optional, so fall back to the default if none were given
        process.investment_constraints = investment_constraints.remove(id).unwrap_or_default();
    }

    Ok(processes)
}

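/// Read processes.csv from the model directory and parse it into a `ProcessMap`.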
fn read_processes_file(
    model_dir: &Path,
    milestone_years: &[u32],
    region_ids: &IndexSet<RegionID>,
    commodities: &CommodityMap,
) -> Result<ProcessMap> {
    let file_path = model_dir.join(PROCESSES_FILE_NAME);
    let processes_csv = read_csv(&file_path)?;
    read_processes_file_from_iter(processes_csv, milestone_years, region_ids, commodities)
        .with_context(|| input_err_msg(&file_path))
}

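/// Build a `ProcessMap` from an iterator of raw process records, validating each row
/// and filling in defaults for optional fields that were left blank.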
fn read_processes_file_from_iter<I>(
    iter: I,
    milestone_years: &[u32],
    region_ids: &IndexSet<RegionID>,
    commodities: &CommodityMap,
) -> Result<ProcessMap>
where
    I: Iterator<Item = ProcessRaw>,
{
    let mut processes = ProcessMap::new();
    for process_raw in iter {
        let start_year = process_raw.start_year.unwrap_or_else(|| {
            warn!(
                "Using default start year {} for process {}.",
                milestone_years[0], process_raw.id
            );
            milestone_years[0]
        });
        let end_year = process_raw.end_year.unwrap_or_else(|| {
            warn!(
                "Using default end year {} for process {}.",
                milestone_years.last().unwrap(),
                process_raw.id
            );
            *milestone_years.last().unwrap()
        });
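
        // For example (milestone years assumed purely for illustration): with
        // milestone_years = [2020, 2025, 2030], a row with blank start_year and
        // end_year columns is treated as operating over 2020..=2030.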

        // Check year range is valid
        ensure!(
            start_year <= end_year,
            "Error in process {}: start_year > end_year",
            process_raw.id
        );
        let years = start_year..=end_year;

        // Parse the regions string into region IDs
        let regions = parse_region_str(&process_raw.regions, region_ids)?;

        // Check whether primary output is valid
        let primary_output = process_raw
            .primary_output
            .map(|id| {
                let id = commodities.get_id(id.trim())?;
                Ok(id.clone())
            })
            .transpose()?;

        let capacity_to_activity = process_raw
            .capacity_to_activity
            .unwrap_or(ActivityPerCapacity(1.0));

        // Validate unit_size: must be strictly positive if provided
        if let Some(unit_size) = &process_raw.unit_size {
            ensure!(
                *unit_size > Capacity(0.0),
                "Error in process {}: unit_size must be > 0 if provided",
                process_raw.id
            );
        }

        // Validate capacity_to_activity
        ensure!(
            capacity_to_activity >= ActivityPerCapacity(0.0),
            "Error in process {}: capacity_to_activity must be >= 0",
            process_raw.id
        );

        let process = Process {
            id: process_raw.id.clone(),
            description: process_raw.description,
            years,
            activity_limits: ProcessActivityLimitsMap::new(),
            flows: ProcessFlowsMap::new(),
            parameters: ProcessParameterMap::new(),
            regions,
            primary_output,
            capacity_to_activity,
            investment_constraints: ProcessInvestmentConstraintsMap::new(),
            unit_size: process_raw.unit_size,
        };

        ensure!(
            processes.insert(process_raw.id.clone(), process.into()).is_none(),
            "Duplicate process ID: {}",
            process_raw.id
        );
    }

    Ok(processes)
}