//! Code for reading the demand CSV file.
use super::super::*;
use super::demand_slicing::{read_demand_slices, DemandSliceMap};
use crate::commodity::{Commodity, CommodityID, CommodityType, DemandMap};
use crate::id::IDCollection;
use crate::region::RegionID;
use crate::time_slice::{TimeSliceInfo, TimeSliceLevel};
use crate::units::Flow;
use anyhow::{ensure, Context, Result};
use indexmap::{IndexMap, IndexSet};
use itertools::iproduct;
use serde::Deserialize;
use std::collections::HashMap;
use std::path::Path;

const DEMAND_FILE_NAME: &str = "demand.csv";

/// Represents a single demand entry read from the demand CSV file
#[derive(Debug, Clone, Deserialize, PartialEq)]
struct Demand {
    /// The commodity to which the demand applies
    commodity_id: String,
    /// The region to which the demand applies
    region_id: String,
    /// The year to which the demand applies (must be a milestone year)
    year: u32,
    /// Annual demand quantity
    demand: Flow,
}

/// Annual demand for each commodity, region and milestone year, along with the time slice
/// level at which the demand should be split
pub type AnnualDemandMap = HashMap<(CommodityID, RegionID, u32), (TimeSliceLevel, Flow)>;

/// A map of borrowed [`Commodity`]s, keyed by commodity ID
pub type BorrowedCommodityMap<'a> = HashMap<CommodityID, &'a Commodity>;

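/// Reads demand data from CSV files in the model directory.
///
/// Only service demand (SVD) commodities may have demand data. The annual demand read from the
/// demand file is combined with the per-time-slice fractions read from the demand slices file to
/// give demand per time slice selection.
///
/// # Arguments
///
/// * `model_dir` - Folder containing model configuration files
/// * `commodities` - Commodities for the model
/// * `region_ids` - All possible region IDs
/// * `time_slice_info` - Information about time slices
/// * `milestone_years` - The milestone years of the simulation
///
/// # Returns
///
/// A [`DemandMap`] for each commodity, keyed by commodity ID, or an error.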
pub fn read_demand(
    model_dir: &Path,
    commodities: &IndexMap<CommodityID, Commodity>,
    region_ids: &IndexSet<RegionID>,
    time_slice_info: &TimeSliceInfo,
    milestone_years: &[u32],
) -> Result<HashMap<CommodityID, DemandMap>> {
    // Demand data can only be provided for service demand (SVD) commodities
    let svd_commodities = commodities
        .iter()
        .filter(|(_, commodity)| commodity.kind == CommodityType::ServiceDemand)
        .map(|(id, commodity)| (id.clone(), commodity))
        .collect();

    let demand = read_demand_file(model_dir, &svd_commodities, region_ids, milestone_years)?;
    let slices = read_demand_slices(model_dir, &svd_commodities, region_ids, time_slice_info)?;

    Ok(compute_demand_maps(time_slice_info, &demand, &slices))
}

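/// Reads the annual demand for SVD commodities from the demand CSV file.
///
/// The file is expected to contain the columns `commodity_id`, `region_id`, `year` and `demand`,
/// for example:
///
/// ```text
/// commodity_id,region_id,year,demand
/// commodity1,GBR,2020,10
/// commodity1,USA,2020,11
/// ```
///
/// # Returns
///
/// The annual demand for each commodity, region and milestone year, or an error.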
fn read_demand_file(
    model_dir: &Path,
    svd_commodities: &BorrowedCommodityMap,
    region_ids: &IndexSet<RegionID>,
    milestone_years: &[u32],
) -> Result<AnnualDemandMap> {
    let file_path = model_dir.join(DEMAND_FILE_NAME);
    let iter = read_csv(&file_path)?;
    read_demand_from_iter(iter, svd_commodities, region_ids, milestone_years)
        .with_context(|| input_err_msg(file_path))
}

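/// Reads demand data from an iterator of [`Demand`] records.
///
/// This checks that each entry refers to a known SVD commodity, region and milestone year, that
/// the demand value is finite and greater than zero, and that there are no duplicate entries.
/// It also checks that demand is provided for every combination of commodity, region and
/// milestone year.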
fn read_demand_from_iter<I>(
    iter: I,
    svd_commodities: &BorrowedCommodityMap,
    region_ids: &IndexSet<RegionID>,
    milestone_years: &[u32],
) -> Result<AnnualDemandMap>
where
    I: Iterator<Item = Demand>,
{
    let mut map = AnnualDemandMap::new();
    for demand in iter {
        let commodity = svd_commodities
            .get(demand.commodity_id.as_str())
            .with_context(|| {
                format!(
                    "Can only provide demand data for SVD commodities. Found entry for '{}'",
                    demand.commodity_id
                )
            })?;
        let region_id = region_ids.get_id(&demand.region_id)?;

        ensure!(
            milestone_years.binary_search(&demand.year).is_ok(),
            "Year {} is not a milestone year. \
            Input of non-milestone years is currently not supported.",
            demand.year
        );

        ensure!(
            demand.demand.is_normal() && demand.demand > Flow(0.0),
            "Demand must be a valid number greater than zero"
        );

        ensure!(
            map.insert(
                (commodity.id.clone(), region_id.clone(), demand.year),
                (commodity.time_slice_level, demand.demand)
            )
            .is_none(),
            "Duplicate demand entries (commodity: {}, region: {}, year: {})",
            commodity.id,
            region_id,
            demand.year
        );
    }

    // Check that demand has been provided for every combination of commodity, region and
    // milestone year
    for commodity_id in svd_commodities.keys() {
        let mut missing_keys = Vec::new();
        for (region_id, year) in iproduct!(region_ids, milestone_years) {
            if !map.contains_key(&(commodity_id.clone(), region_id.clone(), *year)) {
                missing_keys.push((region_id.clone(), *year));
            }
        }
        ensure!(
            missing_keys.is_empty(),
            "Commodity {} is missing demand data for {:?}",
            commodity_id,
            missing_keys
        );
    }

    Ok(map)
}

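/// Splits annual demand into demand per time slice selection for each commodity.
///
/// For every commodity, region and year, the annual demand is distributed across the time slice
/// selections at the commodity's time slice level according to the fractions in `slices`.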
fn compute_demand_maps(
    time_slice_info: &TimeSliceInfo,
    demand: &AnnualDemandMap,
    slices: &DemandSliceMap,
) -> HashMap<CommodityID, DemandMap> {
    let mut map = HashMap::new();
    for ((commodity_id, region_id, year), (level, annual_demand)) in demand.iter() {
        for ts_selection in time_slice_info.iter_selections_at_level(*level) {
            let slice_key = (
                commodity_id.clone(),
                region_id.clone(),
                ts_selection.clone(),
            );

            // This lookup is expected to succeed, as demand slices are read for the same set of
            // commodities and regions
            let demand_fraction = slices.get(&slice_key).unwrap();

            let commodity_map = map
                .entry(commodity_id.clone())
                .or_insert_with(DemandMap::new);

            // Scale the annual demand by the fraction assigned to this time slice selection
            commodity_map.insert(
                (region_id.clone(), *year, ts_selection.clone()),
                *annual_demand * *demand_fraction,
            );
        }
    }

    map
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::fixture::{assert_error, get_svd_map, region_ids, svd_commodity};
    use rstest::rstest;
    use std::fs::File;
    use std::io::Write;
    use std::path::Path;
    use tempfile::tempdir;

    #[rstest]
    fn test_read_demand_from_iter(svd_commodity: Commodity, region_ids: IndexSet<RegionID>) {
        let svd_commodities = get_svd_map(&svd_commodity);
        let demand = [
            Demand {
                year: 2020,
                region_id: "GBR".to_string(),
                commodity_id: "commodity1".to_string(),
                demand: Flow(10.0),
            },
            Demand {
                year: 2020,
                region_id: "USA".to_string(),
                commodity_id: "commodity1".to_string(),
                demand: Flow(11.0),
            },
        ];

        assert!(
            read_demand_from_iter(demand.into_iter(), &svd_commodities, &region_ids, &[2020])
                .is_ok()
        );
    }

    #[rstest]
    fn test_read_demand_from_iter_bad_commodity_id(
        svd_commodity: Commodity,
        region_ids: IndexSet<RegionID>,
    ) {
        let svd_commodities = get_svd_map(&svd_commodity);
        let demand = [
            Demand {
                year: 2020,
                region_id: "GBR".to_string(),
                commodity_id: "commodity2".to_string(),
                demand: Flow(10.0),
            },
            Demand {
                year: 2020,
                region_id: "USA".to_string(),
                commodity_id: "commodity1".to_string(),
                demand: Flow(11.0),
            },
        ];
        assert_error!(
            read_demand_from_iter(demand.into_iter(), &svd_commodities, &region_ids, &[2020]),
            "Can only provide demand data for SVD commodities. Found entry for 'commodity2'"
        );
    }

    #[rstest]
    fn test_read_demand_from_iter_bad_region_id(
        svd_commodity: Commodity,
        region_ids: IndexSet<RegionID>,
    ) {
        let svd_commodities = get_svd_map(&svd_commodity);
        let demand = [
            Demand {
                year: 2020,
                region_id: "FRA".to_string(),
                commodity_id: "commodity1".to_string(),
                demand: Flow(10.0),
            },
            Demand {
                year: 2020,
                region_id: "USA".to_string(),
                commodity_id: "commodity1".to_string(),
                demand: Flow(11.0),
            },
        ];
        assert_error!(
            read_demand_from_iter(demand.into_iter(), &svd_commodities, &region_ids, &[2020]),
            "Unknown ID FRA found"
        );
    }

    #[rstest]
    fn test_read_demand_from_iter_bad_year(
        svd_commodity: Commodity,
        region_ids: IndexSet<RegionID>,
    ) {
        let svd_commodities = get_svd_map(&svd_commodity);
        let demand = [
            Demand {
                year: 2010,
                region_id: "GBR".to_string(),
                commodity_id: "commodity1".to_string(),
                demand: Flow(10.0),
            },
            Demand {
                year: 2020,
                region_id: "USA".to_string(),
                commodity_id: "commodity1".to_string(),
                demand: Flow(11.0),
            },
        ];
        assert_error!(
            read_demand_from_iter(demand.into_iter(), &svd_commodities, &region_ids, &[2020]),
            "Year 2010 is not a milestone year. \
            Input of non-milestone years is currently not supported."
        );
    }

    #[rstest]
    #[case(-1.0)]
    #[case(0.0)]
    #[case(f64::NAN)]
    #[case(f64::NEG_INFINITY)]
    #[case(f64::INFINITY)]
    fn test_read_demand_from_iter_bad_demand(
        svd_commodity: Commodity,
        region_ids: IndexSet<RegionID>,
        #[case] quantity: f64,
    ) {
        let svd_commodities = get_svd_map(&svd_commodity);
        let demand = [Demand {
            year: 2020,
            region_id: "GBR".to_string(),
            commodity_id: "commodity1".to_string(),
            demand: Flow(quantity),
        }];
        assert_error!(
            read_demand_from_iter(demand.into_iter(), &svd_commodities, &region_ids, &[2020]),
            "Demand must be a valid number greater than zero"
        );
    }

    #[rstest]
    fn test_read_demand_from_iter_multiple_entries(
        svd_commodity: Commodity,
        region_ids: IndexSet<RegionID>,
    ) {
        let svd_commodities = get_svd_map(&svd_commodity);
        let demand = [
            Demand {
                year: 2020,
                region_id: "GBR".to_string(),
                commodity_id: "commodity1".to_string(),
                demand: Flow(10.0),
            },
            Demand {
                year: 2020,
                region_id: "GBR".to_string(),
                commodity_id: "commodity1".to_string(),
                demand: Flow(10.0),
            },
            Demand {
                year: 2020,
                region_id: "USA".to_string(),
                commodity_id: "commodity1".to_string(),
                demand: Flow(11.0),
            },
        ];
        assert_error!(
            read_demand_from_iter(demand.into_iter(), &svd_commodities, &region_ids, &[2020]),
            "Duplicate demand entries (commodity: commodity1, region: GBR, year: 2020)"
        );
    }

    #[rstest]
    fn test_read_demand_from_iter_missing_year(
        svd_commodity: Commodity,
        region_ids: IndexSet<RegionID>,
    ) {
        let svd_commodities = get_svd_map(&svd_commodity);
        let demand = Demand {
            year: 2020,
            region_id: "GBR".to_string(),
            commodity_id: "commodity1".to_string(),
            demand: Flow(10.0),
        };
        assert!(read_demand_from_iter(
            std::iter::once(demand),
            &svd_commodities,
            &region_ids,
            &[2020, 2030]
        )
        .is_err());
    }

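    /// Create an example demand CSV file in `dir_path`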
    fn create_demand_file(dir_path: &Path) {
        let file_path = dir_path.join(DEMAND_FILE_NAME);
        let mut file = File::create(file_path).unwrap();
        writeln!(
            file,
            "commodity_id,region_id,year,demand\n\
            commodity1,GBR,2020,10\n\
            commodity1,USA,2020,11\n"
        )
        .unwrap();
    }

    #[rstest]
    fn test_read_demand_file(svd_commodity: Commodity, region_ids: IndexSet<RegionID>) {
        let svd_commodities = get_svd_map(&svd_commodity);
        let dir = tempdir().unwrap();
        create_demand_file(dir.path());
        let milestone_years = [2020];
        let expected = AnnualDemandMap::from_iter([
            (
                ("commodity1".into(), "GBR".into(), 2020),
                (TimeSliceLevel::DayNight, Flow(10.0)),
            ),
            (
                ("commodity1".into(), "USA".into(), 2020),
                (TimeSliceLevel::DayNight, Flow(11.0)),
            ),
        ]);
        let demand =
            read_demand_file(dir.path(), &svd_commodities, &region_ids, &milestone_years).unwrap();
        assert_eq!(demand, expected);
    }
}