1use crate::agent::AgentID;
3use crate::asset::{Asset, AssetID, AssetRef};
4use crate::commodity::CommodityID;
5use crate::process::ProcessID;
6use crate::region::RegionID;
7use crate::simulation::CommodityPrices;
8use crate::simulation::investment::appraisal::AppraisalOutput;
9use crate::simulation::optimisation::{FlowMap, Solution};
10use crate::time_slice::TimeSliceID;
11use crate::units::{
12 Activity, Capacity, Flow, Money, MoneyPerActivity, MoneyPerCapacity, MoneyPerFlow,
13};
14use anyhow::{Context, Result, ensure};
15use csv;
16use indexmap::IndexMap;
17use itertools::Itertools;
18use serde::{Deserialize, Serialize};
19use std::fs;
20use std::fs::File;
21use std::path::{Path, PathBuf};
22
23pub mod metadata;
24use metadata::write_metadata;
25
/// The output file name for commodity flows
const COMMODITY_FLOWS_FILE_NAME: &str = "commodity_flows.csv";

/// The output file name for commodity prices
const COMMODITY_PRICES_FILE_NAME: &str = "commodity_prices.csv";

/// The output file name for assets
const ASSETS_FILE_NAME: &str = "assets.csv";

/// The debug output file name for per-asset dispatch activity (including duals)
const ACTIVITY_ASSET_DISPATCH: &str = "debug_dispatch_assets.csv";

/// The debug output file name for commodity balance duals
const COMMODITY_BALANCE_DUALS_FILE_NAME: &str = "debug_commodity_balance_duals.csv";

/// The debug output file name for unmet demand values
const UNMET_DEMAND_FILE_NAME: &str = "debug_unmet_demand.csv";

/// The debug output file name for solver-level values (e.g. the objective value)
const SOLVER_VALUES_FILE_NAME: &str = "debug_solver.csv";

/// The debug output file name for appraisal results
const APPRAISAL_RESULTS_FILE_NAME: &str = "debug_appraisal_results.csv";

/// The debug output file name for time-slice-level appraisal results
const APPRAISAL_RESULTS_TIME_SLICE_FILE_NAME: &str = "debug_appraisal_results_time_slices.csv";
53pub fn get_output_dir(model_dir: &Path, results_root: PathBuf) -> Result<PathBuf> {
55 let model_dir = model_dir
58 .canonicalize() .context("Could not resolve path to model")?;
60
61 let model_name = model_dir
62 .file_name()
63 .context("Model cannot be in root folder")?
64 .to_str()
65 .context("Invalid chars in model dir name")?;
66
67 Ok([results_root, model_name.into()].iter().collect())
69}
70
71pub fn get_graphs_dir(model_dir: &Path, graph_results_root: PathBuf) -> Result<PathBuf> {
73 let model_dir = model_dir
74 .canonicalize() .context("Could not resolve path to model")?;
76 let model_name = model_dir
77 .file_name()
78 .context("Model cannot be in root folder")?
79 .to_str()
80 .context("Invalid chars in model dir name")?;
81 Ok([graph_results_root, model_name.into()].iter().collect())
82}
83
84pub fn create_output_directory(output_dir: &Path, allow_overwrite: bool) -> Result<bool> {
95 let overwrite = if let Ok(mut it) = fs::read_dir(output_dir) {
97 if it.next().is_none() {
98 return Ok(false);
100 }
101
102 ensure!(
103 allow_overwrite,
104 "Output folder already exists and is not empty. \
105 Please delete the folder or pass the --overwrite command-line option."
106 );
107
108 fs::remove_dir_all(output_dir).context("Could not delete folder")?;
109 true
110 } else {
111 false
112 };
113
114 fs::create_dir_all(output_dir)?;
116
117 Ok(overwrite)
118}
119
/// A row in the assets output CSV file
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct AssetRow {
    /// Unique ID of the asset
    asset_id: AssetID,
    /// ID of the process the asset runs
    process_id: ProcessID,
    /// Region in which the asset is located
    region_id: RegionID,
    /// Agent associated with the asset
    agent_id: AgentID,
    /// Year in which the asset was commissioned
    commission_year: u32,
    /// Year in which the asset is decommissioned, if set
    decommission_year: Option<u32>,
    /// Capacity of the asset
    capacity: Capacity,
}
131
132impl AssetRow {
133 fn new(asset: &Asset) -> Self {
135 Self {
136 asset_id: asset.id().unwrap(),
137 process_id: asset.process_id().clone(),
138 region_id: asset.region_id().clone(),
139 agent_id: asset.agent_id().unwrap().clone(),
140 commission_year: asset.commission_year(),
141 decommission_year: asset.decommission_year(),
142 capacity: asset.capacity(),
143 }
144 }
145}
146
/// A row in the commodity flows output CSV file
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct CommodityFlowRow {
    /// Milestone year to which this flow applies
    milestone_year: u32,
    /// ID of the asset for which the flow occurs
    asset_id: AssetID,
    /// Commodity being produced or consumed
    commodity_id: CommodityID,
    /// Time slice in which the flow occurs
    time_slice: TimeSliceID,
    /// The flow value
    flow: Flow,
}
156
/// A row in the commodity prices output CSV file
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct CommodityPriceRow {
    /// Milestone year to which this price applies
    milestone_year: u32,
    /// Commodity being priced
    commodity_id: CommodityID,
    /// Region for which the price applies
    region_id: RegionID,
    /// Time slice for which the price applies
    time_slice: TimeSliceID,
    /// Price per unit flow of the commodity
    price: MoneyPerFlow,
}
166
/// A row in the asset dispatch debug CSV file.
///
/// The optional fields are merged from three separate solution iterators, so
/// any of them may be `None` for a given (asset, time slice) key.
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct ActivityRow {
    /// Milestone year of the optimisation run
    milestone_year: u32,
    /// Description of the optimisation run (may include the debug context)
    run_description: String,
    /// ID of the asset, if it has one
    asset_id: Option<AssetID>,
    /// ID of the process the asset runs
    process_id: ProcessID,
    /// Region of the asset
    region_id: RegionID,
    /// Time slice to which this row applies
    time_slice: TimeSliceID,
    /// Activity level for this asset/time slice, if present in the solution
    activity: Option<Activity>,
    /// Dual value for the activity constraint, if present in the solution
    activity_dual: Option<MoneyPerActivity>,
    /// Dual value for the column, if present in the solution
    column_dual: Option<MoneyPerActivity>,
}
180
/// A row in the commodity balance duals debug CSV file
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct CommodityBalanceDualsRow {
    /// Milestone year of the optimisation run
    milestone_year: u32,
    /// Description of the optimisation run (may include the debug context)
    run_description: String,
    /// Commodity to which the balance constraint applies
    commodity_id: CommodityID,
    /// Region to which the balance constraint applies
    region_id: RegionID,
    /// Time slice to which the balance constraint applies
    time_slice: TimeSliceID,
    /// Dual value of the commodity balance constraint
    value: MoneyPerFlow,
}
191
/// A row in the unmet demand debug CSV file
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct UnmetDemandRow {
    /// Milestone year of the optimisation run
    milestone_year: u32,
    /// Description of the optimisation run (may include the debug context)
    run_description: String,
    /// Commodity for which demand was not met
    commodity_id: CommodityID,
    /// Region in which demand was not met
    region_id: RegionID,
    /// Time slice in which demand was not met
    time_slice: TimeSliceID,
    /// Quantity of unmet demand
    value: Flow,
}
202
/// A row in the solver values debug CSV file
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct SolverValuesRow {
    /// Milestone year of the optimisation run
    milestone_year: u32,
    /// Description of the optimisation run (may include the debug context)
    run_description: String,
    /// Objective value reported by the solver
    objective_value: Money,
}
210
/// A row in the appraisal results debug CSV file
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct AppraisalResultsRow {
    /// Milestone year of the appraisal
    milestone_year: u32,
    /// Description of the optimisation run (may include the debug context)
    run_description: String,
    /// ID of the appraised asset, if it has one
    asset_id: Option<AssetID>,
    /// ID of the process the appraised asset runs
    process_id: ProcessID,
    /// Region of the appraised asset
    region_id: RegionID,
    /// Capacity considered for the appraised option
    capacity: Capacity,
    /// Cost coefficient per unit capacity used in the appraisal
    capacity_coefficient: MoneyPerCapacity,
    /// The appraisal metric value for this option
    metric: f64,
}
223
/// A row in the time-slice-level appraisal results debug CSV file
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct AppraisalResultsTimeSliceRow {
    /// Milestone year of the appraisal
    milestone_year: u32,
    /// Description of the optimisation run (may include the debug context)
    run_description: String,
    /// ID of the appraised asset, if it has one
    asset_id: Option<AssetID>,
    /// ID of the process the appraised asset runs
    process_id: ProcessID,
    /// Region of the appraised asset
    region_id: RegionID,
    /// Time slice to which this row applies
    time_slice: TimeSliceID,
    /// Activity of the appraised option in this time slice
    activity: Activity,
    /// Cost coefficient per unit activity in this time slice
    activity_coefficient: MoneyPerActivity,
    /// Demand in this time slice
    demand: Flow,
    /// Unmet demand in this time slice
    unmet_demand: Flow,
}
238
/// Writes the debug-only CSV output files.
///
/// Created only when the user requests debug output; see [`DataWriter`].
struct DebugDataWriter {
    /// Optional context string prepended to each row's run description
    context: Option<String>,
    /// Writer for the commodity balance duals file
    commodity_balance_duals_writer: csv::Writer<File>,
    /// Writer for the unmet demand file
    unmet_demand_writer: csv::Writer<File>,
    /// Writer for the solver values file
    solver_values_writer: csv::Writer<File>,
    /// Writer for the appraisal results file
    appraisal_results_writer: csv::Writer<File>,
    /// Writer for the time-slice-level appraisal results file
    appraisal_results_time_slice_writer: csv::Writer<File>,
    /// Writer for the asset dispatch activity file
    dispatch_asset_writer: csv::Writer<File>,
}
249
impl DebugDataWriter {
    /// Open the debug CSV files for writing.
    ///
    /// # Arguments
    ///
    /// * `output_path` - Folder in which the debug CSV files will be created
    fn create(output_path: &Path) -> Result<Self> {
        // Helper: open a CSV writer for the named file inside the output folder
        let new_writer = |file_name| {
            let file_path = output_path.join(file_name);
            csv::Writer::from_path(file_path)
        };

        Ok(Self {
            context: None,
            commodity_balance_duals_writer: new_writer(COMMODITY_BALANCE_DUALS_FILE_NAME)?,
            unmet_demand_writer: new_writer(UNMET_DEMAND_FILE_NAME)?,
            solver_values_writer: new_writer(SOLVER_VALUES_FILE_NAME)?,
            appraisal_results_writer: new_writer(APPRAISAL_RESULTS_FILE_NAME)?,
            appraisal_results_time_slice_writer: new_writer(
                APPRAISAL_RESULTS_TIME_SLICE_FILE_NAME,
            )?,
            dispatch_asset_writer: new_writer(ACTIVITY_ASSET_DISPATCH)?,
        })
    }

    /// Combine the optional debug context with `run_description`.
    ///
    /// Returns "<context>; <run_description>" when a context is set, otherwise
    /// just `run_description`.
    fn with_context(&self, run_description: &str) -> String {
        if let Some(context) = &self.context {
            format!("{context}; {run_description}")
        } else {
            run_description.to_string()
        }
    }

    /// Write all debug info for one dispatch optimisation solution.
    ///
    /// This covers activity (with duals), commodity balance duals, unmet
    /// demand and the solver's objective value.
    ///
    /// # Arguments
    ///
    /// * `milestone_year` - The milestone year of the simulation
    /// * `run_description` - A description of the current optimisation run
    /// * `solution` - The solved dispatch optimisation
    fn write_dispatch_debug_info(
        &mut self,
        milestone_year: u32,
        run_description: &str,
        solution: &Solution,
    ) -> Result<()> {
        self.write_activity(
            milestone_year,
            run_description,
            solution.iter_activity(),
            solution.iter_activity_duals(),
            solution.iter_column_duals(),
        )?;
        self.write_commodity_balance_duals(
            milestone_year,
            run_description,
            solution.iter_commodity_balance_duals(),
        )?;
        self.write_unmet_demand(
            milestone_year,
            run_description,
            solution.iter_unmet_demand(),
        )?;
        self.write_solver_values(milestone_year, run_description, solution.objective_value)?;
        Ok(())
    }

    /// Write per-asset activity and dual values to the dispatch debug file.
    ///
    /// The three input iterators are keyed by (asset, time slice) and merged,
    /// so each output row carries whichever of activity, activity dual and
    /// column dual were present for that key (absent components are `None`).
    /// Rows are emitted in first-seen key order (IndexMap insertion order).
    fn write_activity<'a, I, J, K>(
        &mut self,
        milestone_year: u32,
        run_description: &str,
        iter_activity: I,
        iter_activity_duals: J,
        iter_column_duals: K,
    ) -> Result<()>
    where
        I: Iterator<Item = (&'a AssetRef, &'a TimeSliceID, Activity)>,
        J: Iterator<Item = (&'a AssetRef, &'a TimeSliceID, MoneyPerActivity)>,
        K: Iterator<Item = (&'a AssetRef, &'a TimeSliceID, MoneyPerActivity)>,
    {
        // (activity, activity dual, column dual) accumulated for each key
        type CompiledActivityData = (
            Option<Activity>,
            Option<MoneyPerActivity>,
            Option<MoneyPerActivity>,
        );
        let mut map: IndexMap<(&AssetRef, &TimeSliceID), CompiledActivityData> = IndexMap::new();

        // Merge the three iterators into the map, one component at a time
        for (asset, time_slice, activity) in iter_activity {
            map.entry((asset, time_slice)).or_default().0 = Some(activity);
        }
        for (asset, time_slice, activity_dual) in iter_activity_duals {
            map.entry((asset, time_slice)).or_default().1 = Some(activity_dual);
        }
        for (asset, time_slice, column_dual) in iter_column_duals {
            map.entry((asset, time_slice)).or_default().2 = Some(column_dual);
        }

        // Emit one row per (asset, time slice) key
        for (asset, time_slice, activity, activity_dual, column_dual) in
            map.iter()
                .map(|(&(agent, ts), &(activity, activity_dual, column_dual))| {
                    (agent, ts, activity, activity_dual, column_dual)
                })
        {
            let row = ActivityRow {
                milestone_year,
                run_description: self.with_context(run_description),
                asset_id: asset.id(),
                process_id: asset.process_id().clone(),
                region_id: asset.region_id().clone(),
                time_slice: time_slice.clone(),
                activity,
                activity_dual,
                column_dual,
            };
            self.dispatch_asset_writer.serialize(row)?;
        }

        Ok(())
    }

    /// Write commodity balance constraint duals to the corresponding debug file.
    ///
    /// # Arguments
    ///
    /// * `milestone_year` - The milestone year of the simulation
    /// * `run_description` - A description of the current optimisation run
    /// * `iter` - Duals keyed by commodity, region and time slice
    fn write_commodity_balance_duals<'a, I>(
        &mut self,
        milestone_year: u32,
        run_description: &str,
        iter: I,
    ) -> Result<()>
    where
        I: Iterator<Item = (&'a CommodityID, &'a RegionID, &'a TimeSliceID, MoneyPerFlow)>,
    {
        for (commodity_id, region_id, time_slice, value) in iter {
            let row = CommodityBalanceDualsRow {
                milestone_year,
                run_description: self.with_context(run_description),
                commodity_id: commodity_id.clone(),
                region_id: region_id.clone(),
                time_slice: time_slice.clone(),
                value,
            };
            self.commodity_balance_duals_writer.serialize(row)?;
        }

        Ok(())
    }

    /// Write unmet demand values to the corresponding debug file.
    ///
    /// # Arguments
    ///
    /// * `milestone_year` - The milestone year of the simulation
    /// * `run_description` - A description of the current optimisation run
    /// * `iter` - Unmet demand keyed by commodity, region and time slice
    fn write_unmet_demand<'a, I>(
        &mut self,
        milestone_year: u32,
        run_description: &str,
        iter: I,
    ) -> Result<()>
    where
        I: Iterator<Item = (&'a CommodityID, &'a RegionID, &'a TimeSliceID, Flow)>,
    {
        for (commodity_id, region_id, time_slice, value) in iter {
            let row = UnmetDemandRow {
                milestone_year,
                run_description: self.with_context(run_description),
                commodity_id: commodity_id.clone(),
                region_id: region_id.clone(),
                time_slice: time_slice.clone(),
                value,
            };
            self.unmet_demand_writer.serialize(row)?;
        }

        Ok(())
    }

    /// Write solver-level values (currently just the objective value).
    ///
    /// Note: unlike the other `write_*` methods, this one flushes its writer
    /// immediately after serialising the row.
    fn write_solver_values(
        &mut self,
        milestone_year: u32,
        run_description: &str,
        objective_value: Money,
    ) -> Result<()> {
        let row = SolverValuesRow {
            milestone_year,
            run_description: self.with_context(run_description),
            objective_value,
        };
        self.solver_values_writer.serialize(row)?;
        self.solver_values_writer.flush()?;

        Ok(())
    }

    /// Write one row per appraisal output to the appraisal results debug file.
    fn write_appraisal_results(
        &mut self,
        milestone_year: u32,
        run_description: &str,
        appraisal_results: &[AppraisalOutput],
    ) -> Result<()> {
        for result in appraisal_results {
            let row = AppraisalResultsRow {
                milestone_year,
                run_description: self.with_context(run_description),
                asset_id: result.asset.id(),
                process_id: result.asset.process_id().clone(),
                region_id: result.asset.region_id().clone(),
                capacity: result.capacity,
                capacity_coefficient: result.coefficients.capacity_coefficient,
                metric: result.metric,
            };
            self.appraisal_results_writer.serialize(row)?;
        }

        Ok(())
    }

    /// Write one row per appraisal output *and* time slice to the
    /// time-slice-level appraisal results debug file.
    fn write_appraisal_time_slice_results(
        &mut self,
        milestone_year: u32,
        run_description: &str,
        appraisal_results: &[AppraisalOutput],
    ) -> Result<()> {
        for result in appraisal_results {
            for (time_slice, activity) in &result.activity {
                // Look up the per-time-slice values for this result
                let activity_coefficient = result.coefficients.activity_coefficients[time_slice];
                let demand = result.demand[time_slice];
                let unmet_demand = result.unmet_demand[time_slice];
                let row = AppraisalResultsTimeSliceRow {
                    milestone_year,
                    run_description: self.with_context(run_description),
                    asset_id: result.asset.id(),
                    process_id: result.asset.process_id().clone(),
                    region_id: result.asset.region_id().clone(),
                    time_slice: time_slice.clone(),
                    activity: *activity,
                    activity_coefficient,
                    demand,
                    unmet_demand,
                };
                self.appraisal_results_time_slice_writer.serialize(row)?;
            }
        }

        Ok(())
    }

    /// Flush all of the debug file writers.
    fn flush(&mut self) -> Result<()> {
        self.commodity_balance_duals_writer.flush()?;
        self.unmet_demand_writer.flush()?;
        self.solver_values_writer.flush()?;
        self.appraisal_results_writer.flush()?;
        self.appraisal_results_time_slice_writer.flush()?;
        self.dispatch_asset_writer.flush()?;

        Ok(())
    }
}
505
/// Writes the standard simulation output CSV files (and, optionally, debug files).
pub struct DataWriter {
    /// Path to the assets output file (recreated on each `write_assets` call)
    assets_path: PathBuf,
    /// Writer for the commodity flows file
    flows_writer: csv::Writer<File>,
    /// Writer for the commodity prices file
    prices_writer: csv::Writer<File>,
    /// Writer for debug output files; `None` unless debug output was requested
    debug_writer: Option<DebugDataWriter>,
}
513
514impl DataWriter {
515 pub fn create(output_path: &Path, model_path: &Path, save_debug_info: bool) -> Result<Self> {
523 write_metadata(output_path, model_path).context("Failed to save metadata")?;
524
525 let new_writer = |file_name| {
526 let file_path = output_path.join(file_name);
527 csv::Writer::from_path(file_path)
528 };
529
530 let debug_writer = if save_debug_info {
531 Some(DebugDataWriter::create(output_path)?)
533 } else {
534 None
535 };
536
537 Ok(Self {
538 assets_path: output_path.join(ASSETS_FILE_NAME),
539 flows_writer: new_writer(COMMODITY_FLOWS_FILE_NAME)?,
540 prices_writer: new_writer(COMMODITY_PRICES_FILE_NAME)?,
541 debug_writer,
542 })
543 }
544
545 pub fn write_dispatch_debug_info(
547 &mut self,
548 milestone_year: u32,
549 run_description: &str,
550 solution: &Solution,
551 ) -> Result<()> {
552 if let Some(wtr) = &mut self.debug_writer {
553 wtr.write_dispatch_debug_info(milestone_year, run_description, solution)?;
554 }
555
556 Ok(())
557 }
558
559 pub fn write_appraisal_debug_info(
561 &mut self,
562 milestone_year: u32,
563 run_description: &str,
564 appraisal_results: &[AppraisalOutput],
565 ) -> Result<()> {
566 if let Some(wtr) = &mut self.debug_writer {
567 wtr.write_appraisal_results(milestone_year, run_description, appraisal_results)?;
568 wtr.write_appraisal_time_slice_results(
569 milestone_year,
570 run_description,
571 appraisal_results,
572 )?;
573 }
574
575 Ok(())
576 }
577
578 pub fn write_assets<'a, I>(&mut self, assets: I) -> Result<()>
590 where
591 I: Iterator<Item = &'a AssetRef>,
592 {
593 let mut writer = csv::Writer::from_path(&self.assets_path)?;
594 for asset in assets.sorted() {
595 let row = AssetRow::new(asset);
596 writer.serialize(row)?;
597 }
598 writer.flush()?;
599
600 Ok(())
601 }
602
603 pub fn write_flows(&mut self, milestone_year: u32, flow_map: &FlowMap) -> Result<()> {
605 for ((asset, commodity_id, time_slice), flow) in flow_map {
606 let row = CommodityFlowRow {
607 milestone_year,
608 asset_id: asset.id().unwrap(),
609 commodity_id: commodity_id.clone(),
610 time_slice: time_slice.clone(),
611 flow: *flow,
612 };
613 self.flows_writer.serialize(row)?;
614 }
615
616 Ok(())
617 }
618
619 pub fn write_prices(&mut self, milestone_year: u32, prices: &CommodityPrices) -> Result<()> {
621 for (commodity_id, region_id, time_slice, price) in prices.iter() {
622 let row = CommodityPriceRow {
623 milestone_year,
624 commodity_id: commodity_id.clone(),
625 region_id: region_id.clone(),
626 time_slice: time_slice.clone(),
627 price,
628 };
629 self.prices_writer.serialize(row)?;
630 }
631
632 Ok(())
633 }
634
635 pub fn flush(&mut self) -> Result<()> {
637 self.flows_writer.flush()?;
638 self.prices_writer.flush()?;
639 if let Some(wtr) = &mut self.debug_writer {
640 wtr.flush()?;
641 }
642
643 Ok(())
644 }
645
646 pub fn set_debug_context(&mut self, context: String) {
648 if let Some(wtr) = &mut self.debug_writer {
649 wtr.context = Some(context);
650 }
651 }
652
653 pub fn clear_debug_context(&mut self) {
655 if let Some(wtr) = &mut self.debug_writer {
656 wtr.context = None;
657 }
658 }
659}
660
#[cfg(test)]
mod tests {
    use super::*;
    use crate::asset::AssetPool;
    use crate::fixture::{appraisal_output, asset, assets, commodity_id, region_id, time_slice};
    use crate::simulation::investment::appraisal::AppraisalOutput;
    use crate::time_slice::TimeSliceID;
    use indexmap::indexmap;
    use itertools::{Itertools, assert_equal};
    use rstest::rstest;
    use serde::de::DeserializeOwned;
    use std::iter;
    use tempfile::tempdir;

    /// Read and deserialise every row of the CSV file at `path`.
    fn read_rows<T: DeserializeOwned>(path: PathBuf) -> Vec<T> {
        csv::Reader::from_path(path)
            .unwrap()
            .into_deserialize()
            .try_collect()
            .unwrap()
    }

    #[rstest]
    fn test_write_assets(assets: AssetPool) {
        let dir = tempdir().unwrap();

        // Scope ensures the writer is dropped (and files closed) before reading back
        {
            let mut writer = DataWriter::create(dir.path(), dir.path(), false).unwrap();
            writer.write_assets(assets.iter_active()).unwrap();
            writer.flush().unwrap();
        }

        let asset = assets.iter_active().next().unwrap();
        let expected = AssetRow::new(asset);
        let records: Vec<AssetRow> = read_rows(dir.path().join(ASSETS_FILE_NAME));
        assert_equal(records, iter::once(expected));
    }

    #[rstest]
    fn test_write_flows(assets: AssetPool, commodity_id: CommodityID, time_slice: TimeSliceID) {
        let milestone_year = 2020;
        let asset = assets.iter_active().next().unwrap();
        let flow_map = indexmap! {
            (asset.clone(), commodity_id.clone(), time_slice.clone()) => Flow(42.0)
        };

        let dir = tempdir().unwrap();
        {
            let mut writer = DataWriter::create(dir.path(), dir.path(), false).unwrap();
            writer.write_flows(milestone_year, &flow_map).unwrap();
            writer.flush().unwrap();
        }

        let expected = CommodityFlowRow {
            milestone_year,
            asset_id: asset.id().unwrap(),
            commodity_id,
            time_slice,
            flow: Flow(42.0),
        };
        let records: Vec<CommodityFlowRow> = read_rows(dir.path().join(COMMODITY_FLOWS_FILE_NAME));
        assert_equal(records, iter::once(expected));
    }

    #[rstest]
    fn test_write_prices(commodity_id: CommodityID, region_id: RegionID, time_slice: TimeSliceID) {
        let milestone_year = 2020;
        let price = MoneyPerFlow(42.0);
        let mut prices = CommodityPrices::default();
        prices.insert(&commodity_id, &region_id, &time_slice, price);

        let dir = tempdir().unwrap();

        {
            let mut writer = DataWriter::create(dir.path(), dir.path(), false).unwrap();
            writer.write_prices(milestone_year, &prices).unwrap();
            writer.flush().unwrap();
        }

        let expected = CommodityPriceRow {
            milestone_year,
            commodity_id,
            region_id,
            time_slice,
            price,
        };
        let records: Vec<CommodityPriceRow> =
            read_rows(dir.path().join(COMMODITY_PRICES_FILE_NAME));
        assert_equal(records, iter::once(expected));
    }

    #[rstest]
    fn test_write_commodity_balance_duals(
        commodity_id: CommodityID,
        region_id: RegionID,
        time_slice: TimeSliceID,
    ) {
        let milestone_year = 2020;
        let run_description = "test_run".to_string();
        let value = MoneyPerFlow(0.5);
        let dir = tempdir().unwrap();

        {
            let mut writer = DebugDataWriter::create(dir.path()).unwrap();
            writer
                .write_commodity_balance_duals(
                    milestone_year,
                    &run_description,
                    iter::once((&commodity_id, &region_id, &time_slice, value)),
                )
                .unwrap();
            writer.flush().unwrap();
        }

        let expected = CommodityBalanceDualsRow {
            milestone_year,
            run_description,
            commodity_id,
            region_id,
            time_slice,
            value,
        };
        let records: Vec<CommodityBalanceDualsRow> =
            read_rows(dir.path().join(COMMODITY_BALANCE_DUALS_FILE_NAME));
        assert_equal(records, iter::once(expected));
    }

    #[rstest]
    fn test_write_unmet_demand(
        commodity_id: CommodityID,
        region_id: RegionID,
        time_slice: TimeSliceID,
    ) {
        let milestone_year = 2020;
        let run_description = "test_run".to_string();
        let value = Flow(0.5);
        let dir = tempdir().unwrap();

        {
            let mut writer = DebugDataWriter::create(dir.path()).unwrap();
            writer
                .write_unmet_demand(
                    milestone_year,
                    &run_description,
                    iter::once((&commodity_id, &region_id, &time_slice, value)),
                )
                .unwrap();
            writer.flush().unwrap();
        }

        let expected = UnmetDemandRow {
            milestone_year,
            run_description,
            commodity_id,
            region_id,
            time_slice,
            value,
        };
        let records: Vec<UnmetDemandRow> = read_rows(dir.path().join(UNMET_DEMAND_FILE_NAME));
        assert_equal(records, iter::once(expected));
    }

    #[rstest]
    fn test_write_activity(assets: AssetPool, time_slice: TimeSliceID) {
        let milestone_year = 2020;
        let run_description = "test_run".to_string();
        let activity = Activity(100.5);
        let activity_dual = MoneyPerActivity(-1.5);
        let column_dual = MoneyPerActivity(5.0);
        let dir = tempdir().unwrap();
        let asset = assets.iter_active().next().unwrap();

        {
            let mut writer = DebugDataWriter::create(dir.path()).unwrap();
            writer
                .write_activity(
                    milestone_year,
                    &run_description,
                    iter::once((asset, &time_slice, activity)),
                    iter::once((asset, &time_slice, activity_dual)),
                    iter::once((asset, &time_slice, column_dual)),
                )
                .unwrap();
            writer.flush().unwrap();
        }

        // All three components were supplied, so all three should be present
        let expected = ActivityRow {
            milestone_year,
            run_description,
            asset_id: asset.id(),
            process_id: asset.process_id().clone(),
            region_id: asset.region_id().clone(),
            time_slice,
            activity: Some(activity),
            activity_dual: Some(activity_dual),
            column_dual: Some(column_dual),
        };
        let records: Vec<ActivityRow> = read_rows(dir.path().join(ACTIVITY_ASSET_DISPATCH));
        assert_equal(records, iter::once(expected));
    }

    #[rstest]
    fn test_write_activity_with_missing_keys(assets: AssetPool, time_slice: TimeSliceID) {
        let milestone_year = 2020;
        let run_description = "test_run".to_string();
        let activity = Activity(100.5);
        let dir = tempdir().unwrap();
        let asset = assets.iter_active().next().unwrap();

        {
            let mut writer = DebugDataWriter::create(dir.path()).unwrap();
            writer
                .write_activity(
                    milestone_year,
                    &run_description,
                    iter::once((asset, &time_slice, activity)),
                    iter::empty::<(&AssetRef, &TimeSliceID, MoneyPerActivity)>(),
                    iter::empty::<(&AssetRef, &TimeSliceID, MoneyPerActivity)>(),
                )
                .unwrap();
            writer.flush().unwrap();
        }

        // The dual iterators were empty, so those fields should be None
        let expected = ActivityRow {
            milestone_year,
            run_description,
            asset_id: asset.id(),
            process_id: asset.process_id().clone(),
            region_id: asset.region_id().clone(),
            time_slice,
            activity: Some(activity),
            activity_dual: None,
            column_dual: None,
        };
        let records: Vec<ActivityRow> = read_rows(dir.path().join(ACTIVITY_ASSET_DISPATCH));
        assert_equal(records, iter::once(expected));
    }

    #[rstest]
    fn test_write_solver_values() {
        let milestone_year = 2020;
        let run_description = "test_run".to_string();
        let objective_value = Money(1234.56);
        let dir = tempdir().unwrap();

        {
            let mut writer = DebugDataWriter::create(dir.path()).unwrap();
            writer
                .write_solver_values(milestone_year, &run_description, objective_value)
                .unwrap();
            writer.flush().unwrap();
        }

        let expected = SolverValuesRow {
            milestone_year,
            run_description,
            objective_value,
        };
        let records: Vec<SolverValuesRow> = read_rows(dir.path().join(SOLVER_VALUES_FILE_NAME));
        assert_equal(records, iter::once(expected));
    }

    #[rstest]
    fn test_write_appraisal_results(asset: Asset, appraisal_output: AppraisalOutput) {
        let milestone_year = 2020;
        let run_description = "test_run".to_string();
        let dir = tempdir().unwrap();

        {
            let mut writer = DebugDataWriter::create(dir.path()).unwrap();
            writer
                .write_appraisal_results(milestone_year, &run_description, &[appraisal_output])
                .unwrap();
            writer.flush().unwrap();
        }

        let expected = AppraisalResultsRow {
            milestone_year,
            run_description,
            asset_id: None,
            process_id: asset.process_id().clone(),
            region_id: asset.region_id().clone(),
            capacity: Capacity(42.0),
            capacity_coefficient: MoneyPerCapacity(3.14),
            metric: 4.14,
        };
        let records: Vec<AppraisalResultsRow> =
            read_rows(dir.path().join(APPRAISAL_RESULTS_FILE_NAME));
        assert_equal(records, iter::once(expected));
    }

    #[rstest]
    fn test_write_appraisal_time_slice_results(
        asset: Asset,
        appraisal_output: AppraisalOutput,
        time_slice: TimeSliceID,
    ) {
        let milestone_year = 2020;
        let run_description = "test_run".to_string();
        let dir = tempdir().unwrap();

        {
            let mut writer = DebugDataWriter::create(dir.path()).unwrap();
            writer
                .write_appraisal_time_slice_results(
                    milestone_year,
                    &run_description,
                    &[appraisal_output],
                )
                .unwrap();
            writer.flush().unwrap();
        }

        let expected = AppraisalResultsTimeSliceRow {
            milestone_year,
            run_description,
            asset_id: None,
            process_id: asset.process_id().clone(),
            region_id: asset.region_id().clone(),
            time_slice: time_slice.clone(),
            activity: Activity(10.0),
            activity_coefficient: MoneyPerActivity(0.5),
            demand: Flow(100.0),
            unmet_demand: Flow(5.0),
        };
        let records: Vec<AppraisalResultsTimeSliceRow> =
            read_rows(dir.path().join(APPRAISAL_RESULTS_TIME_SLICE_FILE_NAME));
        assert_equal(records, iter::once(expected));
    }

    #[test]
    fn test_create_output_directory_new_directory() {
        let temp_dir = tempdir().unwrap();
        let output_dir = temp_dir.path().join("new_output");

        let result = create_output_directory(&output_dir, false).unwrap();
        assert!(!result);
        assert!(output_dir.exists());
        assert!(output_dir.is_dir());
    }

    #[test]
    fn test_create_output_directory_existing_empty_directory() {
        let temp_dir = tempdir().unwrap();
        let output_dir = temp_dir.path().join("empty_output");

        fs::create_dir(&output_dir).unwrap();

        let result = create_output_directory(&output_dir, false).unwrap();
        assert!(!result);
        assert!(output_dir.exists());
        assert!(output_dir.is_dir());
    }

    #[test]
    fn test_create_output_directory_existing_with_files_no_overwrite() {
        let temp_dir = tempdir().unwrap();
        let output_dir = temp_dir.path().join("output_with_files");

        fs::create_dir(&output_dir).unwrap();
        fs::write(output_dir.join("existing_file.txt"), "some content").unwrap();

        // Without --overwrite, a non-empty output folder must be an error
        let result = create_output_directory(&output_dir, false);
        assert!(result.is_err());
        assert!(
            result
                .unwrap_err()
                .to_string()
                .contains("Output folder already exists")
        );
    }

    #[test]
    fn test_create_output_directory_existing_with_files_allow_overwrite() {
        let temp_dir = tempdir().unwrap();
        let output_dir = temp_dir.path().join("output_with_files");

        fs::create_dir(&output_dir).unwrap();
        let file_path = output_dir.join("existing_file.txt");
        fs::write(&file_path, "some content").unwrap();

        let result = create_output_directory(&output_dir, true).unwrap();
        assert!(result);
        assert!(output_dir.exists());
        assert!(output_dir.is_dir());
        // The pre-existing file must have been deleted
        assert!(!file_path.exists());
    }

    #[test]
    fn test_create_output_directory_nested_path() {
        let temp_dir = tempdir().unwrap();
        let output_dir = temp_dir.path().join("nested").join("path").join("output");

        let result = create_output_directory(&output_dir, false).unwrap();
        assert!(!result);
        assert!(output_dir.exists());
        assert!(output_dir.is_dir());
    }

    #[test]
    fn test_create_output_directory_existing_subdirs_with_files_allow_overwrite() {
        let temp_dir = tempdir().unwrap();
        let output_dir = temp_dir.path().join("output_with_subdirs");

        fs::create_dir_all(output_dir.join("subdir")).unwrap();
        fs::write(output_dir.join("file1.txt"), "content1").unwrap();
        fs::write(output_dir.join("subdir").join("file2.txt"), "content2").unwrap();

        let result = create_output_directory(&output_dir, true).unwrap();
        assert!(result);
        assert!(output_dir.exists());
        assert!(output_dir.is_dir());
        // Both files and the subdirectory must have been deleted
        assert!(!output_dir.join("file1.txt").exists());
        assert!(!output_dir.join("subdir").exists());
    }
}