use crate::agent::AgentID;
use crate::asset::{Asset, AssetID, AssetRef};
use crate::commodity::CommodityID;
use crate::process::ProcessID;
use crate::region::RegionID;
use crate::simulation::CommodityPrices;
use crate::simulation::investment::appraisal::AppraisalOutput;
use crate::simulation::optimisation::{FlowMap, Solution};
use crate::time_slice::TimeSliceID;
use crate::units::{Activity, Capacity, Flow, Money, MoneyPerActivity, MoneyPerFlow};
use anyhow::{Context, Result, ensure};
use csv;
use itertools::Itertools;
use serde::{Deserialize, Serialize};
use std::fs;
use std::fs::File;
use std::path::{Path, PathBuf};

pub mod metadata;
use metadata::write_metadata;

/// The root directory in which model output folders are created
const OUTPUT_DIRECTORY_ROOT: &str = "muse2_results";

/// The output file name for commodity flows
const COMMODITY_FLOWS_FILE_NAME: &str = "commodity_flows.csv";

/// The output file name for commodity prices
const COMMODITY_PRICES_FILE_NAME: &str = "commodity_prices.csv";

/// The output file name for assets
const ASSETS_FILE_NAME: &str = "assets.csv";

/// The output file name for activity debug info
const ACTIVITY_FILE_NAME: &str = "debug_activity.csv";

/// The output file name for commodity balance duals debug info
const COMMODITY_BALANCE_DUALS_FILE_NAME: &str = "debug_commodity_balance_duals.csv";

/// The output file name for activity duals debug info
const ACTIVITY_DUALS_FILE_NAME: &str = "debug_activity_duals.csv";

/// The output file name for column duals debug info
const COLUMN_DUALS_FILE_NAME: &str = "debug_column_duals.csv";

/// The output file name for solver values debug info
const SOLVER_VALUES_FILE_NAME: &str = "debug_solver.csv";

/// The output file name for appraisal results debug info
const APPRAISAL_RESULTS_FILE_NAME: &str = "debug_appraisal_results.csv";

/// The root directory in which model graph output folders are created
const GRAPHS_DIRECTORY_ROOT: &str = "muse2_graphs";

/// Get the path of the output directory for the given model
pub fn get_output_dir(model_dir: &Path) -> Result<PathBuf> {
    let model_dir = model_dir
        .canonicalize()
        .context("Could not resolve path to model")?;

    let model_name = model_dir
        .file_name()
        .context("Model cannot be in root folder")?
        .to_str()
        .context("Invalid chars in model dir name")?;

    Ok([OUTPUT_DIRECTORY_ROOT, model_name].iter().collect())
}

/// Get the path of the graph output directory for the given model
pub fn get_graphs_dir(model_dir: &Path) -> Result<PathBuf> {
    let model_dir = model_dir
        .canonicalize()
        .context("Could not resolve path to model")?;

    let model_name = model_dir
        .file_name()
        .context("Model cannot be in root folder")?
        .to_str()
        .context("Invalid chars in model dir name")?;

    Ok([GRAPHS_DIRECTORY_ROOT, model_name].iter().collect())
}

/// Create the directory in which output files will be written.
///
/// Returns `true` if an existing, non-empty output directory was deleted and recreated.
pub fn create_output_directory(output_dir: &Path, allow_overwrite: bool) -> Result<bool> {
    let overwrite = if let Ok(mut it) = fs::read_dir(output_dir) {
        // The directory already exists; if it is empty we can use it as-is
        if it.next().is_none() {
            return Ok(false);
        }

        ensure!(
            allow_overwrite,
            "Output folder already exists and is not empty. \
            Please delete the folder or pass the --overwrite command-line option."
        );

        fs::remove_dir_all(output_dir).context("Could not delete folder")?;
        true
    } else {
        false
    };

    fs::create_dir_all(output_dir)?;

    Ok(overwrite)
}

/// Represents a row in the assets output CSV file
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct AssetRow {
    asset_id: AssetID,
    process_id: ProcessID,
    region_id: RegionID,
    agent_id: AgentID,
    commission_year: u32,
    decommission_year: Option<u32>,
    capacity: Capacity,
}

impl AssetRow {
    /// Create a new [`AssetRow`] from the given asset
    fn new(asset: &Asset) -> Self {
        Self {
            asset_id: asset.id().unwrap(),
            process_id: asset.process_id().clone(),
            region_id: asset.region_id().clone(),
            agent_id: asset.agent_id().unwrap().clone(),
            commission_year: asset.commission_year(),
            decommission_year: asset.decommission_year(),
            capacity: asset.capacity(),
        }
    }
}

/// Represents a row in the commodity flows output CSV file
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct CommodityFlowRow {
    milestone_year: u32,
    asset_id: AssetID,
    commodity_id: CommodityID,
    time_slice: TimeSliceID,
    flow: Flow,
}

/// Represents a row in the commodity prices output CSV file
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct CommodityPriceRow {
    milestone_year: u32,
    commodity_id: CommodityID,
    region_id: RegionID,
    time_slice: TimeSliceID,
    price: MoneyPerFlow,
}

/// Represents a row in the activity debug output CSV file
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct ActivityRow {
    milestone_year: u32,
    run_description: String,
    asset_id: Option<AssetID>,
    process_id: ProcessID,
    region_id: RegionID,
    time_slice: TimeSliceID,
    activity: Activity,
}

/// Represents a row in the activity duals debug output CSV file
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct ActivityDualsRow {
    milestone_year: u32,
    run_description: String,
    asset_id: Option<AssetID>,
    process_id: ProcessID,
    region_id: RegionID,
    time_slice: TimeSliceID,
    value: MoneyPerActivity,
}

/// Represents a row in the commodity balance duals debug output CSV file
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct CommodityBalanceDualsRow {
    milestone_year: u32,
    run_description: String,
    commodity_id: CommodityID,
    region_id: RegionID,
    time_slice: TimeSliceID,
    value: MoneyPerFlow,
}

/// Represents a row in the column duals debug output CSV file
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct ColumnDualsRow {
    milestone_year: u32,
    run_description: String,
    asset_id: Option<AssetID>,
    process_id: ProcessID,
    region_id: RegionID,
    time_slice: TimeSliceID,
    value: MoneyPerActivity,
}

/// Represents a row in the solver values debug output CSV file
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct SolverValuesRow {
    milestone_year: u32,
    run_description: String,
    objective_value: Money,
}

/// Represents a row in the appraisal results debug output CSV file
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct AppraisalResultsRow {
    milestone_year: u32,
    run_description: String,
    asset_id: Option<AssetID>,
    process_id: ProcessID,
    region_id: RegionID,
    capacity: Capacity,
    unmet_demand: Flow,
    metric: f64,
}

/// For writing debug data (activity, duals, appraisal results, etc.) to CSV files
struct DebugDataWriter {
    /// Extra context to prepend to run descriptions, if any
    context: Option<String>,
    activity_writer: csv::Writer<File>,
    commodity_balance_duals_writer: csv::Writer<File>,
    activity_duals_writer: csv::Writer<File>,
    column_duals_writer: csv::Writer<File>,
    solver_values_writer: csv::Writer<File>,
    appraisal_results_writer: csv::Writer<File>,
}

impl DebugDataWriter {
    /// Open the debug CSV files for writing in the given output directory
    fn create(output_path: &Path) -> Result<Self> {
        let new_writer = |file_name| {
            let file_path = output_path.join(file_name);
            csv::Writer::from_path(file_path)
        };

        Ok(Self {
            context: None,
            activity_writer: new_writer(ACTIVITY_FILE_NAME)?,
            commodity_balance_duals_writer: new_writer(COMMODITY_BALANCE_DUALS_FILE_NAME)?,
            activity_duals_writer: new_writer(ACTIVITY_DUALS_FILE_NAME)?,
            column_duals_writer: new_writer(COLUMN_DUALS_FILE_NAME)?,
            solver_values_writer: new_writer(SOLVER_VALUES_FILE_NAME)?,
            appraisal_results_writer: new_writer(APPRAISAL_RESULTS_FILE_NAME)?,
        })
    }

    /// Prepend the current context (if any) to the given run description
    fn with_context(&self, run_description: &str) -> String {
        if let Some(context) = &self.context {
            format!("{context}; {run_description}")
        } else {
            run_description.to_string()
        }
    }

    /// Write all debug info for a dispatch optimisation run
    fn write_dispatch_debug_info(
        &mut self,
        milestone_year: u32,
        run_description: &str,
        solution: &Solution,
    ) -> Result<()> {
        self.write_activity(milestone_year, run_description, solution.iter_activity())?;
        self.write_activity_duals(
            milestone_year,
            run_description,
            solution.iter_activity_duals(),
        )?;
        self.write_commodity_balance_duals(
            milestone_year,
            run_description,
            solution.iter_commodity_balance_duals(),
        )?;
        self.write_column_duals(
            milestone_year,
            run_description,
            solution.iter_column_duals(),
        )?;
        self.write_solver_values(milestone_year, run_description, solution.objective_value)?;
        Ok(())
    }

    /// Write asset activity per time slice to file
    fn write_activity<'a, I>(
        &mut self,
        milestone_year: u32,
        run_description: &str,
        iter: I,
    ) -> Result<()>
    where
        I: Iterator<Item = (&'a AssetRef, &'a TimeSliceID, Activity)>,
    {
        for (asset, time_slice, activity) in iter {
            let row = ActivityRow {
                milestone_year,
                run_description: self.with_context(run_description),
                asset_id: asset.id(),
                process_id: asset.process_id().clone(),
                region_id: asset.region_id().clone(),
                time_slice: time_slice.clone(),
                activity,
            };
            self.activity_writer.serialize(row)?;
        }

        Ok(())
    }

    /// Write activity duals to file
    fn write_activity_duals<'a, I>(
        &mut self,
        milestone_year: u32,
        run_description: &str,
        iter: I,
    ) -> Result<()>
    where
        I: Iterator<Item = (&'a AssetRef, &'a TimeSliceID, MoneyPerActivity)>,
    {
        for (asset, time_slice, value) in iter {
            let row = ActivityDualsRow {
                milestone_year,
                run_description: self.with_context(run_description),
                asset_id: asset.id(),
                process_id: asset.process_id().clone(),
                region_id: asset.region_id().clone(),
                time_slice: time_slice.clone(),
                value,
            };
            self.activity_duals_writer.serialize(row)?;
        }

        Ok(())
    }

    /// Write column duals to file
    fn write_column_duals<'a, I>(
        &mut self,
        milestone_year: u32,
        run_description: &str,
        iter: I,
    ) -> Result<()>
    where
        I: Iterator<Item = (&'a AssetRef, &'a TimeSliceID, MoneyPerActivity)>,
    {
        for (asset, time_slice, value) in iter {
            let row = ColumnDualsRow {
                milestone_year,
                run_description: self.with_context(run_description),
                asset_id: asset.id(),
                process_id: asset.process_id().clone(),
                region_id: asset.region_id().clone(),
                time_slice: time_slice.clone(),
                value,
            };
            self.column_duals_writer.serialize(row)?;
        }

        Ok(())
    }

    /// Write commodity balance duals to file
    fn write_commodity_balance_duals<'a, I>(
        &mut self,
        milestone_year: u32,
        run_description: &str,
        iter: I,
    ) -> Result<()>
    where
        I: Iterator<Item = (&'a CommodityID, &'a RegionID, &'a TimeSliceID, MoneyPerFlow)>,
    {
        for (commodity_id, region_id, time_slice, value) in iter {
            let row = CommodityBalanceDualsRow {
                milestone_year,
                run_description: self.with_context(run_description),
                commodity_id: commodity_id.clone(),
                region_id: region_id.clone(),
                time_slice: time_slice.clone(),
                value,
            };
            self.commodity_balance_duals_writer.serialize(row)?;
        }

        Ok(())
    }

    /// Write the objective value for the solver run to file
    fn write_solver_values(
        &mut self,
        milestone_year: u32,
        run_description: &str,
        objective_value: Money,
    ) -> Result<()> {
        let row = SolverValuesRow {
            milestone_year,
            run_description: self.with_context(run_description),
            objective_value,
        };
        self.solver_values_writer.serialize(row)?;
        self.solver_values_writer.flush()?;

        Ok(())
    }

    /// Write appraisal results to file
    fn write_appraisal_results(
        &mut self,
        milestone_year: u32,
        run_description: &str,
        appraisal_results: &[AppraisalOutput],
    ) -> Result<()> {
        for result in appraisal_results {
            let row = AppraisalResultsRow {
                milestone_year,
                run_description: self.with_context(run_description),
                asset_id: result.asset.id(),
                process_id: result.asset.process_id().clone(),
                region_id: result.asset.region_id().clone(),
                capacity: result.capacity,
                // Total unmet demand summed over all entries for this result
                unmet_demand: result.unmet_demand.values().copied().sum(),
                metric: result.metric,
            };
            self.appraisal_results_writer.serialize(row)?;
        }

        Ok(())
    }

    /// Flush the underlying streams
    fn flush(&mut self) -> Result<()> {
        self.activity_writer.flush()?;
        self.commodity_balance_duals_writer.flush()?;
        self.activity_duals_writer.flush()?;
        self.column_duals_writer.flush()?;
        self.solver_values_writer.flush()?;
        self.appraisal_results_writer.flush()?;

        Ok(())
    }
}

/// For writing simulation output data (assets, flows, prices) to CSV files
pub struct DataWriter {
    assets_path: PathBuf,
    flows_writer: csv::Writer<File>,
    prices_writer: csv::Writer<File>,
    /// Writers for debug output files; only present when debug info is being saved
    debug_writer: Option<DebugDataWriter>,
}

impl DataWriter {
    /// Create a `DataWriter`, saving metadata for the run and opening the output CSV files
    pub fn create(output_path: &Path, model_path: &Path, save_debug_info: bool) -> Result<Self> {
        write_metadata(output_path, model_path).context("Failed to save metadata")?;

        let new_writer = |file_name| {
            let file_path = output_path.join(file_name);
            csv::Writer::from_path(file_path)
        };

        let debug_writer = if save_debug_info {
            Some(DebugDataWriter::create(output_path)?)
        } else {
            None
        };

        Ok(Self {
            assets_path: output_path.join(ASSETS_FILE_NAME),
            flows_writer: new_writer(COMMODITY_FLOWS_FILE_NAME)?,
            prices_writer: new_writer(COMMODITY_PRICES_FILE_NAME)?,
            debug_writer,
        })
    }

    /// Write debug info for a dispatch optimisation run, if debug output is enabled
    pub fn write_dispatch_debug_info(
        &mut self,
        milestone_year: u32,
        run_description: &str,
        solution: &Solution,
    ) -> Result<()> {
        if let Some(wtr) = &mut self.debug_writer {
            wtr.write_dispatch_debug_info(milestone_year, run_description, solution)?;
        }

        Ok(())
    }

    /// Write appraisal results, if debug output is enabled
    pub fn write_appraisal_debug_info(
        &mut self,
        milestone_year: u32,
        run_description: &str,
        appraisal_results: &[AppraisalOutput],
    ) -> Result<()> {
        if let Some(wtr) = &mut self.debug_writer {
            wtr.write_appraisal_results(milestone_year, run_description, appraisal_results)?;
        }

        Ok(())
    }

    /// Write assets to the assets CSV file in sorted order, replacing any existing file
    pub fn write_assets<'a, I>(&mut self, assets: I) -> Result<()>
    where
        I: Iterator<Item = &'a AssetRef>,
    {
        let mut writer = csv::Writer::from_path(&self.assets_path)?;
        for asset in assets.sorted() {
            let row = AssetRow::new(asset);
            writer.serialize(row)?;
        }
        writer.flush()?;

        Ok(())
    }

    /// Write commodity flows to file
    pub fn write_flows(&mut self, milestone_year: u32, flow_map: &FlowMap) -> Result<()> {
        for ((asset, commodity_id, time_slice), flow) in flow_map {
            let row = CommodityFlowRow {
                milestone_year,
                asset_id: asset.id().unwrap(),
                commodity_id: commodity_id.clone(),
                time_slice: time_slice.clone(),
                flow: *flow,
            };
            self.flows_writer.serialize(row)?;
        }

        Ok(())
    }

    /// Write commodity prices to file
    pub fn write_prices(&mut self, milestone_year: u32, prices: &CommodityPrices) -> Result<()> {
        for (commodity_id, region_id, time_slice, price) in prices.iter() {
            let row = CommodityPriceRow {
                milestone_year,
                commodity_id: commodity_id.clone(),
                region_id: region_id.clone(),
                time_slice: time_slice.clone(),
                price,
            };
            self.prices_writer.serialize(row)?;
        }

        Ok(())
    }

    /// Flush the underlying streams
    pub fn flush(&mut self) -> Result<()> {
        self.flows_writer.flush()?;
        self.prices_writer.flush()?;
        if let Some(wtr) = &mut self.debug_writer {
            wtr.flush()?;
        }

        Ok(())
    }

    /// Set extra context to prepend to run descriptions in debug output
    pub fn set_debug_context(&mut self, context: String) {
        if let Some(wtr) = &mut self.debug_writer {
            wtr.context = Some(context);
        }
    }

    /// Clear the context for run descriptions in debug output
    pub fn clear_debug_context(&mut self) {
        if let Some(wtr) = &mut self.debug_writer {
            wtr.context = None;
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::asset::AssetPool;
    use crate::fixture::{assets, commodity_id, region_id, time_slice};
    use crate::time_slice::TimeSliceID;
    use indexmap::indexmap;
    use itertools::{Itertools, assert_equal};
    use rstest::rstest;
    use std::iter;
    use tempfile::tempdir;

    #[rstest]
    fn test_write_assets(assets: AssetPool) {
        let dir = tempdir().unwrap();

        {
            let mut writer = DataWriter::create(dir.path(), dir.path(), false).unwrap();
            writer.write_assets(assets.iter_active()).unwrap();
            writer.flush().unwrap();
        }

        let asset = assets.iter_active().next().unwrap();
        let expected = AssetRow::new(asset);
        let records: Vec<AssetRow> = csv::Reader::from_path(dir.path().join(ASSETS_FILE_NAME))
            .unwrap()
            .into_deserialize()
            .try_collect()
            .unwrap();
        assert_equal(records, iter::once(expected));
    }

    #[rstest]
    fn test_write_flows(assets: AssetPool, commodity_id: CommodityID, time_slice: TimeSliceID) {
        let milestone_year = 2020;
        let asset = assets.iter_active().next().unwrap();
        let flow_map = indexmap! {
            (asset.clone(), commodity_id.clone(), time_slice.clone()) => Flow(42.0)
        };

        let dir = tempdir().unwrap();
        {
            let mut writer = DataWriter::create(dir.path(), dir.path(), false).unwrap();
            writer.write_flows(milestone_year, &flow_map).unwrap();
            writer.flush().unwrap();
        }

        let expected = CommodityFlowRow {
            milestone_year,
            asset_id: asset.id().unwrap(),
            commodity_id,
            time_slice,
            flow: Flow(42.0),
        };
        let records: Vec<CommodityFlowRow> =
            csv::Reader::from_path(dir.path().join(COMMODITY_FLOWS_FILE_NAME))
                .unwrap()
                .into_deserialize()
                .try_collect()
                .unwrap();
        assert_equal(records, iter::once(expected));
    }

    #[rstest]
    fn test_write_prices(commodity_id: CommodityID, region_id: RegionID, time_slice: TimeSliceID) {
        let milestone_year = 2020;
        let price = MoneyPerFlow(42.0);
        let mut prices = CommodityPrices::default();
        prices.insert(&commodity_id, &region_id, &time_slice, price);

        let dir = tempdir().unwrap();

        {
            let mut writer = DataWriter::create(dir.path(), dir.path(), false).unwrap();
            writer.write_prices(milestone_year, &prices).unwrap();
            writer.flush().unwrap();
        }

        let expected = CommodityPriceRow {
            milestone_year,
            commodity_id,
            region_id,
            time_slice,
            price,
        };
        let records: Vec<CommodityPriceRow> =
            csv::Reader::from_path(dir.path().join(COMMODITY_PRICES_FILE_NAME))
                .unwrap()
                .into_deserialize()
                .try_collect()
                .unwrap();
        assert_equal(records, iter::once(expected));
    }

    #[rstest]
    fn test_write_commodity_balance_duals(
        commodity_id: CommodityID,
        region_id: RegionID,
        time_slice: TimeSliceID,
    ) {
        let milestone_year = 2020;
        let run_description = "test_run".to_string();
        let value = MoneyPerFlow(0.5);
        let dir = tempdir().unwrap();

        {
            let mut writer = DebugDataWriter::create(dir.path()).unwrap();
            writer
                .write_commodity_balance_duals(
                    milestone_year,
                    &run_description,
                    iter::once((&commodity_id, &region_id, &time_slice, value)),
                )
                .unwrap();
            writer.flush().unwrap();
        }

        let expected = CommodityBalanceDualsRow {
            milestone_year,
            run_description,
            commodity_id,
            region_id,
            time_slice,
            value,
        };
        let records: Vec<CommodityBalanceDualsRow> =
            csv::Reader::from_path(dir.path().join(COMMODITY_BALANCE_DUALS_FILE_NAME))
                .unwrap()
                .into_deserialize()
                .try_collect()
                .unwrap();
        assert_equal(records, iter::once(expected));
    }

    #[rstest]
    fn test_write_activity_duals(assets: AssetPool, time_slice: TimeSliceID) {
        let milestone_year = 2020;
        let run_description = "test_run".to_string();
        let value = MoneyPerActivity(0.5);
        let dir = tempdir().unwrap();
        let asset = assets.iter_active().next().unwrap();

        {
            let mut writer = DebugDataWriter::create(dir.path()).unwrap();
            writer
                .write_activity_duals(
                    milestone_year,
                    &run_description,
                    iter::once((asset, &time_slice, value)),
                )
                .unwrap();
            writer.flush().unwrap();
        }

        let expected = ActivityDualsRow {
            milestone_year,
            run_description,
            asset_id: asset.id(),
            process_id: asset.process_id().clone(),
            region_id: asset.region_id().clone(),
            time_slice,
            value,
        };
        let records: Vec<ActivityDualsRow> =
            csv::Reader::from_path(dir.path().join(ACTIVITY_DUALS_FILE_NAME))
                .unwrap()
                .into_deserialize()
                .try_collect()
                .unwrap();
        assert_equal(records, iter::once(expected));
    }

    #[rstest]
    fn test_write_column_duals(assets: AssetPool, time_slice: TimeSliceID) {
        let milestone_year = 2020;
        let run_description = "test_run".to_string();
        let value = MoneyPerActivity(0.5);
        let dir = tempdir().unwrap();
        let asset = assets.iter_active().next().unwrap();

        {
            let mut writer = DebugDataWriter::create(dir.path()).unwrap();
            writer
                .write_column_duals(
                    milestone_year,
                    &run_description,
                    iter::once((asset, &time_slice, value)),
                )
                .unwrap();
            writer.flush().unwrap();
        }

        let expected = ColumnDualsRow {
            milestone_year,
            run_description,
            asset_id: asset.id(),
            process_id: asset.process_id().clone(),
            region_id: asset.region_id().clone(),
            time_slice,
            value,
        };
        let records: Vec<ColumnDualsRow> =
            csv::Reader::from_path(dir.path().join(COLUMN_DUALS_FILE_NAME))
                .unwrap()
                .into_deserialize()
                .try_collect()
                .unwrap();
        assert_equal(records, iter::once(expected));
    }

    #[rstest]
    fn test_write_activity(assets: AssetPool, time_slice: TimeSliceID) {
        let milestone_year = 2020;
        let run_description = "test_run".to_string();
        let activity = Activity(100.5);
        let dir = tempdir().unwrap();
        let asset = assets.iter_active().next().unwrap();

        {
            let mut writer = DebugDataWriter::create(dir.path()).unwrap();
            writer
                .write_activity(
                    milestone_year,
                    &run_description,
                    iter::once((asset, &time_slice, activity)),
                )
                .unwrap();
            writer.flush().unwrap();
        }

        let expected = ActivityRow {
            milestone_year,
            run_description,
            asset_id: asset.id(),
            process_id: asset.process_id().clone(),
            region_id: asset.region_id().clone(),
            time_slice,
            activity,
        };
        let records: Vec<ActivityRow> = csv::Reader::from_path(dir.path().join(ACTIVITY_FILE_NAME))
            .unwrap()
            .into_deserialize()
            .try_collect()
            .unwrap();
        assert_equal(records, iter::once(expected));
    }

    #[rstest]
    fn test_write_solver_values() {
        let milestone_year = 2020;
        let run_description = "test_run".to_string();
        let objective_value = Money(1234.56);
        let dir = tempdir().unwrap();

        {
            let mut writer = DebugDataWriter::create(dir.path()).unwrap();
            writer
                .write_solver_values(milestone_year, &run_description, objective_value)
                .unwrap();
            writer.flush().unwrap();
        }

        let expected = SolverValuesRow {
            milestone_year,
            run_description,
            objective_value,
        };
        let records: Vec<SolverValuesRow> =
            csv::Reader::from_path(dir.path().join(SOLVER_VALUES_FILE_NAME))
                .unwrap()
                .into_deserialize()
                .try_collect()
                .unwrap();
        assert_equal(records, iter::once(expected));
    }

    #[rstest]
    fn test_write_appraisal_results(assets: AssetPool) {
        let milestone_year = 2020;
        let run_description = "test_run".to_string();
        let dir = tempdir().unwrap();
        let asset = assets.iter_active().next().unwrap();

        {
            let mut writer = DebugDataWriter::create(dir.path()).unwrap();
            let appraisal = AppraisalOutput {
                asset: asset.clone(),
                capacity: Capacity(42.0),
                unmet_demand: Default::default(),
                metric: 4.14,
            };
            writer
                .write_appraisal_results(milestone_year, &run_description, &[appraisal])
                .unwrap();
            writer.flush().unwrap();
        }

        let expected = AppraisalResultsRow {
            milestone_year,
            run_description,
            asset_id: asset.id(),
            process_id: asset.process_id().clone(),
            region_id: asset.region_id().clone(),
            capacity: Capacity(42.0),
            unmet_demand: Flow(0.0),
            metric: 4.14,
        };
        let records: Vec<AppraisalResultsRow> =
            csv::Reader::from_path(dir.path().join(APPRAISAL_RESULTS_FILE_NAME))
                .unwrap()
                .into_deserialize()
                .try_collect()
                .unwrap();
        assert_equal(records, iter::once(expected));
    }

    #[test]
    fn test_create_output_directory_new_directory() {
        let temp_dir = tempdir().unwrap();
        let output_dir = temp_dir.path().join("new_output");

        let result = create_output_directory(&output_dir, false).unwrap();
        assert!(!result);
        assert!(output_dir.exists());
        assert!(output_dir.is_dir());
    }

    #[test]
    fn test_create_output_directory_existing_empty_directory() {
        let temp_dir = tempdir().unwrap();
        let output_dir = temp_dir.path().join("empty_output");

        fs::create_dir(&output_dir).unwrap();

        let result = create_output_directory(&output_dir, false).unwrap();
        assert!(!result);
        assert!(output_dir.exists());
        assert!(output_dir.is_dir());
    }

    #[test]
    fn test_create_output_directory_existing_with_files_no_overwrite() {
        let temp_dir = tempdir().unwrap();
        let output_dir = temp_dir.path().join("output_with_files");

        fs::create_dir(&output_dir).unwrap();
        fs::write(output_dir.join("existing_file.txt"), "some content").unwrap();

        let result = create_output_directory(&output_dir, false);
        assert!(result.is_err());
        assert!(
            result
                .unwrap_err()
                .to_string()
                .contains("Output folder already exists")
        );
    }

    #[test]
    fn test_create_output_directory_existing_with_files_allow_overwrite() {
        let temp_dir = tempdir().unwrap();
        let output_dir = temp_dir.path().join("output_with_files");

        fs::create_dir(&output_dir).unwrap();
        let file_path = output_dir.join("existing_file.txt");
        fs::write(&file_path, "some content").unwrap();

        let result = create_output_directory(&output_dir, true).unwrap();
        assert!(result);
        assert!(output_dir.exists());
        assert!(output_dir.is_dir());
        assert!(!file_path.exists());
    }

    #[test]
    fn test_create_output_directory_nested_path() {
        let temp_dir = tempdir().unwrap();
        let output_dir = temp_dir.path().join("nested").join("path").join("output");

        let result = create_output_directory(&output_dir, false).unwrap();
        assert!(!result);
        assert!(output_dir.exists());
        assert!(output_dir.is_dir());
    }

    #[test]
    fn test_create_output_directory_existing_subdirs_with_files_allow_overwrite() {
        let temp_dir = tempdir().unwrap();
        let output_dir = temp_dir.path().join("output_with_subdirs");

        fs::create_dir_all(output_dir.join("subdir")).unwrap();
        fs::write(output_dir.join("file1.txt"), "content1").unwrap();
        fs::write(output_dir.join("subdir").join("file2.txt"), "content2").unwrap();

        let result = create_output_directory(&output_dir, true).unwrap();
        assert!(result);
        assert!(output_dir.exists());
        assert!(output_dir.is_dir());
        assert!(!output_dir.join("file1.txt").exists());
        assert!(!output_dir.join("subdir").exists());
    }
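
    // Illustrative sketch: checks that `get_output_dir` places results under
    // OUTPUT_DIRECTORY_ROOT in a folder named after the model directory. The
    // "example_model" directory name used here is arbitrary.
    #[test]
    fn test_get_output_dir_uses_model_name() {
        let temp_dir = tempdir().unwrap();
        let model_dir = temp_dir.path().join("example_model");
        fs::create_dir(&model_dir).unwrap();

        let output_dir = get_output_dir(&model_dir).unwrap();
        assert_eq!(
            output_dir,
            PathBuf::from(OUTPUT_DIRECTORY_ROOT).join("example_model")
        );
    }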
}