1use crate::agent::AgentID;
3use crate::asset::{Asset, AssetID, AssetRef};
4use crate::commodity::CommodityID;
5use crate::process::ProcessID;
6use crate::region::RegionID;
7use crate::simulation::CommodityPrices;
8use crate::simulation::investment::appraisal::AppraisalOutput;
9use crate::simulation::optimisation::{FlowMap, Solution};
10use crate::time_slice::TimeSliceID;
11use crate::units::{Activity, Capacity, Flow, Money, MoneyPerActivity, MoneyPerFlow};
12use anyhow::{Context, Result, ensure};
13use csv;
14use itertools::Itertools;
15use serde::{Deserialize, Serialize};
16use std::fs;
17use std::fs::File;
18use std::path::{Path, PathBuf};
19
20pub mod metadata;
21use metadata::write_metadata;
22
/// Root folder (relative to the working directory) in which results are written
const OUTPUT_DIRECTORY_ROOT: &str = "muse2_results";

/// File to which commodity flows are written
const COMMODITY_FLOWS_FILE_NAME: &str = "commodity_flows.csv";

/// File to which commodity prices are written
const COMMODITY_PRICES_FILE_NAME: &str = "commodity_prices.csv";

/// File to which the asset pool is written
const ASSETS_FILE_NAME: &str = "assets.csv";

/// Debug file: asset activity per time slice
const ACTIVITY_FILE_NAME: &str = "debug_activity.csv";

/// Debug file: dual values of the commodity balance constraints
const COMMODITY_BALANCE_DUALS_FILE_NAME: &str = "debug_commodity_balance_duals.csv";

/// Debug file: dual values of the activity constraints
const ACTIVITY_DUALS_FILE_NAME: &str = "debug_activity_duals.csv";

/// Debug file: solver-level values (e.g. objective value) per run
const SOLVER_VALUES_FILE_NAME: &str = "debug_solver.csv";

/// Debug file: investment appraisal results
const APPRAISAL_RESULTS_FILE_NAME: &str = "debug_appraisal_results.csv";

/// Root folder (relative to the working directory) in which graphs are written
const GRAPHS_DIRECTORY_ROOT: &str = "muse2_graphs";
53pub fn get_output_dir(model_dir: &Path) -> Result<PathBuf> {
55 let model_dir = model_dir
58 .canonicalize() .context("Could not resolve path to model")?;
60
61 let model_name = model_dir
62 .file_name()
63 .context("Model cannot be in root folder")?
64 .to_str()
65 .context("Invalid chars in model dir name")?;
66
67 Ok([OUTPUT_DIRECTORY_ROOT, model_name].iter().collect())
69}
70
71pub fn get_graphs_dir(model_dir: &Path) -> Result<PathBuf> {
73 let model_dir = model_dir
74 .canonicalize() .context("Could not resolve path to model")?;
76 let model_name = model_dir
77 .file_name()
78 .context("Model cannot be in root folder")?
79 .to_str()
80 .context("Invalid chars in model dir name")?;
81 Ok([GRAPHS_DIRECTORY_ROOT, model_name].iter().collect())
82}
83
84pub fn create_output_directory(output_dir: &Path, allow_overwrite: bool) -> Result<bool> {
95 let overwrite = if let Ok(mut it) = fs::read_dir(output_dir) {
97 if it.next().is_none() {
98 return Ok(false);
100 }
101
102 ensure!(
103 allow_overwrite,
104 "Output folder already exists and is not empty. \
105 Please delete the folder or pass the --overwrite command-line option."
106 );
107
108 fs::remove_dir_all(output_dir).context("Could not delete folder")?;
109 true
110 } else {
111 false
112 };
113
114 fs::create_dir_all(output_dir)?;
116
117 Ok(overwrite)
118}
119
/// Represents a row in the assets output CSV file
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct AssetRow {
    asset_id: AssetID,
    process_id: ProcessID,
    region_id: RegionID,
    agent_id: AgentID,
    commission_year: u32,
    // `None` while the asset is still active
    decommission_year: Option<u32>,
    capacity: Capacity,
}
131
impl AssetRow {
    /// Create a new [`AssetRow`] from the given asset.
    ///
    /// NOTE(review): uses `unwrap()` on `id()` and `agent_id()`, so this assumes the
    /// asset has been commissioned (has an ID and an owning agent) — confirm callers
    /// only pass active assets.
    fn new(asset: &Asset) -> Self {
        Self {
            asset_id: asset.id().unwrap(),
            process_id: asset.process_id().clone(),
            region_id: asset.region_id().clone(),
            agent_id: asset.agent_id().unwrap().clone(),
            commission_year: asset.commission_year(),
            decommission_year: asset.decommission_year(),
            capacity: asset.capacity(),
        }
    }
}
146
/// Represents a row in the commodity flows output CSV file
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct CommodityFlowRow {
    milestone_year: u32,
    asset_id: AssetID,
    commodity_id: CommodityID,
    time_slice: TimeSliceID,
    flow: Flow,
}
156
/// Represents a row in the commodity prices output CSV file
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct CommodityPriceRow {
    milestone_year: u32,
    commodity_id: CommodityID,
    region_id: RegionID,
    time_slice: TimeSliceID,
    price: MoneyPerFlow,
}
166
/// Represents a row in the debug activity output CSV file
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct ActivityRow {
    milestone_year: u32,
    // Which dispatch run produced this row (see `DebugDataWriter::with_context`)
    run_description: String,
    // `None` for candidate assets that have not yet been commissioned
    asset_id: Option<AssetID>,
    process_id: ProcessID,
    region_id: RegionID,
    time_slice: TimeSliceID,
    activity: Activity,
}
178
/// Represents a row in the debug activity duals output CSV file
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct ActivityDualsRow {
    milestone_year: u32,
    // Which dispatch run produced this row (see `DebugDataWriter::with_context`)
    run_description: String,
    // `None` for candidate assets that have not yet been commissioned
    asset_id: Option<AssetID>,
    process_id: ProcessID,
    region_id: RegionID,
    time_slice: TimeSliceID,
    value: MoneyPerActivity,
}
190
/// Represents a row in the debug commodity balance duals output CSV file
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct CommodityBalanceDualsRow {
    milestone_year: u32,
    // Which dispatch run produced this row (see `DebugDataWriter::with_context`)
    run_description: String,
    commodity_id: CommodityID,
    region_id: RegionID,
    time_slice: TimeSliceID,
    value: MoneyPerFlow,
}
201
/// Represents a row in the debug solver values output CSV file
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct SolverValuesRow {
    milestone_year: u32,
    // Which dispatch run produced this row (see `DebugDataWriter::with_context`)
    run_description: String,
    objective_value: Money,
}
209
/// Represents a row in the debug appraisal results output CSV file
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct AppraisalResultsRow {
    milestone_year: u32,
    // Which appraisal run produced this row (see `DebugDataWriter::with_context`)
    run_description: String,
    // `None` for candidate assets that have not yet been commissioned
    asset_id: Option<AssetID>,
    process_id: ProcessID,
    region_id: RegionID,
    capacity: Capacity,
    // Total unmet demand, summed over all entries in the appraisal output
    unmet_demand: Flow,
    metric: f64,
}
222
/// Writes optional debug information about simulation runs to CSV files
struct DebugDataWriter {
    // Extra context prepended to every row's run description, if set
    context: Option<String>,
    activity_writer: csv::Writer<File>,
    commodity_balance_duals_writer: csv::Writer<File>,
    activity_duals_writer: csv::Writer<File>,
    solver_values_writer: csv::Writer<File>,
    appraisal_results_writer: csv::Writer<File>,
}
232
impl DebugDataWriter {
    /// Open CSV files for all debug outputs in the given folder.
    ///
    /// # Arguments
    ///
    /// * `output_path` - Folder in which the debug CSV files will be created
    fn create(output_path: &Path) -> Result<Self> {
        // Helper closure so each file only needs its name, not its full path
        let new_writer = |file_name| {
            let file_path = output_path.join(file_name);
            csv::Writer::from_path(file_path)
        };

        Ok(Self {
            context: None,
            activity_writer: new_writer(ACTIVITY_FILE_NAME)?,
            commodity_balance_duals_writer: new_writer(COMMODITY_BALANCE_DUALS_FILE_NAME)?,
            activity_duals_writer: new_writer(ACTIVITY_DUALS_FILE_NAME)?,
            solver_values_writer: new_writer(SOLVER_VALUES_FILE_NAME)?,
            appraisal_results_writer: new_writer(APPRAISAL_RESULTS_FILE_NAME)?,
        })
    }

    /// Build the full run description, prepending the current context (if any).
    ///
    /// Note: this shadows the name of `anyhow::Context::with_context` but is unrelated.
    fn with_context(&self, run_description: &str) -> String {
        if let Some(context) = &self.context {
            format!("{context}; {run_description}")
        } else {
            run_description.to_string()
        }
    }

    /// Write all per-solution debug info (activity, duals and solver values).
    ///
    /// # Arguments
    ///
    /// * `milestone_year` - The milestone year of interest
    /// * `run_description` - Description of the dispatch run
    /// * `solution` - The dispatch optimisation solution
    fn write_dispatch_debug_info(
        &mut self,
        milestone_year: u32,
        run_description: &str,
        solution: &Solution,
    ) -> Result<()> {
        self.write_activity(milestone_year, run_description, solution.iter_activity())?;
        self.write_activity_duals(
            milestone_year,
            run_description,
            solution.iter_activity_duals(),
        )?;
        self.write_commodity_balance_duals(
            milestone_year,
            run_description,
            solution.iter_commodity_balance_duals(),
        )?;
        self.write_solver_values(milestone_year, run_description, solution.objective_value)?;
        Ok(())
    }

    /// Write one activity row per (asset, time slice) pair to the activity CSV file.
    fn write_activity<'a, I>(
        &mut self,
        milestone_year: u32,
        run_description: &str,
        iter: I,
    ) -> Result<()>
    where
        I: Iterator<Item = (&'a AssetRef, &'a TimeSliceID, Activity)>,
    {
        for (asset, time_slice, activity) in iter {
            let row = ActivityRow {
                milestone_year,
                run_description: self.with_context(run_description),
                asset_id: asset.id(),
                process_id: asset.process_id().clone(),
                region_id: asset.region_id().clone(),
                time_slice: time_slice.clone(),
                activity,
            };
            self.activity_writer.serialize(row)?;
        }

        Ok(())
    }

    /// Write activity constraint duals to the activity duals CSV file.
    fn write_activity_duals<'a, I>(
        &mut self,
        milestone_year: u32,
        run_description: &str,
        iter: I,
    ) -> Result<()>
    where
        I: Iterator<Item = (&'a AssetRef, &'a TimeSliceID, MoneyPerActivity)>,
    {
        for (asset, time_slice, value) in iter {
            let row = ActivityDualsRow {
                milestone_year,
                run_description: self.with_context(run_description),
                asset_id: asset.id(),
                process_id: asset.process_id().clone(),
                region_id: asset.region_id().clone(),
                time_slice: time_slice.clone(),
                value,
            };
            self.activity_duals_writer.serialize(row)?;
        }

        Ok(())
    }

    /// Write commodity balance constraint duals to the corresponding CSV file.
    fn write_commodity_balance_duals<'a, I>(
        &mut self,
        milestone_year: u32,
        run_description: &str,
        iter: I,
    ) -> Result<()>
    where
        I: Iterator<Item = (&'a CommodityID, &'a RegionID, &'a TimeSliceID, MoneyPerFlow)>,
    {
        for (commodity_id, region_id, time_slice, value) in iter {
            let row = CommodityBalanceDualsRow {
                milestone_year,
                run_description: self.with_context(run_description),
                commodity_id: commodity_id.clone(),
                region_id: region_id.clone(),
                time_slice: time_slice.clone(),
                value,
            };
            self.commodity_balance_duals_writer.serialize(row)?;
        }

        Ok(())
    }

    /// Write solver-level values (currently just the objective value) for one run.
    ///
    /// Unlike the other writers, this flushes immediately so the solver progress is
    /// visible on disk even if the simulation later aborts.
    fn write_solver_values(
        &mut self,
        milestone_year: u32,
        run_description: &str,
        objective_value: Money,
    ) -> Result<()> {
        let row = SolverValuesRow {
            milestone_year,
            run_description: self.with_context(run_description),
            objective_value,
        };
        self.solver_values_writer.serialize(row)?;
        self.solver_values_writer.flush()?;

        Ok(())
    }

    /// Write one row per appraisal output to the appraisal results CSV file.
    fn write_appraisal_results(
        &mut self,
        milestone_year: u32,
        run_description: &str,
        appraisal_results: &[AppraisalOutput],
    ) -> Result<()> {
        for result in appraisal_results {
            let row = AppraisalResultsRow {
                milestone_year,
                run_description: self.with_context(run_description),
                asset_id: result.asset.id(),
                process_id: result.asset.process_id().clone(),
                region_id: result.asset.region_id().clone(),
                capacity: result.capacity,
                // Sum unmet demand across all entries into a single total
                unmet_demand: result.unmet_demand.values().copied().sum(),
                metric: result.metric,
            };
            self.appraisal_results_writer.serialize(row)?;
        }

        Ok(())
    }

    /// Flush all underlying CSV writers so buffered rows reach disk.
    fn flush(&mut self) -> Result<()> {
        self.activity_writer.flush()?;
        self.commodity_balance_duals_writer.flush()?;
        self.activity_duals_writer.flush()?;
        self.solver_values_writer.flush()?;
        self.appraisal_results_writer.flush()?;

        Ok(())
    }
}
416
/// Writes simulation output data (assets, flows, prices) to CSV files
pub struct DataWriter {
    // Path to the assets file; a fresh writer is created on every `write_assets` call
    assets_path: PathBuf,
    flows_writer: csv::Writer<File>,
    prices_writer: csv::Writer<File>,
    // Present only when debug output was requested at creation time
    debug_writer: Option<DebugDataWriter>,
}
424
impl DataWriter {
    /// Open CSV files to write output data to, and save run metadata.
    ///
    /// # Arguments
    ///
    /// * `output_path` - Folder in which the output files will be created
    /// * `model_path` - Path to the model, recorded in the metadata
    /// * `save_debug_info` - Whether to also create the debug CSV files
    pub fn create(output_path: &Path, model_path: &Path, save_debug_info: bool) -> Result<Self> {
        write_metadata(output_path, model_path).context("Failed to save metadata")?;

        let new_writer = |file_name| {
            let file_path = output_path.join(file_name);
            csv::Writer::from_path(file_path)
        };

        // Only create the (many) debug files when debug output was requested
        let debug_writer = if save_debug_info {
            Some(DebugDataWriter::create(output_path)?)
        } else {
            None
        };

        Ok(Self {
            assets_path: output_path.join(ASSETS_FILE_NAME),
            flows_writer: new_writer(COMMODITY_FLOWS_FILE_NAME)?,
            prices_writer: new_writer(COMMODITY_PRICES_FILE_NAME)?,
            debug_writer,
        })
    }

    /// Write debug info about a dispatch run, if debug output is enabled (no-op otherwise).
    pub fn write_dispatch_debug_info(
        &mut self,
        milestone_year: u32,
        run_description: &str,
        solution: &Solution,
    ) -> Result<()> {
        if let Some(wtr) = &mut self.debug_writer {
            wtr.write_dispatch_debug_info(milestone_year, run_description, solution)?;
        }

        Ok(())
    }

    /// Write appraisal debug info, if debug output is enabled (no-op otherwise).
    pub fn write_appraisal_debug_info(
        &mut self,
        milestone_year: u32,
        run_description: &str,
        appraisal_results: &[AppraisalOutput],
    ) -> Result<()> {
        if let Some(wtr) = &mut self.debug_writer {
            wtr.write_appraisal_results(milestone_year, run_description, appraisal_results)?;
        }

        Ok(())
    }

    /// Write the current asset pool to the assets CSV file, sorted.
    ///
    /// Note: this recreates the file from scratch on every call, replacing any
    /// previously written contents rather than appending.
    pub fn write_assets<'a, I>(&mut self, assets: I) -> Result<()>
    where
        I: Iterator<Item = &'a AssetRef>,
    {
        let mut writer = csv::Writer::from_path(&self.assets_path)?;
        for asset in assets.sorted() {
            let row = AssetRow::new(asset);
            writer.serialize(row)?;
        }
        writer.flush()?;

        Ok(())
    }

    /// Append commodity flows for the given milestone year to the flows CSV file.
    pub fn write_flows(&mut self, milestone_year: u32, flow_map: &FlowMap) -> Result<()> {
        for ((asset, commodity_id, time_slice), flow) in flow_map {
            let row = CommodityFlowRow {
                milestone_year,
                // Flows are only recorded for commissioned assets, hence the unwrap
                asset_id: asset.id().unwrap(),
                commodity_id: commodity_id.clone(),
                time_slice: time_slice.clone(),
                flow: *flow,
            };
            self.flows_writer.serialize(row)?;
        }

        Ok(())
    }

    /// Append commodity prices for the given milestone year to the prices CSV file.
    pub fn write_prices(&mut self, milestone_year: u32, prices: &CommodityPrices) -> Result<()> {
        for (commodity_id, region_id, time_slice, price) in prices.iter() {
            let row = CommodityPriceRow {
                milestone_year,
                commodity_id: commodity_id.clone(),
                region_id: region_id.clone(),
                time_slice: time_slice.clone(),
                price,
            };
            self.prices_writer.serialize(row)?;
        }

        Ok(())
    }

    /// Flush all writers (including the debug writers, if enabled) to disk.
    pub fn flush(&mut self) -> Result<()> {
        self.flows_writer.flush()?;
        self.prices_writer.flush()?;
        if let Some(wtr) = &mut self.debug_writer {
            wtr.flush()?;
        }

        Ok(())
    }

    /// Set a context string that is prepended to subsequent debug run descriptions.
    ///
    /// No-op when debug output is disabled.
    pub fn set_debug_context(&mut self, context: String) {
        if let Some(wtr) = &mut self.debug_writer {
            wtr.context = Some(context);
        }
    }

    /// Clear the debug context set by [`Self::set_debug_context`].
    ///
    /// No-op when debug output is disabled.
    pub fn clear_debug_context(&mut self) {
        if let Some(wtr) = &mut self.debug_writer {
            wtr.context = None;
        }
    }
}
566
#[cfg(test)]
mod tests {
    use super::*;
    use crate::asset::AssetPool;
    use crate::fixture::{assets, commodity_id, region_id, time_slice};
    use crate::time_slice::TimeSliceID;
    use indexmap::indexmap;
    use itertools::{Itertools, assert_equal};
    use rstest::rstest;
    use std::iter;
    use tempfile::tempdir;

    #[rstest]
    fn test_write_assets(assets: AssetPool) {
        let dir = tempdir().unwrap();

        // Scoped so the writer is dropped (and files closed) before we read back
        {
            let mut writer = DataWriter::create(dir.path(), dir.path(), false).unwrap();
            writer.write_assets(assets.iter_active()).unwrap();
            writer.flush().unwrap();
        }

        let asset = assets.iter_active().next().unwrap();
        let expected = AssetRow::new(asset);
        let records: Vec<AssetRow> = csv::Reader::from_path(dir.path().join(ASSETS_FILE_NAME))
            .unwrap()
            .into_deserialize()
            .try_collect()
            .unwrap();
        assert_equal(records, iter::once(expected));
    }

    #[rstest]
    fn test_write_flows(assets: AssetPool, commodity_id: CommodityID, time_slice: TimeSliceID) {
        let milestone_year = 2020;
        let asset = assets.iter_active().next().unwrap();
        let flow_map = indexmap! {
            (asset.clone(), commodity_id.clone(), time_slice.clone()) => Flow(42.0)
        };

        let dir = tempdir().unwrap();
        {
            let mut writer = DataWriter::create(dir.path(), dir.path(), false).unwrap();
            writer.write_flows(milestone_year, &flow_map).unwrap();
            writer.flush().unwrap();
        }

        let expected = CommodityFlowRow {
            milestone_year,
            asset_id: asset.id().unwrap(),
            commodity_id,
            time_slice,
            flow: Flow(42.0),
        };
        let records: Vec<CommodityFlowRow> =
            csv::Reader::from_path(dir.path().join(COMMODITY_FLOWS_FILE_NAME))
                .unwrap()
                .into_deserialize()
                .try_collect()
                .unwrap();
        assert_equal(records, iter::once(expected));
    }

    #[rstest]
    fn test_write_prices(commodity_id: CommodityID, region_id: RegionID, time_slice: TimeSliceID) {
        let milestone_year = 2020;
        let price = MoneyPerFlow(42.0);
        let mut prices = CommodityPrices::default();
        // Fixed: was mojibake (`®ion_id`, i.e. a corrupted `&reg` HTML entity)
        prices.insert(&commodity_id, &region_id, &time_slice, price);

        let dir = tempdir().unwrap();

        {
            let mut writer = DataWriter::create(dir.path(), dir.path(), false).unwrap();
            writer.write_prices(milestone_year, &prices).unwrap();
            writer.flush().unwrap();
        }

        let expected = CommodityPriceRow {
            milestone_year,
            commodity_id,
            region_id,
            time_slice,
            price,
        };
        let records: Vec<CommodityPriceRow> =
            csv::Reader::from_path(dir.path().join(COMMODITY_PRICES_FILE_NAME))
                .unwrap()
                .into_deserialize()
                .try_collect()
                .unwrap();
        assert_equal(records, iter::once(expected));
    }

    #[rstest]
    fn test_write_commodity_balance_duals(
        commodity_id: CommodityID,
        region_id: RegionID,
        time_slice: TimeSliceID,
    ) {
        let milestone_year = 2020;
        let run_description = "test_run".to_string();
        let value = MoneyPerFlow(0.5);
        let dir = tempdir().unwrap();

        {
            let mut writer = DebugDataWriter::create(dir.path()).unwrap();
            writer
                .write_commodity_balance_duals(
                    milestone_year,
                    &run_description,
                    // Fixed: was mojibake (`®ion_id`, i.e. a corrupted `&reg` HTML entity)
                    iter::once((&commodity_id, &region_id, &time_slice, value)),
                )
                .unwrap();
            writer.flush().unwrap();
        }

        let expected = CommodityBalanceDualsRow {
            milestone_year,
            run_description,
            commodity_id,
            region_id,
            time_slice,
            value,
        };
        let records: Vec<CommodityBalanceDualsRow> =
            csv::Reader::from_path(dir.path().join(COMMODITY_BALANCE_DUALS_FILE_NAME))
                .unwrap()
                .into_deserialize()
                .try_collect()
                .unwrap();
        assert_equal(records, iter::once(expected));
    }

    #[rstest]
    fn test_write_activity_duals(assets: AssetPool, time_slice: TimeSliceID) {
        let milestone_year = 2020;
        let run_description = "test_run".to_string();
        let value = MoneyPerActivity(0.5);
        let dir = tempdir().unwrap();
        let asset = assets.iter_active().next().unwrap();

        {
            let mut writer = DebugDataWriter::create(dir.path()).unwrap();
            writer
                .write_activity_duals(
                    milestone_year,
                    &run_description,
                    iter::once((asset, &time_slice, value)),
                )
                .unwrap();
            writer.flush().unwrap();
        }

        let expected = ActivityDualsRow {
            milestone_year,
            run_description,
            asset_id: asset.id(),
            process_id: asset.process_id().clone(),
            region_id: asset.region_id().clone(),
            time_slice,
            value,
        };
        let records: Vec<ActivityDualsRow> =
            csv::Reader::from_path(dir.path().join(ACTIVITY_DUALS_FILE_NAME))
                .unwrap()
                .into_deserialize()
                .try_collect()
                .unwrap();
        assert_equal(records, iter::once(expected));
    }

    #[rstest]
    fn test_write_activity(assets: AssetPool, time_slice: TimeSliceID) {
        let milestone_year = 2020;
        let run_description = "test_run".to_string();
        let activity = Activity(100.5);
        let dir = tempdir().unwrap();
        let asset = assets.iter_active().next().unwrap();

        {
            let mut writer = DebugDataWriter::create(dir.path()).unwrap();
            writer
                .write_activity(
                    milestone_year,
                    &run_description,
                    iter::once((asset, &time_slice, activity)),
                )
                .unwrap();
            writer.flush().unwrap();
        }

        let expected = ActivityRow {
            milestone_year,
            run_description,
            asset_id: asset.id(),
            process_id: asset.process_id().clone(),
            region_id: asset.region_id().clone(),
            time_slice,
            activity,
        };
        let records: Vec<ActivityRow> = csv::Reader::from_path(dir.path().join(ACTIVITY_FILE_NAME))
            .unwrap()
            .into_deserialize()
            .try_collect()
            .unwrap();
        assert_equal(records, iter::once(expected));
    }

    #[rstest]
    fn test_write_solver_values() {
        let milestone_year = 2020;
        let run_description = "test_run".to_string();
        let objective_value = Money(1234.56);
        let dir = tempdir().unwrap();

        {
            let mut writer = DebugDataWriter::create(dir.path()).unwrap();
            writer
                .write_solver_values(milestone_year, &run_description, objective_value)
                .unwrap();
            writer.flush().unwrap();
        }

        let expected = SolverValuesRow {
            milestone_year,
            run_description,
            objective_value,
        };
        let records: Vec<SolverValuesRow> =
            csv::Reader::from_path(dir.path().join(SOLVER_VALUES_FILE_NAME))
                .unwrap()
                .into_deserialize()
                .try_collect()
                .unwrap();
        assert_equal(records, iter::once(expected));
    }

    #[rstest]
    fn test_write_appraisal_results(assets: AssetPool) {
        let milestone_year = 2020;
        let run_description = "test_run".to_string();
        let dir = tempdir().unwrap();
        let asset = assets.iter_active().next().unwrap();

        {
            let mut writer = DebugDataWriter::create(dir.path()).unwrap();
            let appraisal = AppraisalOutput {
                asset: asset.clone(),
                capacity: Capacity(42.0),
                unmet_demand: Default::default(),
                metric: 4.14,
            };
            writer
                .write_appraisal_results(milestone_year, &run_description, &[appraisal])
                .unwrap();
            writer.flush().unwrap();
        }

        let expected = AppraisalResultsRow {
            milestone_year,
            run_description,
            asset_id: asset.id(),
            process_id: asset.process_id().clone(),
            region_id: asset.region_id().clone(),
            capacity: Capacity(42.0),
            // Empty unmet demand map sums to zero
            unmet_demand: Flow(0.0),
            metric: 4.14,
        };
        let records: Vec<AppraisalResultsRow> =
            csv::Reader::from_path(dir.path().join(APPRAISAL_RESULTS_FILE_NAME))
                .unwrap()
                .into_deserialize()
                .try_collect()
                .unwrap();
        assert_equal(records, iter::once(expected));
    }

    #[test]
    fn test_create_output_directory_new_directory() {
        let temp_dir = tempdir().unwrap();
        let output_dir = temp_dir.path().join("new_output");

        let result = create_output_directory(&output_dir, false).unwrap();
        assert!(!result);
        assert!(output_dir.exists());
        assert!(output_dir.is_dir());
    }

    #[test]
    fn test_create_output_directory_existing_empty_directory() {
        let temp_dir = tempdir().unwrap();
        let output_dir = temp_dir.path().join("empty_output");

        fs::create_dir(&output_dir).unwrap();

        let result = create_output_directory(&output_dir, false).unwrap();
        assert!(!result);
        assert!(output_dir.exists());
        assert!(output_dir.is_dir());
    }

    #[test]
    fn test_create_output_directory_existing_with_files_no_overwrite() {
        let temp_dir = tempdir().unwrap();
        let output_dir = temp_dir.path().join("output_with_files");

        fs::create_dir(&output_dir).unwrap();
        fs::write(output_dir.join("existing_file.txt"), "some content").unwrap();

        let result = create_output_directory(&output_dir, false);
        assert!(result.is_err());
        assert!(
            result
                .unwrap_err()
                .to_string()
                .contains("Output folder already exists")
        );
    }

    #[test]
    fn test_create_output_directory_existing_with_files_allow_overwrite() {
        let temp_dir = tempdir().unwrap();
        let output_dir = temp_dir.path().join("output_with_files");

        fs::create_dir(&output_dir).unwrap();
        let file_path = output_dir.join("existing_file.txt");
        fs::write(&file_path, "some content").unwrap();

        let result = create_output_directory(&output_dir, true).unwrap();
        assert!(result);
        assert!(output_dir.exists());
        assert!(output_dir.is_dir());
        // The pre-existing file must have been wiped along with the folder
        assert!(!file_path.exists());
    }

    #[test]
    fn test_create_output_directory_nested_path() {
        let temp_dir = tempdir().unwrap();
        let output_dir = temp_dir.path().join("nested").join("path").join("output");

        let result = create_output_directory(&output_dir, false).unwrap();
        assert!(!result);
        assert!(output_dir.exists());
        assert!(output_dir.is_dir());
    }

    #[test]
    fn test_create_output_directory_existing_subdirs_with_files_allow_overwrite() {
        let temp_dir = tempdir().unwrap();
        let output_dir = temp_dir.path().join("output_with_subdirs");

        fs::create_dir_all(&output_dir.join("subdir")).unwrap();
        fs::write(output_dir.join("file1.txt"), "content1").unwrap();
        fs::write(output_dir.join("subdir").join("file2.txt"), "content2").unwrap();

        let result = create_output_directory(&output_dir, true).unwrap();
        assert!(result);
        assert!(output_dir.exists());
        assert!(output_dir.is_dir());
        // Both files and the subdirectory itself must be gone
        assert!(!output_dir.join("file1.txt").exists());
        assert!(!output_dir.join("subdir").exists());
    }
}