muse2/
output.rs

1//! The module responsible for writing output data to disk.
2use crate::agent::AgentID;
3use crate::asset::{Asset, AssetGroupID, AssetID, AssetRef};
4use crate::commodity::CommodityID;
5use crate::process::ProcessID;
6use crate::region::RegionID;
7use crate::simulation::CommodityPrices;
8use crate::simulation::investment::appraisal::AppraisalOutput;
9use crate::simulation::optimisation::{FlowMap, Solution};
10use crate::time_slice::TimeSliceID;
11use crate::units::{
12    Activity, Capacity, Flow, Money, MoneyPerActivity, MoneyPerCapacity, MoneyPerFlow,
13};
14use anyhow::{Context, Result, ensure};
15use csv;
16use indexmap::IndexMap;
17use itertools::Itertools;
18use serde::{Deserialize, Serialize};
19use std::fs;
20use std::fs::File;
21use std::path::{Path, PathBuf};
22
23pub mod metadata;
24use metadata::write_metadata;
25
/// The output file name for commodity flows
const COMMODITY_FLOWS_FILE_NAME: &str = "commodity_flows.csv";

/// The output file name for commodity prices
const COMMODITY_PRICES_FILE_NAME: &str = "commodity_prices.csv";

/// The output file name for assets
const ASSETS_FILE_NAME: &str = "assets.csv";

/// Debug output file for asset dispatch (per-asset activity levels and duals)
const ACTIVITY_ASSET_DISPATCH: &str = "debug_dispatch_assets.csv";

/// The output file name for commodity balance duals
const COMMODITY_BALANCE_DUALS_FILE_NAME: &str = "debug_commodity_balance_duals.csv";

/// The output file name for unmet demand values
const UNMET_DEMAND_FILE_NAME: &str = "debug_unmet_demand.csv";

/// The output file name for extra solver output values (e.g. the objective value)
const SOLVER_VALUES_FILE_NAME: &str = "debug_solver.csv";

/// The output file name for appraisal results
const APPRAISAL_RESULTS_FILE_NAME: &str = "debug_appraisal_results.csv";

/// The output file name for appraisal time slice results
const APPRAISAL_RESULTS_TIME_SLICE_FILE_NAME: &str = "debug_appraisal_results_time_slices.csv";
52
53/// Get the default output directory for the model
54pub fn get_output_dir(model_dir: &Path, results_root: PathBuf) -> Result<PathBuf> {
55    // Get the model name from the dir path. This ends up being convoluted because we need to check
56    // for all possible errors. Ugh.
57    let model_dir = model_dir
58        .canonicalize() // canonicalise in case the user has specified "."
59        .context("Could not resolve path to model")?;
60
61    let model_name = model_dir
62        .file_name()
63        .context("Model cannot be in root folder")?
64        .to_str()
65        .context("Invalid chars in model dir name")?;
66
67    // Construct path
68    Ok([results_root, model_name.into()].iter().collect())
69}
70
71/// Get the default output directory for commodity flow graphs for the model
72pub fn get_graphs_dir(model_dir: &Path, graph_results_root: PathBuf) -> Result<PathBuf> {
73    let model_dir = model_dir
74        .canonicalize() // canonicalise in case the user has specified "."
75        .context("Could not resolve path to model")?;
76    let model_name = model_dir
77        .file_name()
78        .context("Model cannot be in root folder")?
79        .to_str()
80        .context("Invalid chars in model dir name")?;
81    Ok([graph_results_root, model_name.into()].iter().collect())
82}
83
84/// Create a new output directory for the model, optionally overwriting existing data
85///
86/// # Arguments
87///
88/// * `output_dir` - The output directory to create/overwrite
89/// * `allow_overwrite` - Whether to delete and recreate the folder if it is non-empty
90///
91/// # Returns
92///
93/// True if the output dir contained existing data that was deleted, false if not, or an error.
94pub fn create_output_directory(output_dir: &Path, allow_overwrite: bool) -> Result<bool> {
95    // If the folder already exists, then delete it
96    let overwrite = if let Ok(mut it) = fs::read_dir(output_dir) {
97        if it.next().is_none() {
98            // Folder exists and is empty: nothing to do
99            return Ok(false);
100        }
101
102        ensure!(
103            allow_overwrite,
104            "Output folder already exists and is not empty. \
105            Please delete the folder or pass the --overwrite command-line option."
106        );
107
108        fs::remove_dir_all(output_dir).context("Could not delete folder")?;
109        true
110    } else {
111        false
112    };
113
114    // Try to create the directory, with parents
115    fs::create_dir_all(output_dir)?;
116
117    Ok(overwrite)
118}
119
/// Represents a row in the assets output CSV file.
///
/// Field order determines CSV column order (serde serialises fields in declaration order), so it
/// should not be changed lightly.
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct AssetRow {
    // Unique ID of the (commissioned) asset
    asset_id: AssetID,
    // Process this asset is an instance of
    process_id: ProcessID,
    // Region in which the asset operates
    region_id: RegionID,
    // Agent that owns the asset
    agent_id: AgentID,
    // Asset group, if the asset belongs to one
    group_id: Option<AssetGroupID>,
    // Year the asset was commissioned
    commission_year: u32,
    // Year the asset was decommissioned, if it has been
    decommission_year: Option<u32>,
    // Total capacity of the asset
    capacity: Capacity,
}
132
133impl AssetRow {
134    /// Create a new [`AssetRow`]
135    fn new(asset: &Asset) -> Self {
136        Self {
137            asset_id: asset.id().unwrap(),
138            process_id: asset.process_id().clone(),
139            region_id: asset.region_id().clone(),
140            agent_id: asset.agent_id().unwrap().clone(),
141            group_id: asset.group_id(),
142            commission_year: asset.commission_year(),
143            decommission_year: asset.decommission_year(),
144            capacity: asset.total_capacity(),
145        }
146    }
147}
148
/// Represents the flow-related data in a row of the commodity flows CSV file.
///
/// Field order determines CSV column order, so it should not be changed lightly.
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct CommodityFlowRow {
    // The milestone year the flow applies to
    milestone_year: u32,
    // ID of the asset the flow belongs to
    asset_id: AssetID,
    // ID of the commodity that flows
    commodity_id: CommodityID,
    // The time slice in which the flow occurs
    time_slice: TimeSliceID,
    // The flow value (sign convention not visible here — defined by `FlowMap`)
    flow: Flow,
}
158
/// Represents a row in the commodity prices CSV file
///
/// Field order determines CSV column order, so it should not be changed lightly.
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct CommodityPriceRow {
    // The milestone year the price applies to
    milestone_year: u32,
    // ID of the priced commodity
    commodity_id: CommodityID,
    // Region the price applies to
    region_id: RegionID,
    // Time slice the price applies to
    time_slice: TimeSliceID,
    // Price per unit of flow
    price: MoneyPerFlow,
}
168
/// Represents the activity in a row of the activity CSV file
///
/// Field order determines CSV column order, so it should not be changed lightly.
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct ActivityRow {
    // The milestone year of the dispatch run
    milestone_year: u32,
    // Description of the run, possibly prefixed with extra context (see `DebugDataWriter`)
    run_description: String,
    // Asset ID, if the asset has one (presumably `None` for uncommissioned assets — TODO confirm)
    asset_id: Option<AssetID>,
    process_id: ProcessID,
    region_id: RegionID,
    time_slice: TimeSliceID,
    // Activity level; `None` if the solution had no entry for this asset/time slice
    activity: Option<Activity>,
    // Dual for the activity; `None` if the solution had no entry for this asset/time slice
    activity_dual: Option<MoneyPerActivity>,
    // Dual for the column; `None` if the solution had no entry for this asset/time slice
    column_dual: Option<MoneyPerActivity>,
}
182
/// Represents the commodity balance duals data in a row of the commodity balance duals CSV file
///
/// Field order determines CSV column order, so it should not be changed lightly.
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct CommodityBalanceDualsRow {
    // The milestone year of the dispatch run
    milestone_year: u32,
    // Description of the run, possibly prefixed with extra context
    run_description: String,
    commodity_id: CommodityID,
    region_id: RegionID,
    time_slice: TimeSliceID,
    // Dual value of the commodity balance constraint
    value: MoneyPerFlow,
}
193
/// Represents the unmet demand data in a row of the unmet demand CSV file
///
/// Field order determines CSV column order, so it should not be changed lightly.
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct UnmetDemandRow {
    // The milestone year of the dispatch run
    milestone_year: u32,
    // Description of the run, possibly prefixed with extra context
    run_description: String,
    commodity_id: CommodityID,
    region_id: RegionID,
    time_slice: TimeSliceID,
    // Amount of demand left unmet
    value: Flow,
}
204
/// Represents solver output values
///
/// Field order determines CSV column order, so it should not be changed lightly.
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct SolverValuesRow {
    // The milestone year of the dispatch run
    milestone_year: u32,
    // Description of the run, possibly prefixed with extra context
    run_description: String,
    // The solver's objective value
    objective_value: Money,
}
212
/// Represents the appraisal results in a row of the appraisal results CSV file
///
/// Field order determines CSV column order, so it should not be changed lightly.
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct AppraisalResultsRow {
    // The milestone year of the appraisal
    milestone_year: u32,
    // Description of the run, possibly prefixed with extra context
    run_description: String,
    // Asset ID, if the asset has one (presumably `None` for candidate assets — TODO confirm)
    asset_id: Option<AssetID>,
    process_id: ProcessID,
    region_id: RegionID,
    // Total capacity considered by the appraisal
    capacity: Capacity,
    // Cost coefficient per unit of capacity
    capacity_coefficient: MoneyPerCapacity,
    // The appraisal metric value, if one was computed
    metric: Option<f64>,
}
225
/// Represents a row in the per-time-slice appraisal results CSV file
///
/// Field order determines CSV column order, so it should not be changed lightly.
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct AppraisalResultsTimeSliceRow {
    // The milestone year of the appraisal
    milestone_year: u32,
    // Description of the run, possibly prefixed with extra context
    run_description: String,
    // Asset ID, if the asset has one (presumably `None` for candidate assets — TODO confirm)
    asset_id: Option<AssetID>,
    process_id: ProcessID,
    region_id: RegionID,
    time_slice: TimeSliceID,
    // Activity level for this time slice
    activity: Activity,
    // Cost coefficient per unit of activity for this time slice
    activity_coefficient: MoneyPerActivity,
    // Demand for this time slice
    demand: Flow,
    // Demand left unmet for this time slice
    unmet_demand: Flow,
}
240
/// For writing extra debug information about the model
///
/// Each writer targets one of the `debug_*.csv` output files.
struct DebugDataWriter {
    // Optional context string prepended to every row's run description
    context: Option<String>,
    // Writer for debug_commodity_balance_duals.csv
    commodity_balance_duals_writer: csv::Writer<File>,
    // Writer for debug_unmet_demand.csv
    unmet_demand_writer: csv::Writer<File>,
    // Writer for debug_solver.csv
    solver_values_writer: csv::Writer<File>,
    // Writer for debug_appraisal_results.csv
    appraisal_results_writer: csv::Writer<File>,
    // Writer for debug_appraisal_results_time_slices.csv
    appraisal_results_time_slice_writer: csv::Writer<File>,
    // Writer for debug_dispatch_assets.csv
    dispatch_asset_writer: csv::Writer<File>,
}
251
252impl DebugDataWriter {
253    /// Open CSV files to write debug info to
254    ///
255    /// # Arguments
256    ///
257    /// * `output_path` - Folder where files will be saved
258    fn create(output_path: &Path) -> Result<Self> {
259        let new_writer = |file_name| {
260            let file_path = output_path.join(file_name);
261            csv::Writer::from_path(file_path)
262        };
263
264        Ok(Self {
265            context: None,
266            commodity_balance_duals_writer: new_writer(COMMODITY_BALANCE_DUALS_FILE_NAME)?,
267            unmet_demand_writer: new_writer(UNMET_DEMAND_FILE_NAME)?,
268            solver_values_writer: new_writer(SOLVER_VALUES_FILE_NAME)?,
269            appraisal_results_writer: new_writer(APPRAISAL_RESULTS_FILE_NAME)?,
270            appraisal_results_time_slice_writer: new_writer(
271                APPRAISAL_RESULTS_TIME_SLICE_FILE_NAME,
272            )?,
273            dispatch_asset_writer: new_writer(ACTIVITY_ASSET_DISPATCH)?,
274        })
275    }
276
277    /// Prepend the current context to the run description
278    fn with_context(&self, run_description: &str) -> String {
279        if let Some(context) = &self.context {
280            format!("{context}; {run_description}")
281        } else {
282            run_description.to_string()
283        }
284    }
285
286    /// Write debug info about the dispatch optimisation
287    fn write_dispatch_debug_info(
288        &mut self,
289        milestone_year: u32,
290        run_description: &str,
291        solution: &Solution,
292    ) -> Result<()> {
293        self.write_activity(
294            milestone_year,
295            run_description,
296            solution.iter_activity(),
297            solution.iter_activity_duals(),
298            solution.iter_column_duals(),
299        )?;
300        self.write_commodity_balance_duals(
301            milestone_year,
302            run_description,
303            solution.iter_commodity_balance_duals(),
304        )?;
305        self.write_unmet_demand(
306            milestone_year,
307            run_description,
308            solution.iter_unmet_demand(),
309        )?;
310        self.write_solver_values(milestone_year, run_description, solution.objective_value)?;
311        Ok(())
312    }
313
314    // Write activity to file
315    fn write_activity<'a, I, J, K>(
316        &mut self,
317        milestone_year: u32,
318        run_description: &str,
319        iter_activity: I,
320        iter_activity_duals: J,
321        iter_column_duals: K,
322    ) -> Result<()>
323    where
324        I: Iterator<Item = (&'a AssetRef, &'a TimeSliceID, Activity)>,
325        J: Iterator<Item = (&'a AssetRef, &'a TimeSliceID, MoneyPerActivity)>,
326        K: Iterator<Item = (&'a AssetRef, &'a TimeSliceID, MoneyPerActivity)>,
327    {
328        // To account for different order of entries or missing ones, we first compile data in hash map
329        type CompiledActivityData = (
330            Option<Activity>,
331            Option<MoneyPerActivity>,
332            Option<MoneyPerActivity>,
333        );
334        let mut map: IndexMap<(&AssetRef, &TimeSliceID), CompiledActivityData> = IndexMap::new();
335
336        // For the activities
337        for (asset, time_slice, activity) in iter_activity {
338            map.entry((asset, time_slice)).or_default().0 = Some(activity);
339        }
340        // The activity duals
341        for (asset, time_slice, activity_dual) in iter_activity_duals {
342            map.entry((asset, time_slice)).or_default().1 = Some(activity_dual);
343        }
344        // And the column duals
345        for (asset, time_slice, column_dual) in iter_column_duals {
346            map.entry((asset, time_slice)).or_default().2 = Some(column_dual);
347        }
348
349        for (asset, time_slice, activity, activity_dual, column_dual) in
350            map.iter()
351                .map(|(&(agent, ts), &(activity, activity_dual, column_dual))| {
352                    (agent, ts, activity, activity_dual, column_dual)
353                })
354        {
355            let row = ActivityRow {
356                milestone_year,
357                run_description: self.with_context(run_description),
358                asset_id: asset.id(),
359                process_id: asset.process_id().clone(),
360                region_id: asset.region_id().clone(),
361                time_slice: time_slice.clone(),
362                activity,
363                activity_dual,
364                column_dual,
365            };
366            self.dispatch_asset_writer.serialize(row)?;
367        }
368
369        Ok(())
370    }
371
372    /// Write commodity balance duals to file
373    fn write_commodity_balance_duals<'a, I>(
374        &mut self,
375        milestone_year: u32,
376        run_description: &str,
377        iter: I,
378    ) -> Result<()>
379    where
380        I: Iterator<Item = (&'a CommodityID, &'a RegionID, &'a TimeSliceID, MoneyPerFlow)>,
381    {
382        for (commodity_id, region_id, time_slice, value) in iter {
383            let row = CommodityBalanceDualsRow {
384                milestone_year,
385                run_description: self.with_context(run_description),
386                commodity_id: commodity_id.clone(),
387                region_id: region_id.clone(),
388                time_slice: time_slice.clone(),
389                value,
390            };
391            self.commodity_balance_duals_writer.serialize(row)?;
392        }
393
394        Ok(())
395    }
396
397    /// Write unmet demand values to file
398    fn write_unmet_demand<'a, I>(
399        &mut self,
400        milestone_year: u32,
401        run_description: &str,
402        iter: I,
403    ) -> Result<()>
404    where
405        I: Iterator<Item = (&'a CommodityID, &'a RegionID, &'a TimeSliceID, Flow)>,
406    {
407        for (commodity_id, region_id, time_slice, value) in iter {
408            let row = UnmetDemandRow {
409                milestone_year,
410                run_description: self.with_context(run_description),
411                commodity_id: commodity_id.clone(),
412                region_id: region_id.clone(),
413                time_slice: time_slice.clone(),
414                value,
415            };
416            self.unmet_demand_writer.serialize(row)?;
417        }
418
419        Ok(())
420    }
421
422    /// Write additional solver output values to file
423    fn write_solver_values(
424        &mut self,
425        milestone_year: u32,
426        run_description: &str,
427        objective_value: Money,
428    ) -> Result<()> {
429        let row = SolverValuesRow {
430            milestone_year,
431            run_description: self.with_context(run_description),
432            objective_value,
433        };
434        self.solver_values_writer.serialize(row)?;
435        self.solver_values_writer.flush()?;
436
437        Ok(())
438    }
439
440    /// Write appraisal results to file
441    fn write_appraisal_results(
442        &mut self,
443        milestone_year: u32,
444        run_description: &str,
445        appraisal_results: &[AppraisalOutput],
446    ) -> Result<()> {
447        for result in appraisal_results {
448            let row = AppraisalResultsRow {
449                milestone_year,
450                run_description: self.with_context(run_description),
451                asset_id: result.asset.id(),
452                process_id: result.asset.process_id().clone(),
453                region_id: result.asset.region_id().clone(),
454                capacity: result.capacity.total_capacity(),
455                capacity_coefficient: result.coefficients.capacity_coefficient,
456                metric: result.metric.as_ref().map(|m| m.value()),
457            };
458            self.appraisal_results_writer.serialize(row)?;
459        }
460
461        Ok(())
462    }
463
464    /// Write appraisal results to file
465    fn write_appraisal_time_slice_results(
466        &mut self,
467        milestone_year: u32,
468        run_description: &str,
469        appraisal_results: &[AppraisalOutput],
470        demand: &IndexMap<TimeSliceID, Flow>,
471    ) -> Result<()> {
472        for result in appraisal_results {
473            for (time_slice, activity) in &result.activity {
474                let activity_coefficient = result.coefficients.activity_coefficients[time_slice];
475                let demand = demand[time_slice];
476                let unmet_demand = result.unmet_demand[time_slice];
477                let row = AppraisalResultsTimeSliceRow {
478                    milestone_year,
479                    run_description: self.with_context(run_description),
480                    asset_id: result.asset.id(),
481                    process_id: result.asset.process_id().clone(),
482                    region_id: result.asset.region_id().clone(),
483                    time_slice: time_slice.clone(),
484                    activity: *activity,
485                    activity_coefficient,
486                    demand,
487                    unmet_demand,
488                };
489                self.appraisal_results_time_slice_writer.serialize(row)?;
490            }
491        }
492
493        Ok(())
494    }
495
496    /// Flush the underlying streams
497    fn flush(&mut self) -> Result<()> {
498        self.commodity_balance_duals_writer.flush()?;
499        self.unmet_demand_writer.flush()?;
500        self.solver_values_writer.flush()?;
501        self.appraisal_results_writer.flush()?;
502        self.appraisal_results_time_slice_writer.flush()?;
503        self.dispatch_asset_writer.flush()?;
504
505        Ok(())
506    }
507}
508
/// An object for writing simulation output data (assets, commodity flows and prices) to CSV files
pub struct DataWriter {
    // Path to assets.csv; the file is recreated from scratch on every `write_assets` call
    assets_path: PathBuf,
    // Writer for commodity_flows.csv
    flows_writer: csv::Writer<File>,
    // Writer for commodity_prices.csv
    prices_writer: csv::Writer<File>,
    // Writers for the extra debug CSV files; `None` unless debug output was requested
    debug_writer: Option<DebugDataWriter>,
}
516
517impl DataWriter {
518    /// Open CSV files to write output data to
519    ///
520    /// # Arguments
521    ///
522    /// * `output_path` - Folder where files will be saved
523    /// * `model_path` - Path to input model
524    /// * `save_debug_info` - Whether to include extra CSV files for debugging model
525    pub fn create(output_path: &Path, model_path: &Path, save_debug_info: bool) -> Result<Self> {
526        write_metadata(output_path, model_path).context("Failed to save metadata")?;
527
528        let new_writer = |file_name| {
529            let file_path = output_path.join(file_name);
530            csv::Writer::from_path(file_path)
531        };
532
533        let debug_writer = if save_debug_info {
534            // Create debug CSV files
535            Some(DebugDataWriter::create(output_path)?)
536        } else {
537            None
538        };
539
540        Ok(Self {
541            assets_path: output_path.join(ASSETS_FILE_NAME),
542            flows_writer: new_writer(COMMODITY_FLOWS_FILE_NAME)?,
543            prices_writer: new_writer(COMMODITY_PRICES_FILE_NAME)?,
544            debug_writer,
545        })
546    }
547
548    /// Write debug info about the dispatch optimisation
549    pub fn write_dispatch_debug_info(
550        &mut self,
551        milestone_year: u32,
552        run_description: &str,
553        solution: &Solution,
554    ) -> Result<()> {
555        if let Some(wtr) = &mut self.debug_writer {
556            wtr.write_dispatch_debug_info(milestone_year, run_description, solution)?;
557        }
558
559        Ok(())
560    }
561
562    /// Write debug info about the investment appraisal
563    pub fn write_appraisal_debug_info(
564        &mut self,
565        milestone_year: u32,
566        run_description: &str,
567        appraisal_results: &[AppraisalOutput],
568        demand: &IndexMap<TimeSliceID, Flow>,
569    ) -> Result<()> {
570        if let Some(wtr) = &mut self.debug_writer {
571            wtr.write_appraisal_results(milestone_year, run_description, appraisal_results)?;
572            wtr.write_appraisal_time_slice_results(
573                milestone_year,
574                run_description,
575                appraisal_results,
576                demand,
577            )?;
578        }
579
580        Ok(())
581    }
582
583    /// Write assets to a CSV file.
584    ///
585    /// The whole file is written at once and is overwritten with subsequent invocations. This is
586    /// done so that partial results will be written in the case of errors and so that the user can
587    /// see the results while the simulation is still running.
588    ///
589    /// The file is sorted by asset ID.
590    ///
591    /// # Panics
592    ///
593    /// Panics if any of the assets has not yet been commissioned (decommissioned assets are fine).
594    pub fn write_assets<'a, I>(&mut self, assets: I) -> Result<()>
595    where
596        I: Iterator<Item = &'a AssetRef>,
597    {
598        let mut writer = csv::Writer::from_path(&self.assets_path)?;
599        for asset in assets.sorted() {
600            let row = AssetRow::new(asset);
601            writer.serialize(row)?;
602        }
603        writer.flush()?;
604
605        Ok(())
606    }
607
608    /// Write commodity flows to a CSV file
609    pub fn write_flows(&mut self, milestone_year: u32, flow_map: &FlowMap) -> Result<()> {
610        for ((asset, commodity_id, time_slice), flow) in flow_map {
611            let row = CommodityFlowRow {
612                milestone_year,
613                asset_id: asset.id().unwrap(),
614                commodity_id: commodity_id.clone(),
615                time_slice: time_slice.clone(),
616                flow: *flow,
617            };
618            self.flows_writer.serialize(row)?;
619        }
620
621        Ok(())
622    }
623
624    /// Write commodity prices to a CSV file
625    pub fn write_prices(&mut self, milestone_year: u32, prices: &CommodityPrices) -> Result<()> {
626        for (commodity_id, region_id, time_slice, price) in prices.iter() {
627            let row = CommodityPriceRow {
628                milestone_year,
629                commodity_id: commodity_id.clone(),
630                region_id: region_id.clone(),
631                time_slice: time_slice.clone(),
632                price,
633            };
634            self.prices_writer.serialize(row)?;
635        }
636
637        Ok(())
638    }
639
640    /// Flush the underlying streams
641    pub fn flush(&mut self) -> Result<()> {
642        self.flows_writer.flush()?;
643        self.prices_writer.flush()?;
644        if let Some(wtr) = &mut self.debug_writer {
645            wtr.flush()?;
646        }
647
648        Ok(())
649    }
650
651    /// Add context to the debug writer
652    pub fn set_debug_context(&mut self, context: String) {
653        if let Some(wtr) = &mut self.debug_writer {
654            wtr.context = Some(context);
655        }
656    }
657
658    /// Clear context from the debug writer
659    pub fn clear_debug_context(&mut self) {
660        if let Some(wtr) = &mut self.debug_writer {
661            wtr.context = None;
662        }
663    }
664}
665
666#[cfg(test)]
667mod tests {
668    use super::*;
669    use crate::asset::AssetPool;
670    use crate::fixture::{appraisal_output, asset, assets, commodity_id, region_id, time_slice};
671    use crate::simulation::investment::appraisal::AppraisalOutput;
672    use crate::time_slice::TimeSliceID;
673    use indexmap::indexmap;
674    use itertools::{Itertools, assert_equal};
675    use rstest::rstest;
676    use std::iter;
677    use tempfile::tempdir;
678
679    #[rstest]
680    fn write_assets(assets: AssetPool) {
681        let dir = tempdir().unwrap();
682
683        // Write an asset
684        {
685            let mut writer = DataWriter::create(dir.path(), dir.path(), false).unwrap();
686            writer.write_assets(assets.iter()).unwrap();
687            writer.flush().unwrap();
688        }
689
690        // Read back and compare
691        let asset = assets.iter().next().unwrap();
692        let expected = AssetRow::new(asset);
693        let records: Vec<AssetRow> = csv::Reader::from_path(dir.path().join(ASSETS_FILE_NAME))
694            .unwrap()
695            .into_deserialize()
696            .try_collect()
697            .unwrap();
698        assert_equal(records, iter::once(expected));
699    }
700
701    #[rstest]
702    fn write_flows(assets: AssetPool, commodity_id: CommodityID, time_slice: TimeSliceID) {
703        let milestone_year = 2020;
704        let asset = assets.iter().next().unwrap();
705        let flow_map = indexmap! {
706            (asset.clone(), commodity_id.clone(), time_slice.clone()) => Flow(42.0)
707        };
708
709        // Write a flow
710        let dir = tempdir().unwrap();
711        {
712            let mut writer = DataWriter::create(dir.path(), dir.path(), false).unwrap();
713            writer.write_flows(milestone_year, &flow_map).unwrap();
714            writer.flush().unwrap();
715        }
716
717        // Read back and compare
718        let expected = CommodityFlowRow {
719            milestone_year,
720            asset_id: asset.id().unwrap(),
721            commodity_id,
722            time_slice,
723            flow: Flow(42.0),
724        };
725        let records: Vec<CommodityFlowRow> =
726            csv::Reader::from_path(dir.path().join(COMMODITY_FLOWS_FILE_NAME))
727                .unwrap()
728                .into_deserialize()
729                .try_collect()
730                .unwrap();
731        assert_equal(records, iter::once(expected));
732    }
733
    /// Round-trip test: write one commodity price and read it back from the CSV file
    #[rstest]
    fn write_prices(commodity_id: CommodityID, region_id: RegionID, time_slice: TimeSliceID) {
        let milestone_year = 2020;
        let price = MoneyPerFlow(42.0);
        let mut prices = CommodityPrices::default();
        prices.insert(&commodity_id, &region_id, &time_slice, price);

        let dir = tempdir().unwrap();

        // Write a price (writer dropped at end of scope, closing the file)
        {
            let mut writer = DataWriter::create(dir.path(), dir.path(), false).unwrap();
            writer.write_prices(milestone_year, &prices).unwrap();
            writer.flush().unwrap();
        }

        // Read back and compare
        let expected = CommodityPriceRow {
            milestone_year,
            commodity_id,
            region_id,
            time_slice,
            price,
        };
        let records: Vec<CommodityPriceRow> =
            csv::Reader::from_path(dir.path().join(COMMODITY_PRICES_FILE_NAME))
                .unwrap()
                .into_deserialize()
                .try_collect()
                .unwrap();
        assert_equal(records, iter::once(expected));
    }
766
    /// Round-trip test: write one commodity balance dual and read it back from the CSV file
    #[rstest]
    fn write_commodity_balance_duals(
        commodity_id: CommodityID,
        region_id: RegionID,
        time_slice: TimeSliceID,
    ) {
        let milestone_year = 2020;
        let run_description = "test_run".to_string();
        let value = MoneyPerFlow(0.5);
        let dir = tempdir().unwrap();

        // Write commodity balance dual (writer dropped at end of scope, closing the file)
        {
            let mut writer = DebugDataWriter::create(dir.path()).unwrap();
            writer
                .write_commodity_balance_duals(
                    milestone_year,
                    &run_description,
                    iter::once((&commodity_id, &region_id, &time_slice, value)),
                )
                .unwrap();
            writer.flush().unwrap();
        }

        // Read back and compare; with no context set, the run description is passed through as-is
        let expected = CommodityBalanceDualsRow {
            milestone_year,
            run_description,
            commodity_id,
            region_id,
            time_slice,
            value,
        };
        let records: Vec<CommodityBalanceDualsRow> =
            csv::Reader::from_path(dir.path().join(COMMODITY_BALANCE_DUALS_FILE_NAME))
                .unwrap()
                .into_deserialize()
                .try_collect()
                .unwrap();
        assert_equal(records, iter::once(expected));
    }
808
809    #[rstest]
810    fn write_unmet_demand(commodity_id: CommodityID, region_id: RegionID, time_slice: TimeSliceID) {
811        let milestone_year = 2020;
812        let run_description = "test_run".to_string();
813        let value = Flow(0.5);
814        let dir = tempdir().unwrap();
815
816        // Write unmet demand
817        {
818            let mut writer = DebugDataWriter::create(dir.path()).unwrap();
819            writer
820                .write_unmet_demand(
821                    milestone_year,
822                    &run_description,
823                    iter::once((&commodity_id, &region_id, &time_slice, value)),
824                )
825                .unwrap();
826            writer.flush().unwrap();
827        }
828
829        // Read back and compare
830        let expected = UnmetDemandRow {
831            milestone_year,
832            run_description,
833            commodity_id,
834            region_id,
835            time_slice,
836            value,
837        };
838        let records: Vec<UnmetDemandRow> =
839            csv::Reader::from_path(dir.path().join(UNMET_DEMAND_FILE_NAME))
840                .unwrap()
841                .into_deserialize()
842                .try_collect()
843                .unwrap();
844        assert_equal(records, iter::once(expected));
845    }
846
    /// Round-trip test: write one activity entry (with both duals present) and read it back
    #[rstest]
    fn write_activity(assets: AssetPool, time_slice: TimeSliceID) {
        let milestone_year = 2020;
        let run_description = "test_run".to_string();
        let activity = Activity(100.5);
        let activity_dual = MoneyPerActivity(-1.5);
        let column_dual = MoneyPerActivity(5.0);
        let dir = tempdir().unwrap();
        let asset = assets.iter().next().unwrap();

        // Write activity (writer dropped at end of scope, closing the file)
        {
            let mut writer = DebugDataWriter::create(dir.path()).unwrap();
            writer
                .write_activity(
                    milestone_year,
                    &run_description,
                    iter::once((asset, &time_slice, activity)),
                    iter::once((asset, &time_slice, activity_dual)),
                    iter::once((asset, &time_slice, column_dual)),
                )
                .unwrap();
            writer.flush().unwrap();
        }

        // Read back and compare; all three optional values should be present
        let expected = ActivityRow {
            milestone_year,
            run_description,
            asset_id: asset.id(),
            process_id: asset.process_id().clone(),
            region_id: asset.region_id().clone(),
            time_slice,
            activity: Some(activity),
            activity_dual: Some(activity_dual),
            column_dual: Some(column_dual),
        };
        let records: Vec<ActivityRow> =
            csv::Reader::from_path(dir.path().join(ACTIVITY_ASSET_DISPATCH))
                .unwrap()
                .into_deserialize()
                .try_collect()
                .unwrap();
        assert_equal(records, iter::once(expected));
    }
892
893    #[rstest]
894    fn write_activity_with_missing_keys(assets: AssetPool, time_slice: TimeSliceID) {
895        let milestone_year = 2020;
896        let run_description = "test_run".to_string();
897        let activity = Activity(100.5);
898        let dir = tempdir().unwrap();
899        let asset = assets.iter().next().unwrap();
900
901        // Write activity
902        {
903            let mut writer = DebugDataWriter::create(dir.path()).unwrap();
904            writer
905                .write_activity(
906                    milestone_year,
907                    &run_description,
908                    iter::once((asset, &time_slice, activity)),
909                    iter::empty::<(&AssetRef, &TimeSliceID, MoneyPerActivity)>(),
910                    iter::empty::<(&AssetRef, &TimeSliceID, MoneyPerActivity)>(),
911                )
912                .unwrap();
913            writer.flush().unwrap();
914        }
915
916        // Read back and compare
917        let expected = ActivityRow {
918            milestone_year,
919            run_description,
920            asset_id: asset.id(),
921            process_id: asset.process_id().clone(),
922            region_id: asset.region_id().clone(),
923            time_slice,
924            activity: Some(activity),
925            activity_dual: None,
926            column_dual: None,
927        };
928        let records: Vec<ActivityRow> =
929            csv::Reader::from_path(dir.path().join(ACTIVITY_ASSET_DISPATCH))
930                .unwrap()
931                .into_deserialize()
932                .try_collect()
933                .unwrap();
934        assert_equal(records, iter::once(expected));
935    }
936
937    #[rstest]
938    fn write_solver_values() {
939        let milestone_year = 2020;
940        let run_description = "test_run".to_string();
941        let objective_value = Money(1234.56);
942        let dir = tempdir().unwrap();
943
944        // Write solver values
945        {
946            let mut writer = DebugDataWriter::create(dir.path()).unwrap();
947            writer
948                .write_solver_values(milestone_year, &run_description, objective_value)
949                .unwrap();
950            writer.flush().unwrap();
951        }
952
953        // Read back and compare
954        let expected = SolverValuesRow {
955            milestone_year,
956            run_description,
957            objective_value,
958        };
959        let records: Vec<SolverValuesRow> =
960            csv::Reader::from_path(dir.path().join(SOLVER_VALUES_FILE_NAME))
961                .unwrap()
962                .into_deserialize()
963                .try_collect()
964                .unwrap();
965        assert_equal(records, iter::once(expected));
966    }
967
968    #[rstest]
969    fn write_appraisal_results(asset: Asset, appraisal_output: AppraisalOutput) {
970        let milestone_year = 2020;
971        let run_description = "test_run".to_string();
972        let dir = tempdir().unwrap();
973
974        // Write appraisal results
975        {
976            let mut writer = DebugDataWriter::create(dir.path()).unwrap();
977            writer
978                .write_appraisal_results(milestone_year, &run_description, &[appraisal_output])
979                .unwrap();
980            writer.flush().unwrap();
981        }
982
983        // Read back and compare
984        let expected = AppraisalResultsRow {
985            milestone_year,
986            run_description,
987            asset_id: None,
988            process_id: asset.process_id().clone(),
989            region_id: asset.region_id().clone(),
990            capacity: Capacity(42.0),
991            capacity_coefficient: MoneyPerCapacity(2.14),
992            metric: Some(4.14),
993        };
994        let records: Vec<AppraisalResultsRow> =
995            csv::Reader::from_path(dir.path().join(APPRAISAL_RESULTS_FILE_NAME))
996                .unwrap()
997                .into_deserialize()
998                .try_collect()
999                .unwrap();
1000        assert_equal(records, iter::once(expected));
1001    }
1002
1003    #[rstest]
1004    fn write_appraisal_time_slice_results(
1005        asset: Asset,
1006        appraisal_output: AppraisalOutput,
1007        time_slice: TimeSliceID,
1008    ) {
1009        let milestone_year = 2020;
1010        let run_description = "test_run".to_string();
1011        let dir = tempdir().unwrap();
1012        let demand = indexmap! {time_slice.clone() => Flow(100.0) };
1013
1014        // Write appraisal time slice results
1015        {
1016            let mut writer = DebugDataWriter::create(dir.path()).unwrap();
1017            writer
1018                .write_appraisal_time_slice_results(
1019                    milestone_year,
1020                    &run_description,
1021                    &[appraisal_output],
1022                    &demand,
1023                )
1024                .unwrap();
1025            writer.flush().unwrap();
1026        }
1027
1028        // Read back and compare
1029        let expected = AppraisalResultsTimeSliceRow {
1030            milestone_year,
1031            run_description,
1032            asset_id: None,
1033            process_id: asset.process_id().clone(),
1034            region_id: asset.region_id().clone(),
1035            time_slice: time_slice.clone(),
1036            activity: Activity(10.0),
1037            activity_coefficient: MoneyPerActivity(0.5),
1038            demand: Flow(100.0),
1039            unmet_demand: Flow(5.0),
1040        };
1041        let records: Vec<AppraisalResultsTimeSliceRow> =
1042            csv::Reader::from_path(dir.path().join(APPRAISAL_RESULTS_TIME_SLICE_FILE_NAME))
1043                .unwrap()
1044                .into_deserialize()
1045                .try_collect()
1046                .unwrap();
1047        assert_equal(records, iter::once(expected));
1048    }
1049
1050    #[test]
1051    fn create_output_directory_new_directory() {
1052        let temp_dir = tempdir().unwrap();
1053        let output_dir = temp_dir.path().join("new_output");
1054
1055        // Create a new directory should succeed and return false (no overwrite)
1056        let result = create_output_directory(&output_dir, false).unwrap();
1057        assert!(!result);
1058        assert!(output_dir.exists());
1059        assert!(output_dir.is_dir());
1060    }
1061
1062    #[test]
1063    fn create_output_directory_existing_empty_directory() {
1064        let temp_dir = tempdir().unwrap();
1065        let output_dir = temp_dir.path().join("empty_output");
1066
1067        // Create the directory first
1068        fs::create_dir(&output_dir).unwrap();
1069
1070        // Creating again should succeed and return false (no overwrite needed)
1071        let result = create_output_directory(&output_dir, false).unwrap();
1072        assert!(!result);
1073        assert!(output_dir.exists());
1074        assert!(output_dir.is_dir());
1075    }
1076
1077    #[test]
1078    fn create_output_directory_existing_with_files_no_overwrite() {
1079        let temp_dir = tempdir().unwrap();
1080        let output_dir = temp_dir.path().join("output_with_files");
1081
1082        // Create directory with a file
1083        fs::create_dir(&output_dir).unwrap();
1084        fs::write(output_dir.join("existing_file.txt"), "some content").unwrap();
1085
1086        // Should fail when allow_overwrite is false
1087        let result = create_output_directory(&output_dir, false);
1088        assert!(result.is_err());
1089        assert!(
1090            result
1091                .unwrap_err()
1092                .to_string()
1093                .contains("Output folder already exists")
1094        );
1095    }
1096
1097    #[test]
1098    fn create_output_directory_existing_with_files_allow_overwrite() {
1099        let temp_dir = tempdir().unwrap();
1100        let output_dir = temp_dir.path().join("output_with_files");
1101
1102        // Create directory with a file
1103        fs::create_dir(&output_dir).unwrap();
1104        let file_path = output_dir.join("existing_file.txt");
1105        fs::write(&file_path, "some content").unwrap();
1106
1107        // Should succeed when allow_overwrite is true and return true (overwrite occurred)
1108        let result = create_output_directory(&output_dir, true).unwrap();
1109        assert!(result);
1110        assert!(output_dir.exists());
1111        assert!(output_dir.is_dir());
1112        assert!(!file_path.exists()); // File should be gone
1113    }
1114
1115    #[test]
1116    fn create_output_directory_nested_path() {
1117        let temp_dir = tempdir().unwrap();
1118        let output_dir = temp_dir.path().join("nested").join("path").join("output");
1119
1120        // Should create nested directories and return false (no overwrite)
1121        let result = create_output_directory(&output_dir, false).unwrap();
1122        assert!(!result);
1123        assert!(output_dir.exists());
1124        assert!(output_dir.is_dir());
1125    }
1126
1127    #[test]
1128    fn create_output_directory_existing_subdirs_with_files_allow_overwrite() {
1129        let temp_dir = tempdir().unwrap();
1130        let output_dir = temp_dir.path().join("output_with_subdirs");
1131
1132        // Create directory structure with files
1133        fs::create_dir_all(output_dir.join("subdir")).unwrap();
1134        fs::write(output_dir.join("file1.txt"), "content1").unwrap();
1135        fs::write(output_dir.join("subdir").join("file2.txt"), "content2").unwrap();
1136
1137        // Should succeed when allow_overwrite is true and return true (overwrite occurred)
1138        let result = create_output_directory(&output_dir, true).unwrap();
1139        assert!(result);
1140        assert!(output_dir.exists());
1141        assert!(output_dir.is_dir());
1142        // All previous content should be gone
1143        assert!(!output_dir.join("file1.txt").exists());
1144        assert!(!output_dir.join("subdir").exists());
1145    }
1146}