muse2/
output.rs

1//! The module responsible for writing output data to disk.
2use crate::agent::AgentID;
3use crate::asset::{Asset, AssetGroupID, AssetID, AssetRef};
4use crate::commodity::CommodityID;
5use crate::process::ProcessID;
6use crate::region::RegionID;
7use crate::simulation::CommodityPrices;
8use crate::simulation::investment::appraisal::AppraisalOutput;
9use crate::simulation::optimisation::{FlowMap, Solution};
10use crate::time_slice::TimeSliceID;
11use crate::units::{
12    Activity, Capacity, Flow, Money, MoneyPerActivity, MoneyPerCapacity, MoneyPerFlow,
13};
14use anyhow::{Context, Result, ensure};
15use csv;
16use indexmap::IndexMap;
17use itertools::Itertools;
18use serde::{Deserialize, Serialize};
19use std::fs;
20use std::fs::File;
21use std::path::{Path, PathBuf};
22
23pub mod metadata;
24use metadata::write_metadata;
25
// File names for the CSV outputs written by this module. Files prefixed with
// "debug_" are only produced when extra debug output is requested (see the
// `save_debug_info` flag on `DataWriter::create`).

/// The output file name for commodity flows
const COMMODITY_FLOWS_FILE_NAME: &str = "commodity_flows.csv";

/// The output file name for commodity prices
const COMMODITY_PRICES_FILE_NAME: &str = "commodity_prices.csv";

/// The output file name for assets
const ASSETS_FILE_NAME: &str = "assets.csv";

/// Debug output file for asset dispatch
const ACTIVITY_ASSET_DISPATCH: &str = "debug_dispatch_assets.csv";

/// The output file name for commodity balance duals
const COMMODITY_BALANCE_DUALS_FILE_NAME: &str = "debug_commodity_balance_duals.csv";

/// The output file name for unmet demand values
const UNMET_DEMAND_FILE_NAME: &str = "debug_unmet_demand.csv";

/// The output file name for extra solver output values
const SOLVER_VALUES_FILE_NAME: &str = "debug_solver.csv";

/// The output file name for appraisal results
const APPRAISAL_RESULTS_FILE_NAME: &str = "debug_appraisal_results.csv";

/// The output file name for appraisal time slice results
const APPRAISAL_RESULTS_TIME_SLICE_FILE_NAME: &str = "debug_appraisal_results_time_slices.csv";
52
53/// Get the default output directory for the model
54pub fn get_output_dir(model_dir: &Path, results_root: PathBuf) -> Result<PathBuf> {
55    // Get the model name from the dir path. This ends up being convoluted because we need to check
56    // for all possible errors. Ugh.
57    let model_dir = model_dir
58        .canonicalize() // canonicalise in case the user has specified "."
59        .context("Could not resolve path to model")?;
60
61    let model_name = model_dir
62        .file_name()
63        .context("Model cannot be in root folder")?
64        .to_str()
65        .context("Invalid chars in model dir name")?;
66
67    // Construct path
68    Ok([results_root, model_name.into()].iter().collect())
69}
70
71/// Get the default output directory for commodity flow graphs for the model
72pub fn get_graphs_dir(model_dir: &Path, graph_results_root: PathBuf) -> Result<PathBuf> {
73    let model_dir = model_dir
74        .canonicalize() // canonicalise in case the user has specified "."
75        .context("Could not resolve path to model")?;
76    let model_name = model_dir
77        .file_name()
78        .context("Model cannot be in root folder")?
79        .to_str()
80        .context("Invalid chars in model dir name")?;
81    Ok([graph_results_root, model_name.into()].iter().collect())
82}
83
84/// Create a new output directory for the model, optionally overwriting existing data
85///
86/// # Arguments
87///
88/// * `output_dir` - The output directory to create/overwrite
89/// * `allow_overwrite` - Whether to delete and recreate the folder if it is non-empty
90///
91/// # Returns
92///
93/// True if the output dir contained existing data that was deleted, false if not, or an error.
94pub fn create_output_directory(output_dir: &Path, allow_overwrite: bool) -> Result<bool> {
95    // If the folder already exists, then delete it
96    let overwrite = if let Ok(mut it) = fs::read_dir(output_dir) {
97        if it.next().is_none() {
98            // Folder exists and is empty: nothing to do
99            return Ok(false);
100        }
101
102        ensure!(
103            allow_overwrite,
104            "Output folder already exists and is not empty. \
105            Please delete the folder or pass the --overwrite command-line option."
106        );
107
108        fs::remove_dir_all(output_dir).context("Could not delete folder")?;
109        true
110    } else {
111        false
112    };
113
114    // Try to create the directory, with parents
115    fs::create_dir_all(output_dir)?;
116
117    Ok(overwrite)
118}
119
/// Represents a row in the assets output CSV file.
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct AssetRow {
    /// Unique ID of the asset
    asset_id: AssetID,
    /// ID of the process this asset runs
    process_id: ProcessID,
    /// Region in which the asset is located
    region_id: RegionID,
    /// ID of the agent responsible for the asset
    agent_id: AgentID,
    /// Asset group this asset belongs to, if any
    group_id: Option<AssetGroupID>,
    /// Year in which the asset was commissioned
    commission_year: u32,
    /// Year in which the asset was decommissioned, if it has been
    decommission_year: Option<u32>,
    /// Total capacity of the asset
    capacity: Capacity,
}
132
impl AssetRow {
    /// Create a new [`AssetRow`]
    ///
    /// # Panics
    ///
    /// Panics if `asset.id()` or `asset.agent_id()` is `None` — presumably the case for
    /// assets that have not yet been commissioned (see the panic note on
    /// `DataWriter::write_assets`).
    fn new(asset: &Asset) -> Self {
        Self {
            asset_id: asset.id().unwrap(),
            process_id: asset.process_id().clone(),
            region_id: asset.region_id().clone(),
            agent_id: asset.agent_id().unwrap().clone(),
            group_id: asset.group_id(),
            commission_year: asset.commission_year(),
            decommission_year: asset.decommission_year(),
            capacity: asset.capacity().total_capacity(),
        }
    }
}
148
/// Represents the flow-related data in a row of the commodity flows CSV file.
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct CommodityFlowRow {
    /// Milestone year the flow applies to
    milestone_year: u32,
    /// ID of the asset producing/consuming the flow
    asset_id: AssetID,
    /// Commodity being produced/consumed
    commodity_id: CommodityID,
    /// Time slice in which the flow occurs
    time_slice: TimeSliceID,
    /// The flow value
    flow: Flow,
}
158
/// Represents a row in the commodity prices CSV file
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct CommodityPriceRow {
    /// Milestone year the price applies to
    milestone_year: u32,
    /// Commodity being priced
    commodity_id: CommodityID,
    /// Region the price applies to
    region_id: RegionID,
    /// Time slice the price applies to
    time_slice: TimeSliceID,
    /// Price per unit of flow
    price: MoneyPerFlow,
}
168
/// Represents the activity in a row of the activity CSV file
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct ActivityRow {
    /// Milestone year this entry applies to
    milestone_year: u32,
    /// Description of the optimisation run, possibly prefixed with extra context
    run_description: String,
    /// Asset ID; `None` for candidate (not-yet-commissioned) assets — TODO confirm
    asset_id: Option<AssetID>,
    /// Process the asset runs
    process_id: ProcessID,
    /// Region the asset is in
    region_id: RegionID,
    /// Time slice this entry applies to
    time_slice: TimeSliceID,
    /// Activity level; `None` if absent from the solution's activity iterator
    activity: Option<Activity>,
    /// Dual value of the activity constraint; `None` if not reported
    activity_dual: Option<MoneyPerActivity>,
    /// Dual value of the column; `None` if not reported
    column_dual: Option<MoneyPerActivity>,
}
182
/// Represents the commodity balance duals data in a row of the commodity balance duals CSV file
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct CommodityBalanceDualsRow {
    /// Milestone year this entry applies to
    milestone_year: u32,
    /// Description of the optimisation run, possibly prefixed with extra context
    run_description: String,
    /// Commodity whose balance constraint this dual belongs to
    commodity_id: CommodityID,
    /// Region the constraint applies to
    region_id: RegionID,
    /// Time slice the constraint applies to
    time_slice: TimeSliceID,
    /// The dual value of the balance constraint
    value: MoneyPerFlow,
}
193
/// Represents the unmet demand data in a row of the unmet demand CSV file
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct UnmetDemandRow {
    /// Milestone year this entry applies to
    milestone_year: u32,
    /// Description of the optimisation run, possibly prefixed with extra context
    run_description: String,
    /// Commodity for which demand was unmet
    commodity_id: CommodityID,
    /// Region in which demand was unmet
    region_id: RegionID,
    /// Time slice in which demand was unmet
    time_slice: TimeSliceID,
    /// Amount of unmet demand
    value: Flow,
}
204
/// Represents solver output values
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct SolverValuesRow {
    /// Milestone year this entry applies to
    milestone_year: u32,
    /// Description of the optimisation run, possibly prefixed with extra context
    run_description: String,
    /// Value of the solver's objective function
    objective_value: Money,
}
212
/// Represents the appraisal results in a row of the appraisal results CSV file
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct AppraisalResultsRow {
    /// Milestone year this appraisal applies to
    milestone_year: u32,
    /// Description of the appraisal run, possibly prefixed with extra context
    run_description: String,
    /// Asset ID; `None` if the appraised asset has no ID yet — TODO confirm
    asset_id: Option<AssetID>,
    /// Process the appraised asset runs
    process_id: ProcessID,
    /// Region the appraised asset is in
    region_id: RegionID,
    /// Total capacity considered in the appraisal
    capacity: Capacity,
    /// Cost coefficient per unit of capacity used in the appraisal
    capacity_coefficient: MoneyPerCapacity,
    /// The appraisal metric value (semantics defined by the appraisal module)
    metric: f64,
}
225
/// Represents a row of the per-time-slice appraisal results CSV file
#[derive(Serialize, Deserialize, Debug, PartialEq)]
struct AppraisalResultsTimeSliceRow {
    /// Milestone year this appraisal applies to
    milestone_year: u32,
    /// Description of the appraisal run, possibly prefixed with extra context
    run_description: String,
    /// Asset ID; `None` if the appraised asset has no ID yet — TODO confirm
    asset_id: Option<AssetID>,
    /// Process the appraised asset runs
    process_id: ProcessID,
    /// Region the appraised asset is in
    region_id: RegionID,
    /// Time slice this row applies to
    time_slice: TimeSliceID,
    /// Activity level in this time slice
    activity: Activity,
    /// Cost coefficient per unit of activity in this time slice
    activity_coefficient: MoneyPerActivity,
    /// Demand in this time slice
    demand: Flow,
    /// Unmet demand in this time slice
    unmet_demand: Flow,
}
240
/// For writing extra debug information about the model
struct DebugDataWriter {
    /// Optional context string prepended to every row's run description
    /// (see `with_context`); set/cleared via `DataWriter`
    context: Option<String>,
    /// Writer for commodity balance duals
    commodity_balance_duals_writer: csv::Writer<File>,
    /// Writer for unmet demand values
    unmet_demand_writer: csv::Writer<File>,
    /// Writer for extra solver output values (e.g. objective value)
    solver_values_writer: csv::Writer<File>,
    /// Writer for per-asset appraisal results
    appraisal_results_writer: csv::Writer<File>,
    /// Writer for per-time-slice appraisal results
    appraisal_results_time_slice_writer: csv::Writer<File>,
    /// Writer for asset dispatch (activity and duals)
    dispatch_asset_writer: csv::Writer<File>,
}
251
impl DebugDataWriter {
    /// Open CSV files to write debug info to
    ///
    /// One CSV file is created per debug output; any existing files are truncated by
    /// `csv::Writer::from_path`.
    ///
    /// # Arguments
    ///
    /// * `output_path` - Folder where files will be saved
    fn create(output_path: &Path) -> Result<Self> {
        // Helper: open a CSV writer for the named file inside `output_path`
        let new_writer = |file_name| {
            let file_path = output_path.join(file_name);
            csv::Writer::from_path(file_path)
        };

        Ok(Self {
            // No context until one is set via `DataWriter::set_debug_context`
            context: None,
            commodity_balance_duals_writer: new_writer(COMMODITY_BALANCE_DUALS_FILE_NAME)?,
            unmet_demand_writer: new_writer(UNMET_DEMAND_FILE_NAME)?,
            solver_values_writer: new_writer(SOLVER_VALUES_FILE_NAME)?,
            appraisal_results_writer: new_writer(APPRAISAL_RESULTS_FILE_NAME)?,
            appraisal_results_time_slice_writer: new_writer(
                APPRAISAL_RESULTS_TIME_SLICE_FILE_NAME,
            )?,
            dispatch_asset_writer: new_writer(ACTIVITY_ASSET_DISPATCH)?,
        })
    }

    /// Prepend the current context (if any) to the run description
    ///
    /// Returns `"<context>; <run_description>"` when a context is set, otherwise the
    /// run description unchanged.
    fn with_context(&self, run_description: &str) -> String {
        if let Some(context) = &self.context {
            format!("{context}; {run_description}")
        } else {
            run_description.to_string()
        }
    }

    /// Write debug info about the dispatch optimisation
    ///
    /// Writes activity (with duals), commodity balance duals, unmet demand and solver
    /// values for the given solution, in that order.
    fn write_dispatch_debug_info(
        &mut self,
        milestone_year: u32,
        run_description: &str,
        solution: &Solution,
    ) -> Result<()> {
        self.write_activity(
            milestone_year,
            run_description,
            solution.iter_activity(),
            solution.iter_activity_duals(),
            solution.iter_column_duals(),
        )?;
        self.write_commodity_balance_duals(
            milestone_year,
            run_description,
            solution.iter_commodity_balance_duals(),
        )?;
        self.write_unmet_demand(
            milestone_year,
            run_description,
            solution.iter_unmet_demand(),
        )?;
        self.write_solver_values(milestone_year, run_description, solution.objective_value)?;
        Ok(())
    }

    /// Write activity values and their duals to the asset dispatch debug file
    ///
    /// The three iterators may yield entries in different orders and may each be
    /// missing some (asset, time slice) keys; entries are first merged into a single
    /// map so each output row carries whichever of the three values are present.
    fn write_activity<'a, I, J, K>(
        &mut self,
        milestone_year: u32,
        run_description: &str,
        iter_activity: I,
        iter_activity_duals: J,
        iter_column_duals: K,
    ) -> Result<()>
    where
        I: Iterator<Item = (&'a AssetRef, &'a TimeSliceID, Activity)>,
        J: Iterator<Item = (&'a AssetRef, &'a TimeSliceID, MoneyPerActivity)>,
        K: Iterator<Item = (&'a AssetRef, &'a TimeSliceID, MoneyPerActivity)>,
    {
        // To account for different order of entries or missing ones, we first compile data in hash map.
        // Tuple slots: (activity, activity dual, column dual) — each `None` until seen.
        type CompiledActivityData = (
            Option<Activity>,
            Option<MoneyPerActivity>,
            Option<MoneyPerActivity>,
        );
        // IndexMap preserves insertion order, so rows are written in first-seen order
        let mut map: IndexMap<(&AssetRef, &TimeSliceID), CompiledActivityData> = IndexMap::new();

        // For the activities
        for (asset, time_slice, activity) in iter_activity {
            map.entry((asset, time_slice)).or_default().0 = Some(activity);
        }
        // The activity duals
        for (asset, time_slice, activity_dual) in iter_activity_duals {
            map.entry((asset, time_slice)).or_default().1 = Some(activity_dual);
        }
        // And the column duals
        for (asset, time_slice, column_dual) in iter_column_duals {
            map.entry((asset, time_slice)).or_default().2 = Some(column_dual);
        }

        // Flatten the map into one CSV row per (asset, time slice) key
        for (asset, time_slice, activity, activity_dual, column_dual) in
            map.iter()
                .map(|(&(agent, ts), &(activity, activity_dual, column_dual))| {
                    (agent, ts, activity, activity_dual, column_dual)
                })
        {
            let row = ActivityRow {
                milestone_year,
                run_description: self.with_context(run_description),
                asset_id: asset.id(),
                process_id: asset.process_id().clone(),
                region_id: asset.region_id().clone(),
                time_slice: time_slice.clone(),
                activity,
                activity_dual,
                column_dual,
            };
            self.dispatch_asset_writer.serialize(row)?;
        }

        Ok(())
    }

    /// Write commodity balance duals to file
    fn write_commodity_balance_duals<'a, I>(
        &mut self,
        milestone_year: u32,
        run_description: &str,
        iter: I,
    ) -> Result<()>
    where
        I: Iterator<Item = (&'a CommodityID, &'a RegionID, &'a TimeSliceID, MoneyPerFlow)>,
    {
        for (commodity_id, region_id, time_slice, value) in iter {
            let row = CommodityBalanceDualsRow {
                milestone_year,
                run_description: self.with_context(run_description),
                commodity_id: commodity_id.clone(),
                region_id: region_id.clone(),
                time_slice: time_slice.clone(),
                value,
            };
            self.commodity_balance_duals_writer.serialize(row)?;
        }

        Ok(())
    }

    /// Write unmet demand values to file
    fn write_unmet_demand<'a, I>(
        &mut self,
        milestone_year: u32,
        run_description: &str,
        iter: I,
    ) -> Result<()>
    where
        I: Iterator<Item = (&'a CommodityID, &'a RegionID, &'a TimeSliceID, Flow)>,
    {
        for (commodity_id, region_id, time_slice, value) in iter {
            let row = UnmetDemandRow {
                milestone_year,
                run_description: self.with_context(run_description),
                commodity_id: commodity_id.clone(),
                region_id: region_id.clone(),
                time_slice: time_slice.clone(),
                value,
            };
            self.unmet_demand_writer.serialize(row)?;
        }

        Ok(())
    }

    /// Write additional solver output values to file
    ///
    /// Unlike the other `write_*` methods, this flushes immediately after writing
    /// (only one row is produced per call).
    fn write_solver_values(
        &mut self,
        milestone_year: u32,
        run_description: &str,
        objective_value: Money,
    ) -> Result<()> {
        let row = SolverValuesRow {
            milestone_year,
            run_description: self.with_context(run_description),
            objective_value,
        };
        self.solver_values_writer.serialize(row)?;
        self.solver_values_writer.flush()?;

        Ok(())
    }

    /// Write per-asset appraisal results to file
    fn write_appraisal_results(
        &mut self,
        milestone_year: u32,
        run_description: &str,
        appraisal_results: &[AppraisalOutput],
    ) -> Result<()> {
        for result in appraisal_results {
            let row = AppraisalResultsRow {
                milestone_year,
                run_description: self.with_context(run_description),
                asset_id: result.asset.id(),
                process_id: result.asset.process_id().clone(),
                region_id: result.asset.region_id().clone(),
                capacity: result.capacity.total_capacity(),
                capacity_coefficient: result.coefficients.capacity_coefficient,
                metric: result.metric.value(),
            };
            self.appraisal_results_writer.serialize(row)?;
        }

        Ok(())
    }

    /// Write per-time-slice appraisal results to file
    ///
    /// NOTE(review): the indexing below panics if a time slice in `result.activity` is
    /// missing from the coefficients/demand/unmet-demand maps — presumably all four
    /// share the same keys; confirm against the appraisal module.
    fn write_appraisal_time_slice_results(
        &mut self,
        milestone_year: u32,
        run_description: &str,
        appraisal_results: &[AppraisalOutput],
    ) -> Result<()> {
        for result in appraisal_results {
            for (time_slice, activity) in &result.activity {
                let activity_coefficient = result.coefficients.activity_coefficients[time_slice];
                let demand = result.demand[time_slice];
                let unmet_demand = result.unmet_demand[time_slice];
                let row = AppraisalResultsTimeSliceRow {
                    milestone_year,
                    run_description: self.with_context(run_description),
                    asset_id: result.asset.id(),
                    process_id: result.asset.process_id().clone(),
                    region_id: result.asset.region_id().clone(),
                    time_slice: time_slice.clone(),
                    activity: *activity,
                    activity_coefficient,
                    demand,
                    unmet_demand,
                };
                self.appraisal_results_time_slice_writer.serialize(row)?;
            }
        }

        Ok(())
    }

    /// Flush the underlying streams
    fn flush(&mut self) -> Result<()> {
        self.commodity_balance_duals_writer.flush()?;
        self.unmet_demand_writer.flush()?;
        self.solver_values_writer.flush()?;
        self.appraisal_results_writer.flush()?;
        self.appraisal_results_time_slice_writer.flush()?;
        self.dispatch_asset_writer.flush()?;

        Ok(())
    }
}
507
/// An object for writing simulation output data (assets, flows, prices) to file
pub struct DataWriter {
    /// Path to the assets CSV file; rewritten in full on each `write_assets` call
    assets_path: PathBuf,
    /// Writer for the commodity flows CSV file
    flows_writer: csv::Writer<File>,
    /// Writer for the commodity prices CSV file
    prices_writer: csv::Writer<File>,
    /// Writers for the extra debug CSV files; `None` unless debug output was requested
    debug_writer: Option<DebugDataWriter>,
}
515
impl DataWriter {
    /// Open CSV files to write output data to
    ///
    /// Also saves run metadata to the output folder before any CSV files are created.
    ///
    /// # Arguments
    ///
    /// * `output_path` - Folder where files will be saved
    /// * `model_path` - Path to input model
    /// * `save_debug_info` - Whether to include extra CSV files for debugging model
    pub fn create(output_path: &Path, model_path: &Path, save_debug_info: bool) -> Result<Self> {
        write_metadata(output_path, model_path).context("Failed to save metadata")?;

        // Helper: open a CSV writer for the named file inside `output_path`
        let new_writer = |file_name| {
            let file_path = output_path.join(file_name);
            csv::Writer::from_path(file_path)
        };

        let debug_writer = if save_debug_info {
            // Create debug CSV files
            Some(DebugDataWriter::create(output_path)?)
        } else {
            None
        };

        Ok(Self {
            // Only the path is stored: the assets file is recreated on every write
            assets_path: output_path.join(ASSETS_FILE_NAME),
            flows_writer: new_writer(COMMODITY_FLOWS_FILE_NAME)?,
            prices_writer: new_writer(COMMODITY_PRICES_FILE_NAME)?,
            debug_writer,
        })
    }

    /// Write debug info about the dispatch optimisation
    ///
    /// No-op if debug output was not enabled at creation time.
    pub fn write_dispatch_debug_info(
        &mut self,
        milestone_year: u32,
        run_description: &str,
        solution: &Solution,
    ) -> Result<()> {
        if let Some(wtr) = &mut self.debug_writer {
            wtr.write_dispatch_debug_info(milestone_year, run_description, solution)?;
        }

        Ok(())
    }

    /// Write debug info about the investment appraisal
    ///
    /// Writes both the per-asset and per-time-slice appraisal files. No-op if debug
    /// output was not enabled at creation time.
    pub fn write_appraisal_debug_info(
        &mut self,
        milestone_year: u32,
        run_description: &str,
        appraisal_results: &[AppraisalOutput],
    ) -> Result<()> {
        if let Some(wtr) = &mut self.debug_writer {
            wtr.write_appraisal_results(milestone_year, run_description, appraisal_results)?;
            wtr.write_appraisal_time_slice_results(
                milestone_year,
                run_description,
                appraisal_results,
            )?;
        }

        Ok(())
    }

    /// Write assets to a CSV file.
    ///
    /// The whole file is written at once and is overwritten with subsequent invocations. This is
    /// done so that partial results will be written in the case of errors and so that the user can
    /// see the results while the simulation is still running.
    ///
    /// The file is sorted by asset ID.
    ///
    /// # Panics
    ///
    /// Panics if any of the assets has not yet been commissioned (decommissioned assets are fine).
    pub fn write_assets<'a, I>(&mut self, assets: I) -> Result<()>
    where
        I: Iterator<Item = &'a AssetRef>,
    {
        // A fresh writer per call: truncates and rewrites the whole file
        let mut writer = csv::Writer::from_path(&self.assets_path)?;
        for asset in assets.sorted() {
            let row = AssetRow::new(asset);
            writer.serialize(row)?;
        }
        writer.flush()?;

        Ok(())
    }

    /// Write commodity flows to a CSV file
    ///
    /// Rows are appended; flushing happens in [`DataWriter::flush`].
    pub fn write_flows(&mut self, milestone_year: u32, flow_map: &FlowMap) -> Result<()> {
        for ((asset, commodity_id, time_slice), flow) in flow_map {
            let row = CommodityFlowRow {
                milestone_year,
                asset_id: asset.id().unwrap(),
                commodity_id: commodity_id.clone(),
                time_slice: time_slice.clone(),
                flow: *flow,
            };
            self.flows_writer.serialize(row)?;
        }

        Ok(())
    }

    /// Write commodity prices to a CSV file
    ///
    /// Rows are appended; flushing happens in [`DataWriter::flush`].
    pub fn write_prices(&mut self, milestone_year: u32, prices: &CommodityPrices) -> Result<()> {
        for (commodity_id, region_id, time_slice, price) in prices.iter() {
            let row = CommodityPriceRow {
                milestone_year,
                commodity_id: commodity_id.clone(),
                region_id: region_id.clone(),
                time_slice: time_slice.clone(),
                price,
            };
            self.prices_writer.serialize(row)?;
        }

        Ok(())
    }

    /// Flush the underlying streams
    pub fn flush(&mut self) -> Result<()> {
        self.flows_writer.flush()?;
        self.prices_writer.flush()?;
        if let Some(wtr) = &mut self.debug_writer {
            wtr.flush()?;
        }

        Ok(())
    }

    /// Add context to the debug writer
    ///
    /// The context is prepended to the run description of subsequently written debug
    /// rows. No-op if debug output was not enabled at creation time.
    pub fn set_debug_context(&mut self, context: String) {
        if let Some(wtr) = &mut self.debug_writer {
            wtr.context = Some(context);
        }
    }

    /// Clear context from the debug writer
    pub fn clear_debug_context(&mut self) {
        if let Some(wtr) = &mut self.debug_writer {
            wtr.context = None;
        }
    }
}
662
663#[cfg(test)]
664mod tests {
665    use super::*;
666    use crate::asset::AssetPool;
667    use crate::fixture::{appraisal_output, asset, assets, commodity_id, region_id, time_slice};
668    use crate::simulation::investment::appraisal::AppraisalOutput;
669    use crate::time_slice::TimeSliceID;
670    use indexmap::indexmap;
671    use itertools::{Itertools, assert_equal};
672    use rstest::rstest;
673    use std::iter;
674    use tempfile::tempdir;
675
    /// Round-trip test: write a single asset to CSV, deserialise it and compare
    #[rstest]
    fn write_assets(assets: AssetPool) {
        let dir = tempdir().unwrap();

        // Write an asset
        {
            // Scope ensures the writer is dropped before the file is read back
            let mut writer = DataWriter::create(dir.path(), dir.path(), false).unwrap();
            writer.write_assets(assets.iter_active()).unwrap();
            writer.flush().unwrap();
        }

        // Read back and compare
        let asset = assets.iter_active().next().unwrap();
        let expected = AssetRow::new(asset);
        let records: Vec<AssetRow> = csv::Reader::from_path(dir.path().join(ASSETS_FILE_NAME))
            .unwrap()
            .into_deserialize()
            .try_collect()
            .unwrap();
        assert_equal(records, iter::once(expected));
    }
697
    /// Round-trip test: write a single commodity flow and read the CSV back
    #[rstest]
    fn write_flows(assets: AssetPool, commodity_id: CommodityID, time_slice: TimeSliceID) {
        let milestone_year = 2020;
        let asset = assets.iter_active().next().unwrap();
        let flow_map = indexmap! {
            (asset.clone(), commodity_id.clone(), time_slice.clone()) => Flow(42.0)
        };

        // Write a flow
        let dir = tempdir().unwrap();
        {
            // Scope ensures the writer is dropped before the file is read back
            let mut writer = DataWriter::create(dir.path(), dir.path(), false).unwrap();
            writer.write_flows(milestone_year, &flow_map).unwrap();
            writer.flush().unwrap();
        }

        // Read back and compare
        let expected = CommodityFlowRow {
            milestone_year,
            asset_id: asset.id().unwrap(),
            commodity_id,
            time_slice,
            flow: Flow(42.0),
        };
        let records: Vec<CommodityFlowRow> =
            csv::Reader::from_path(dir.path().join(COMMODITY_FLOWS_FILE_NAME))
                .unwrap()
                .into_deserialize()
                .try_collect()
                .unwrap();
        assert_equal(records, iter::once(expected));
    }
730
731    #[rstest]
732    fn write_prices(commodity_id: CommodityID, region_id: RegionID, time_slice: TimeSliceID) {
733        let milestone_year = 2020;
734        let price = MoneyPerFlow(42.0);
735        let mut prices = CommodityPrices::default();
736        prices.insert(&commodity_id, &region_id, &time_slice, price);
737
738        let dir = tempdir().unwrap();
739
740        // Write a price
741        {
742            let mut writer = DataWriter::create(dir.path(), dir.path(), false).unwrap();
743            writer.write_prices(milestone_year, &prices).unwrap();
744            writer.flush().unwrap();
745        }
746
747        // Read back and compare
748        let expected = CommodityPriceRow {
749            milestone_year,
750            commodity_id,
751            region_id,
752            time_slice,
753            price,
754        };
755        let records: Vec<CommodityPriceRow> =
756            csv::Reader::from_path(dir.path().join(COMMODITY_PRICES_FILE_NAME))
757                .unwrap()
758                .into_deserialize()
759                .try_collect()
760                .unwrap();
761        assert_equal(records, iter::once(expected));
762    }
763
    /// Round-trip test: write a single commodity balance dual and read the CSV back
    #[rstest]
    fn write_commodity_balance_duals(
        commodity_id: CommodityID,
        region_id: RegionID,
        time_slice: TimeSliceID,
    ) {
        let milestone_year = 2020;
        let run_description = "test_run".to_string();
        let value = MoneyPerFlow(0.5);
        let dir = tempdir().unwrap();

        // Write commodity balance dual
        {
            // Scope ensures the writer is dropped before the file is read back
            let mut writer = DebugDataWriter::create(dir.path()).unwrap();
            writer
                .write_commodity_balance_duals(
                    milestone_year,
                    &run_description,
                    iter::once((&commodity_id, &region_id, &time_slice, value)),
                )
                .unwrap();
            writer.flush().unwrap();
        }

        // Read back and compare
        let expected = CommodityBalanceDualsRow {
            milestone_year,
            run_description,
            commodity_id,
            region_id,
            time_slice,
            value,
        };
        let records: Vec<CommodityBalanceDualsRow> =
            csv::Reader::from_path(dir.path().join(COMMODITY_BALANCE_DUALS_FILE_NAME))
                .unwrap()
                .into_deserialize()
                .try_collect()
                .unwrap();
        assert_equal(records, iter::once(expected));
    }
805
806    #[rstest]
807    fn write_unmet_demand(commodity_id: CommodityID, region_id: RegionID, time_slice: TimeSliceID) {
808        let milestone_year = 2020;
809        let run_description = "test_run".to_string();
810        let value = Flow(0.5);
811        let dir = tempdir().unwrap();
812
813        // Write unmet demand
814        {
815            let mut writer = DebugDataWriter::create(dir.path()).unwrap();
816            writer
817                .write_unmet_demand(
818                    milestone_year,
819                    &run_description,
820                    iter::once((&commodity_id, &region_id, &time_slice, value)),
821                )
822                .unwrap();
823            writer.flush().unwrap();
824        }
825
826        // Read back and compare
827        let expected = UnmetDemandRow {
828            milestone_year,
829            run_description,
830            commodity_id,
831            region_id,
832            time_slice,
833            value,
834        };
835        let records: Vec<UnmetDemandRow> =
836            csv::Reader::from_path(dir.path().join(UNMET_DEMAND_FILE_NAME))
837                .unwrap()
838                .into_deserialize()
839                .try_collect()
840                .unwrap();
841        assert_equal(records, iter::once(expected));
842    }
843
    /// Round-trip test: activity plus both duals for one (asset, time slice) key
    /// produce a single fully-populated row
    #[rstest]
    fn write_activity(assets: AssetPool, time_slice: TimeSliceID) {
        let milestone_year = 2020;
        let run_description = "test_run".to_string();
        let activity = Activity(100.5);
        let activity_dual = MoneyPerActivity(-1.5);
        let column_dual = MoneyPerActivity(5.0);
        let dir = tempdir().unwrap();
        let asset = assets.iter_active().next().unwrap();

        // Write activity
        {
            // Scope ensures the writer is dropped before the file is read back
            let mut writer = DebugDataWriter::create(dir.path()).unwrap();
            writer
                .write_activity(
                    milestone_year,
                    &run_description,
                    iter::once((asset, &time_slice, activity)),
                    iter::once((asset, &time_slice, activity_dual)),
                    iter::once((asset, &time_slice, column_dual)),
                )
                .unwrap();
            writer.flush().unwrap();
        }

        // Read back and compare
        let expected = ActivityRow {
            milestone_year,
            run_description,
            asset_id: asset.id(),
            process_id: asset.process_id().clone(),
            region_id: asset.region_id().clone(),
            time_slice,
            activity: Some(activity),
            activity_dual: Some(activity_dual),
            column_dual: Some(column_dual),
        };
        let records: Vec<ActivityRow> =
            csv::Reader::from_path(dir.path().join(ACTIVITY_ASSET_DISPATCH))
                .unwrap()
                .into_deserialize()
                .try_collect()
                .unwrap();
        assert_equal(records, iter::once(expected));
    }
889
890    #[rstest]
891    fn write_activity_with_missing_keys(assets: AssetPool, time_slice: TimeSliceID) {
892        let milestone_year = 2020;
893        let run_description = "test_run".to_string();
894        let activity = Activity(100.5);
895        let dir = tempdir().unwrap();
896        let asset = assets.iter_active().next().unwrap();
897
898        // Write activity
899        {
900            let mut writer = DebugDataWriter::create(dir.path()).unwrap();
901            writer
902                .write_activity(
903                    milestone_year,
904                    &run_description,
905                    iter::once((asset, &time_slice, activity)),
906                    iter::empty::<(&AssetRef, &TimeSliceID, MoneyPerActivity)>(),
907                    iter::empty::<(&AssetRef, &TimeSliceID, MoneyPerActivity)>(),
908                )
909                .unwrap();
910            writer.flush().unwrap();
911        }
912
913        // Read back and compare
914        let expected = ActivityRow {
915            milestone_year,
916            run_description,
917            asset_id: asset.id(),
918            process_id: asset.process_id().clone(),
919            region_id: asset.region_id().clone(),
920            time_slice,
921            activity: Some(activity),
922            activity_dual: None,
923            column_dual: None,
924        };
925        let records: Vec<ActivityRow> =
926            csv::Reader::from_path(dir.path().join(ACTIVITY_ASSET_DISPATCH))
927                .unwrap()
928                .into_deserialize()
929                .try_collect()
930                .unwrap();
931        assert_equal(records, iter::once(expected));
932    }
933
934    #[rstest]
935    fn write_solver_values() {
936        let milestone_year = 2020;
937        let run_description = "test_run".to_string();
938        let objective_value = Money(1234.56);
939        let dir = tempdir().unwrap();
940
941        // Write solver values
942        {
943            let mut writer = DebugDataWriter::create(dir.path()).unwrap();
944            writer
945                .write_solver_values(milestone_year, &run_description, objective_value)
946                .unwrap();
947            writer.flush().unwrap();
948        }
949
950        // Read back and compare
951        let expected = SolverValuesRow {
952            milestone_year,
953            run_description,
954            objective_value,
955        };
956        let records: Vec<SolverValuesRow> =
957            csv::Reader::from_path(dir.path().join(SOLVER_VALUES_FILE_NAME))
958                .unwrap()
959                .into_deserialize()
960                .try_collect()
961                .unwrap();
962        assert_equal(records, iter::once(expected));
963    }
964
965    #[rstest]
966    fn write_appraisal_results(asset: Asset, appraisal_output: AppraisalOutput) {
967        let milestone_year = 2020;
968        let run_description = "test_run".to_string();
969        let dir = tempdir().unwrap();
970
971        // Write appraisal results
972        {
973            let mut writer = DebugDataWriter::create(dir.path()).unwrap();
974            writer
975                .write_appraisal_results(milestone_year, &run_description, &[appraisal_output])
976                .unwrap();
977            writer.flush().unwrap();
978        }
979
980        // Read back and compare
981        let expected = AppraisalResultsRow {
982            milestone_year,
983            run_description,
984            asset_id: None,
985            process_id: asset.process_id().clone(),
986            region_id: asset.region_id().clone(),
987            capacity: Capacity(42.0),
988            capacity_coefficient: MoneyPerCapacity(2.14),
989            metric: 4.14,
990        };
991        let records: Vec<AppraisalResultsRow> =
992            csv::Reader::from_path(dir.path().join(APPRAISAL_RESULTS_FILE_NAME))
993                .unwrap()
994                .into_deserialize()
995                .try_collect()
996                .unwrap();
997        assert_equal(records, iter::once(expected));
998    }
999
1000    #[rstest]
1001    fn write_appraisal_time_slice_results(
1002        asset: Asset,
1003        appraisal_output: AppraisalOutput,
1004        time_slice: TimeSliceID,
1005    ) {
1006        let milestone_year = 2020;
1007        let run_description = "test_run".to_string();
1008        let dir = tempdir().unwrap();
1009
1010        // Write appraisal time slice results
1011        {
1012            let mut writer = DebugDataWriter::create(dir.path()).unwrap();
1013            writer
1014                .write_appraisal_time_slice_results(
1015                    milestone_year,
1016                    &run_description,
1017                    &[appraisal_output],
1018                )
1019                .unwrap();
1020            writer.flush().unwrap();
1021        }
1022
1023        // Read back and compare
1024        let expected = AppraisalResultsTimeSliceRow {
1025            milestone_year,
1026            run_description,
1027            asset_id: None,
1028            process_id: asset.process_id().clone(),
1029            region_id: asset.region_id().clone(),
1030            time_slice: time_slice.clone(),
1031            activity: Activity(10.0),
1032            activity_coefficient: MoneyPerActivity(0.5),
1033            demand: Flow(100.0),
1034            unmet_demand: Flow(5.0),
1035        };
1036        let records: Vec<AppraisalResultsTimeSliceRow> =
1037            csv::Reader::from_path(dir.path().join(APPRAISAL_RESULTS_TIME_SLICE_FILE_NAME))
1038                .unwrap()
1039                .into_deserialize()
1040                .try_collect()
1041                .unwrap();
1042        assert_equal(records, iter::once(expected));
1043    }
1044
1045    #[test]
1046    fn create_output_directory_new_directory() {
1047        let temp_dir = tempdir().unwrap();
1048        let output_dir = temp_dir.path().join("new_output");
1049
1050        // Create a new directory should succeed and return false (no overwrite)
1051        let result = create_output_directory(&output_dir, false).unwrap();
1052        assert!(!result);
1053        assert!(output_dir.exists());
1054        assert!(output_dir.is_dir());
1055    }
1056
1057    #[test]
1058    fn create_output_directory_existing_empty_directory() {
1059        let temp_dir = tempdir().unwrap();
1060        let output_dir = temp_dir.path().join("empty_output");
1061
1062        // Create the directory first
1063        fs::create_dir(&output_dir).unwrap();
1064
1065        // Creating again should succeed and return false (no overwrite needed)
1066        let result = create_output_directory(&output_dir, false).unwrap();
1067        assert!(!result);
1068        assert!(output_dir.exists());
1069        assert!(output_dir.is_dir());
1070    }
1071
1072    #[test]
1073    fn create_output_directory_existing_with_files_no_overwrite() {
1074        let temp_dir = tempdir().unwrap();
1075        let output_dir = temp_dir.path().join("output_with_files");
1076
1077        // Create directory with a file
1078        fs::create_dir(&output_dir).unwrap();
1079        fs::write(output_dir.join("existing_file.txt"), "some content").unwrap();
1080
1081        // Should fail when allow_overwrite is false
1082        let result = create_output_directory(&output_dir, false);
1083        assert!(result.is_err());
1084        assert!(
1085            result
1086                .unwrap_err()
1087                .to_string()
1088                .contains("Output folder already exists")
1089        );
1090    }
1091
1092    #[test]
1093    fn create_output_directory_existing_with_files_allow_overwrite() {
1094        let temp_dir = tempdir().unwrap();
1095        let output_dir = temp_dir.path().join("output_with_files");
1096
1097        // Create directory with a file
1098        fs::create_dir(&output_dir).unwrap();
1099        let file_path = output_dir.join("existing_file.txt");
1100        fs::write(&file_path, "some content").unwrap();
1101
1102        // Should succeed when allow_overwrite is true and return true (overwrite occurred)
1103        let result = create_output_directory(&output_dir, true).unwrap();
1104        assert!(result);
1105        assert!(output_dir.exists());
1106        assert!(output_dir.is_dir());
1107        assert!(!file_path.exists()); // File should be gone
1108    }
1109
1110    #[test]
1111    fn create_output_directory_nested_path() {
1112        let temp_dir = tempdir().unwrap();
1113        let output_dir = temp_dir.path().join("nested").join("path").join("output");
1114
1115        // Should create nested directories and return false (no overwrite)
1116        let result = create_output_directory(&output_dir, false).unwrap();
1117        assert!(!result);
1118        assert!(output_dir.exists());
1119        assert!(output_dir.is_dir());
1120    }
1121
1122    #[test]
1123    fn create_output_directory_existing_subdirs_with_files_allow_overwrite() {
1124        let temp_dir = tempdir().unwrap();
1125        let output_dir = temp_dir.path().join("output_with_subdirs");
1126
1127        // Create directory structure with files
1128        fs::create_dir_all(output_dir.join("subdir")).unwrap();
1129        fs::write(output_dir.join("file1.txt"), "content1").unwrap();
1130        fs::write(output_dir.join("subdir").join("file2.txt"), "content2").unwrap();
1131
1132        // Should succeed when allow_overwrite is true and return true (overwrite occurred)
1133        let result = create_output_directory(&output_dir, true).unwrap();
1134        assert!(result);
1135        assert!(output_dir.exists());
1136        assert!(output_dir.is_dir());
1137        // All previous content should be gone
1138        assert!(!output_dir.join("file1.txt").exists());
1139        assert!(!output_dir.join("subdir").exists());
1140    }
1141}