Skip to content

Commit

Permalink
Merge branch 'main' into log-to-file
Browse files Browse the repository at this point in the history
  • Loading branch information
alexdewar authored Jan 31, 2025
2 parents 1fb33fb + 03e78b3 commit 0880250
Show file tree
Hide file tree
Showing 11 changed files with 835 additions and 163 deletions.
510 changes: 374 additions & 136 deletions Cargo.lock

Large diffs are not rendered by default.

3 changes: 3 additions & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -26,3 +26,6 @@ fern = {version = "0.7.1", features = ["chrono", "colored"]}
chrono = "0.4"
clap = {version = "4.5.27", features = ["cargo", "derive"]}
include_dir = "0.7.4"
highs = "1.6.1"
indexmap = "2.7.1"
human-panic = "2.0.2"
2 changes: 2 additions & 0 deletions src/agent.rs
Original file line number Diff line number Diff line change
Expand Up @@ -85,6 +85,8 @@ pub enum ObjectiveType {
/// An asset controlled by an agent.
#[derive(Clone, Debug, PartialEq)]
pub struct Asset {
/// A unique identifier for the asset
pub id: u32,
/// A unique identifier for the agent
pub agent_id: Rc<str>,
/// The [`Process`] that this asset corresponds to
Expand Down
2 changes: 1 addition & 1 deletion src/commands.rs
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,7 @@ pub fn handle_run_command(model_dir: &PathBuf) -> Result<()> {
info!("Output directory created: {}", output_path.display());
let (model, assets) = load_model(model_dir).context("Failed to load model.")?;
info!("Model loaded successfully.");
crate::simulation::run(&model, &assets);
crate::simulation::run(model, assets);
Ok(())
}

Expand Down
13 changes: 11 additions & 2 deletions src/input/asset.rs
Original file line number Diff line number Diff line change
Expand Up @@ -65,6 +65,8 @@ fn read_assets_from_iter<I>(
where
I: Iterator<Item = AssetRaw>,
{
let mut id = 0u32;

iter.map(|asset| -> Result<_> {
let agent_id = agent_ids.get_id(&asset.agent_id)?;
let process = processes
Expand All @@ -78,13 +80,19 @@ where
process.id
);

Ok(Asset {
let asset = Asset {
id,
agent_id,
process: Rc::clone(process),
region_id,
capacity: asset.capacity,
commission_year: asset.commission_year,
})
};

// Increment ID for next asset
id += 1;

Ok(asset)
})
.try_collect()
}
Expand Down Expand Up @@ -132,6 +140,7 @@ mod tests {
commission_year: 2010,
};
let asset_out = Asset {
id: 0,
agent_id: "agent1".into(),
process: Rc::clone(&process),
region_id: "GBR".into(),
Expand Down
142 changes: 141 additions & 1 deletion src/input/process.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
//! Code for reading process-related information from CSV files.
use crate::commodity::Commodity;
use crate::commodity::{Commodity, CommodityType};
use crate::input::*;
use crate::process::{Process, ProcessCapacityMap, ProcessFlow, ProcessParameter};
use crate::region::RegionSelection;
Expand All @@ -17,6 +17,7 @@ use flow::read_process_flows;
pub mod parameter;
use parameter::read_process_parameters;
pub mod region;
use anyhow::bail;
use region::read_process_regions;

const PROCESSES_FILE_NAME: &str = "processes.csv";
Expand Down Expand Up @@ -68,6 +69,9 @@ pub fn read_processes(
let parameters = read_process_parameters(model_dir, &process_ids, year_range)?;
let regions = read_process_regions(model_dir, &process_ids, region_ids)?;

// Validate commodities after the flows have been read
validate_commodities(commodities, &flows)?;

create_process_map(
descriptions.into_values(),
availabilities,
Expand All @@ -77,6 +81,47 @@ pub fn read_processes(
)
}

/// Perform consistency checks for commodity flows.
fn validate_commodities(
commodities: &HashMap<Rc<str>, Rc<Commodity>>,
flows: &HashMap<Rc<str>, Vec<ProcessFlow>>,
) -> anyhow::Result<()> {
for (commodity_id, commodity) in commodities {
if commodity.kind == CommodityType::SupplyEqualsDemand {
validate_sed_commodity(commodity_id, commodity, flows)?;
}
}
Ok(())
}

/// Check that a single `SupplyEqualsDemand` commodity has both a producer and a consumer.
///
/// A flow with a positive value produces the commodity; a negative value consumes it.
/// Returns early as soon as one of each has been seen.
///
/// # Errors
///
/// Returns an error if the commodity has no producer or no consumer among `flows`.
fn validate_sed_commodity(
    commodity_id: &Rc<str>,
    commodity: &Rc<Commodity>,
    flows: &HashMap<Rc<str>, Vec<ProcessFlow>>,
) -> Result<()> {
    let mut seen_producer = false;
    let mut seen_consumer = false;

    // Only flows referring to this exact commodity (pointer identity) are relevant
    let relevant = flows
        .values()
        .flatten()
        .filter(|flow| Rc::ptr_eq(&flow.commodity, commodity));

    for flow in relevant {
        seen_producer |= flow.flow > 0.0;
        seen_consumer |= flow.flow < 0.0;

        if seen_producer && seen_consumer {
            return Ok(());
        }
    }

    bail!(
        "Commodity {} of 'SED' type must have both producer and consumer processes",
        commodity_id
    );
}

fn create_process_map<I>(
descriptions: I,
mut availabilities: HashMap<Rc<str>, ProcessCapacityMap>,
Expand Down Expand Up @@ -119,6 +164,10 @@ where

#[cfg(test)]
mod tests {
use crate::commodity::{CommodityCostMap, DemandMap};
use crate::process::FlowType;
use crate::time_slice::TimeSliceLevel;

use super::*;

struct ProcessData {
Expand Down Expand Up @@ -232,4 +281,95 @@ mod tests {
fn test_create_process_map_missing_parameters() {
test_missing!(parameters);
}

#[test]
fn test_validate_commodities() {
    // Helper for building a flow of the given size for a commodity
    let make_flow = |process_id: &str, commodity: &Rc<Commodity>, flow: f64| ProcessFlow {
        process_id: process_id.into(),
        commodity: Rc::clone(commodity),
        flow,
        flow_type: FlowType::Fixed,
        flow_cost: 1.0,
        is_pac: false,
    };

    // An SED commodity, which must have both a producer and a consumer
    let sed = Rc::new(Commodity {
        id: "commodity_sed".into(),
        description: "SED commodity".into(),
        kind: CommodityType::SupplyEqualsDemand,
        time_slice_level: TimeSliceLevel::Annual,
        costs: CommodityCostMap::new(),
        demand: DemandMap::new(),
    });

    // A non-SED commodity, which is exempt from the producer/consumer check
    let non_sed = Rc::new(Commodity {
        id: "commodity_non_sed".into(),
        description: "Non-SED commodity".into(),
        kind: CommodityType::ServiceDemand,
        time_slice_level: TimeSliceLevel::Annual,
        costs: CommodityCostMap::new(),
        demand: DemandMap::new(),
    });

    let commodities: HashMap<Rc<str>, Rc<Commodity>> = [&sed, &non_sed]
        .into_iter()
        .map(|commodity| (Rc::clone(&commodity.id), Rc::clone(commodity)))
        .collect();

    // process1 produces the SED commodity; process2 consumes it => valid
    let valid_flows: HashMap<Rc<str>, Vec<ProcessFlow>> = [
        (
            "process1".into(),
            vec![
                make_flow("process1", &sed, 10.0),
                make_flow("process1", &non_sed, -5.0),
            ],
        ),
        ("process2".into(), vec![make_flow("process2", &sed, -10.0)]),
    ]
    .into_iter()
    .collect();

    assert!(validate_commodities(&commodities, &valid_flows).is_ok());

    // With only a producer for the SED commodity, validation must fail
    let invalid_flows: HashMap<Rc<str>, Vec<ProcessFlow>> =
        [("process1".into(), vec![make_flow("process1", &sed, 10.0)])]
            .into_iter()
            .collect();

    assert!(validate_commodities(&commodities, &invalid_flows).is_err());
}
}
6 changes: 6 additions & 0 deletions src/main.rs
Original file line number Diff line number Diff line change
@@ -1,11 +1,17 @@
use clap::Parser;
use human_panic::{metadata, setup_panic};
use muse2::commands;

use commands::{
handle_example_list_command, handle_run_command, Cli, Commands, ExampleSubcommands,
};

fn main() {
setup_panic!(metadata!().support(format!(
"Open an issue on Github: {}/issues/new?template=bug_report.md",
env!("CARGO_PKG_REPOSITORY")
)));

let cli = Cli::parse();
match cli.command {
Commands::Run { model_dir } => handle_run_command(&model_dir),
Expand Down
51 changes: 28 additions & 23 deletions src/simulation.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,40 +2,45 @@
use crate::agent::{Asset, AssetPool};
use crate::model::Model;
use log::info;
use std::collections::HashMap;
use std::rc::Rc;

/// Get an iterator of active [`Asset`]s for the specified milestone year in a given region.
///
/// # Arguments
///
/// * `assets` - The asset pool to filter
/// * `year` - The milestone year of interest
/// * `region_id` - Only assets in this region are returned
fn filter_assets<'a>(
    assets: &'a AssetPool,
    year: u32,
    region_id: &'a Rc<str>,
) -> impl Iterator<Item = &'a Asset> {
    assets
        .iter()
        // An asset is active from its commission year onwards, so the comparison must be
        // `<=`; the previous `>=` wrongly selected assets commissioned in the future.
        .filter(move |asset| asset.commission_year <= year && asset.region_id == *region_id)
}
pub mod optimisation;
use optimisation::perform_dispatch_optimisation;
pub mod investment;
use investment::perform_agent_investment;
pub mod update;
use update::{update_commodity_flows, update_commodity_prices};

/// A map relating commodity ID to current price (endogenous)
pub type CommodityPrices = HashMap<Rc<str>, f64>;

/// Run the simulation.
///
/// # Arguments:
///
/// * `model` - The model to run
/// * `assets` - The asset pool
pub fn run(model: &Model, assets: &AssetPool) {
pub fn run(model: Model, mut assets: AssetPool) {
// Commodity prices (endogenous)
let mut prices = CommodityPrices::new();

for year in model.iter_years() {
info!("Milestone year: {year}");
for region_id in model.iter_regions() {
info!("├── Region: {region_id}");
for asset in filter_assets(assets, year, region_id) {
info!(
"│ ├── Agent {} has asset {} (commissioned in {})",
asset.agent_id, asset.process.id, asset.commission_year
);

for flow in asset.process.flows.iter() {
info!("│ │ ├── Commodity: {}", flow.commodity.id);
}
}
}
// Dispatch optimisation
let solution = perform_dispatch_optimisation(&model, &assets, year);
update_commodity_flows(&solution, &mut assets);
update_commodity_prices(&model.commodities, &solution, &mut prices);

// Agent investment
perform_agent_investment(&model, &mut assets);
}
}

/// Get an iterator of active [`Asset`]s for the specified milestone year.
///
/// An asset is considered active once its commission year has been reached.
pub fn filter_assets(assets: &AssetPool, year: u32) -> impl Iterator<Item = &Asset> {
    let is_active = move |asset: &&Asset| asset.commission_year <= year;
    assets.iter().filter(is_active)
}
14 changes: 14 additions & 0 deletions src/simulation/investment.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
//! Code for performing agent investment.
use crate::agent::AssetPool;
use crate::model::Model;
use log::info;

/// Perform agent investment to determine capacity investment of new assets for next milestone year.
///
/// NOTE(review): currently a placeholder — the body only logs a message and performs no
/// investment logic; both parameters are unused.
///
/// # Arguments
///
/// * `model` - The model
/// * `assets` - The asset pool
pub fn perform_agent_investment(_model: &Model, _assets: &mut AssetPool) {
    info!("Performing agent investment...");
}
Loading

0 comments on commit 0880250

Please sign in to comment.