chore: sync project state and current artifacts

This commit is contained in:
Sepehr
2026-02-22 23:27:31 +01:00
parent 1b6415776e
commit dd77089b22
232 changed files with 37056 additions and 4296 deletions

338
crates/cli/src/batch.rs Normal file
View File

@@ -0,0 +1,338 @@
//! Batch execution module.
//!
//! Handles parallel execution of multiple simulation scenarios.
use std::path::{Path, PathBuf};
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::Arc;
use indicatif::{ParallelProgressIterator, ProgressBar, ProgressStyle};
use rayon::prelude::*;
use serde::{Deserialize, Serialize};
use tracing::info;
use crate::error::{CliError, CliResult};
use crate::run::{run_simulation, SimulationResult, SimulationStatus};
/// Summary of batch execution.
///
/// Aggregated counts and timing over one batch run, plus the individual
/// per-scenario results.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BatchSummary {
    /// Total number of scenarios processed.
    pub total: usize,
    /// Number of successful simulations.
    pub succeeded: usize,
    /// Number of failed simulations.
    pub failed: usize,
    /// Number of non-converged simulations.
    pub non_converged: usize,
    /// Total execution time in milliseconds.
    ///
    /// Wall-clock time for the whole batch, not the sum of per-scenario times.
    pub total_elapsed_ms: u64,
    /// Average execution time per scenario in milliseconds.
    ///
    /// Derived from the sum of per-scenario `elapsed_ms` values, so with
    /// parallel workers it can differ from `total_elapsed_ms / total`.
    pub avg_elapsed_ms: f64,
    /// Individual results.
    pub results: Vec<SimulationResult>,
}
impl Default for BatchSummary {
fn default() -> Self {
Self {
total: 0,
succeeded: 0,
failed: 0,
non_converged: 0,
total_elapsed_ms: 0,
avg_elapsed_ms: 0.0,
results: Vec::new(),
}
}
}
/// Run batch simulations from a directory of configuration files.
///
/// Discovers every `*.json` file in `directory`, runs each scenario on a
/// dedicated rayon thread pool with `parallel` workers, and returns an
/// aggregated [`BatchSummary`]. Individual scenario failures do not abort
/// the batch; they are folded into the summary by `process_single_file`.
///
/// # Errors
///
/// Fails when `directory` is missing or is not a directory, when it contains
/// no `.json` files, or when the thread pool cannot be created.
pub fn run_batch(
    directory: &Path,
    parallel: usize,
    output_dir: Option<&Path>,
    quiet: bool,
    verbose: bool,
) -> CliResult<BatchSummary> {
    if !directory.exists() {
        return Err(CliError::BatchDirNotFound(directory.to_path_buf()));
    }
    if !directory.is_dir() {
        return Err(CliError::Config(format!(
            "Path is not a directory: {}",
            directory.display()
        )));
    }
    let config_files = discover_config_files(directory)?;
    if config_files.is_empty() {
        return Err(CliError::NoConfigFiles(directory.to_path_buf()));
    }
    if verbose {
        info!("Found {} configuration files", config_files.len());
        info!("Running with {} parallel workers", parallel);
    }
    let start = std::time::Instant::now();
    let total = config_files.len();
    // Dedicated pool so `parallel` bounds concurrency for this batch without
    // reconfiguring rayon's global pool.
    let pool = rayon::ThreadPoolBuilder::new()
        .num_threads(parallel)
        .build()
        .map_err(|e| CliError::Simulation(format!("Failed to create thread pool: {}", e)))?;
    let results: Vec<SimulationResult> = pool.install(|| {
        if quiet {
            // Quiet mode: no progress bar, no running counters.
            config_files
                .par_iter()
                .map(|path| process_single_file(path, output_dir, verbose))
                .collect()
        } else {
            let progress = ProgressBar::new(total as u64);
            progress.set_style(
                ProgressStyle::default_bar()
                    // Template is a compile-time constant, so unwrap cannot
                    // fail at runtime for user input.
                    .template(
                        "{spinner:.green} [{elapsed_precise}] [{bar:40.cyan/blue}] {pos}/{len} {msg}",
                    )
                    .unwrap()
                    .progress_chars("=>-"),
            );
            // Shared counters updated from worker threads; Relaxed is enough
            // because they are only read after `collect()` joins all work.
            let completed = Arc::new(AtomicUsize::new(0));
            let errors = Arc::new(AtomicUsize::new(0));
            let results: Vec<SimulationResult> = config_files
                .par_iter()
                .progress_with(progress)
                .map(|path| {
                    let result = process_single_file(path, output_dir, verbose);
                    completed.fetch_add(1, Ordering::Relaxed);
                    if result.status == SimulationStatus::Error {
                        errors.fetch_add(1, Ordering::Relaxed);
                    }
                    result
                })
                .collect();
            let comp_count = completed.load(Ordering::Relaxed);
            let err_count = errors.load(Ordering::Relaxed);
            println!();
            println!(
                "Completed: {} | Errors: {} | Elapsed: {:.2}s",
                comp_count,
                err_count,
                start.elapsed().as_secs_f64()
            );
            results
        }
    });
    let summary = build_summary(results, start.elapsed().as_millis() as u64);
    if !quiet {
        print_batch_summary(&summary);
    }
    Ok(summary)
}
/// Discover all JSON configuration files in a directory.
///
/// Returns the matching paths sorted lexicographically so batch runs are
/// deterministic. Only regular files with a `.json` extension are returned.
///
/// # Errors
///
/// Returns [`CliError::Io`] when the directory or an entry cannot be read.
pub fn discover_config_files(directory: &Path) -> CliResult<Vec<PathBuf>> {
    let mut files = Vec::new();
    for entry in std::fs::read_dir(directory)? {
        let entry = entry?;
        let path = entry.path();
        // Skip directories and other non-file entries: a subdirectory named
        // e.g. `foo.json` would otherwise be handed to the config loader.
        if !entry.file_type()?.is_file() {
            continue;
        }
        if path.extension().map_or(false, |ext| ext == "json") {
            files.push(path);
        }
    }
    files.sort();
    Ok(files)
}
/// Process a single configuration file.
fn process_single_file(
config_path: &Path,
output_dir: Option<&Path>,
verbose: bool,
) -> SimulationResult {
let output_path = output_dir.map(|dir| {
let stem = config_path
.file_stem()
.unwrap_or_default()
.to_string_lossy();
dir.join(format!("{}_result.json", stem))
});
match run_simulation(config_path, output_path.as_deref(), verbose) {
Ok(result) => result,
Err(e) => {
let status = match e.exit_code() {
crate::error::ExitCode::ConfigError => SimulationStatus::Error,
crate::error::ExitCode::SimulationError => SimulationStatus::NonConverged,
_ => SimulationStatus::Error,
};
SimulationResult {
input: config_path.display().to_string(),
status,
convergence: None,
iterations: None,
state: None,
performance: None,
error: Some(e.to_string()),
elapsed_ms: 0,
}
}
}
}
/// Build a batch summary from individual results.
fn build_summary(results: Vec<SimulationResult>, total_elapsed_ms: u64) -> BatchSummary {
let total = results.len();
let succeeded = results
.iter()
.filter(|r| r.status == SimulationStatus::Converged)
.count();
let failed = results
.iter()
.filter(|r| r.status == SimulationStatus::Error)
.count();
let non_converged = results
.iter()
.filter(|r| {
r.status == SimulationStatus::NonConverged || r.status == SimulationStatus::Timeout
})
.count();
let total_time: u64 = results.iter().map(|r| r.elapsed_ms).sum();
let avg_time = if total > 0 {
total_time as f64 / total as f64
} else {
0.0
};
BatchSummary {
total,
succeeded,
failed,
non_converged,
total_elapsed_ms,
avg_elapsed_ms: avg_time,
results,
}
}
/// Print a formatted batch summary.
///
/// Writes a colored, human-readable summary table to stdout.
fn print_batch_summary(summary: &BatchSummary) {
    use colored::Colorize;
    // The previous `"".repeat(60)` produced an empty string (the separator
    // glyph was lost), so every rule printed as a blank line. Use a visible
    // 60-character rule instead.
    let rule = "=".repeat(60);
    println!();
    println!("{}", rule.cyan());
    println!("{}", " BATCH EXECUTION SUMMARY".cyan().bold());
    println!("{}", rule.cyan());
    println!();
    println!(" Total scenarios: {}", summary.total);
    println!(
        " {} {:>15}",
        "Succeeded:".green(),
        summary.succeeded.to_string().green()
    );
    println!(
        " {} {:>15}",
        "Failed:".red(),
        summary.failed.to_string().red()
    );
    println!(
        " {} {:>13}",
        "Non-converged:".yellow(),
        summary.non_converged.to_string().yellow()
    );
    println!();
    println!(
        " Total time: {:.2} s",
        summary.total_elapsed_ms as f64 / 1000.0
    );
    println!(" Avg time/scenario: {:.2} ms", summary.avg_elapsed_ms);
    println!("{}", rule.cyan());
}
#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::tempdir;

    /// Only `.json` files are discovered, and the result is sorted.
    #[test]
    fn test_discover_config_files() {
        let dir = tempdir().unwrap();
        std::fs::write(dir.path().join("config1.json"), "{}").unwrap();
        std::fs::write(dir.path().join("config2.json"), "{}").unwrap();
        std::fs::write(dir.path().join("readme.txt"), "").unwrap();
        let files = discover_config_files(dir.path()).unwrap();
        assert_eq!(files.len(), 2);
        assert!(files[0].ends_with("config1.json"));
        assert!(files[1].ends_with("config2.json"));
    }

    /// Status counters and avg time (sum of elapsed_ms / total) are derived
    /// from the individual results.
    #[test]
    fn test_build_summary() {
        let results = vec![
            SimulationResult {
                input: "test1.json".to_string(),
                status: SimulationStatus::Converged,
                convergence: None,
                iterations: Some(10),
                state: None,
                performance: None,
                error: None,
                elapsed_ms: 50,
            },
            SimulationResult {
                input: "test2.json".to_string(),
                status: SimulationStatus::Error,
                convergence: None,
                iterations: None,
                state: None,
                performance: None,
                error: Some("Error".to_string()),
                elapsed_ms: 0,
            },
        ];
        let summary = build_summary(results, 100);
        assert_eq!(summary.total, 2);
        assert_eq!(summary.succeeded, 1);
        assert_eq!(summary.failed, 1);
        assert_eq!(summary.total_elapsed_ms, 100);
        assert_eq!(summary.avg_elapsed_ms, 25.0);
    }

    /// The summary serializes with the expected JSON field names.
    #[test]
    fn test_batch_summary_serialization() {
        let summary = BatchSummary {
            total: 10,
            succeeded: 8,
            failed: 1,
            non_converged: 1,
            total_elapsed_ms: 1000,
            avg_elapsed_ms: 100.0,
            results: vec![],
        };
        let json = serde_json::to_string_pretty(&summary).unwrap();
        assert!(json.contains("\"total\": 10"));
        assert!(json.contains("\"succeeded\": 8"));
    }
}

345
crates/cli/src/config.rs Normal file
View File

@@ -0,0 +1,345 @@
//! Configuration parsing for CLI scenarios.
//!
//! This module defines the JSON schema for scenario configuration files
//! and provides utilities for loading and validating them.
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use crate::error::{CliError, CliResult};
/// Root configuration for a simulation scenario.
///
/// `fluid` is the only required field; all other sections default to
/// empty/`None` when omitted from the JSON.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ScenarioConfig {
    /// Scenario name.
    #[serde(default)]
    pub name: Option<String>,
    /// Fluid name (e.g., "R134a", "R410A", "R744").
    pub fluid: String,
    /// Circuit configurations.
    #[serde(default)]
    pub circuits: Vec<CircuitConfig>,
    /// Thermal couplings between circuits.
    #[serde(default)]
    pub thermal_couplings: Vec<ThermalCouplingConfig>,
    /// Solver configuration.
    #[serde(default)]
    pub solver: SolverConfig,
    /// Optional metadata.
    ///
    /// Free-form key/value pairs carried through the config as-is.
    #[serde(default)]
    pub metadata: Option<HashMap<String, serde_json::Value>>,
}
/// Thermal coupling configuration between two circuits.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ThermalCouplingConfig {
    /// Hot circuit ID.
    pub hot_circuit: usize,
    /// Cold circuit ID.
    pub cold_circuit: usize,
    /// Thermal conductance in W/K.
    pub ua: f64,
    /// Heat exchanger efficiency (0.0 to 1.0).
    ///
    /// Defaults to 0.95 when omitted from the JSON.
    #[serde(default = "default_efficiency")]
    pub efficiency: f64,
}
/// Serde default for [`ThermalCouplingConfig::efficiency`].
fn default_efficiency() -> f64 {
    0.95
}
/// Configuration for a single circuit.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CircuitConfig {
    /// Circuit ID (default: 0).
    #[serde(default)]
    pub id: usize,
    /// Components in this circuit.
    ///
    /// Must be non-empty; validated in `ScenarioConfig::validate`.
    pub components: Vec<ComponentConfig>,
    /// Edge connections between components.
    #[serde(default)]
    pub edges: Vec<EdgeConfig>,
    /// Initial state for edges.
    #[serde(default)]
    pub initial_state: Option<InitialStateConfig>,
}
/// Configuration for a component.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ComponentConfig {
    /// Component type (e.g., "Compressor", "Condenser", "Evaporator", "ExpansionValve", "HeatExchanger").
    #[serde(rename = "type")]
    pub component_type: String,
    /// Component name for referencing in edges.
    pub name: String,
    /// Component-specific parameters.
    ///
    /// `#[serde(flatten)]` collects every JSON key other than `type` and
    /// `name` into this map, so component parameters sit at the same level
    /// as the component itself in the JSON.
    #[serde(flatten)]
    pub params: HashMap<String, serde_json::Value>,
}
/// Side conditions for a heat exchanger (hot or cold fluid).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SideConditionsConfig {
    /// Fluid name (e.g., "R134a", "Water", "Air").
    pub fluid: String,
    /// Inlet temperature in °C.
    pub t_inlet_c: f64,
    /// Pressure in bar.
    ///
    /// Defaults to 1.0 bar when omitted.
    #[serde(default = "default_pressure")]
    pub pressure_bar: f64,
    /// Mass flow rate in kg/s.
    ///
    /// Defaults to 0.1 kg/s when omitted.
    #[serde(default = "default_mass_flow")]
    pub mass_flow_kg_s: f64,
}
/// Serde default for [`SideConditionsConfig::pressure_bar`] (1.0 bar).
fn default_pressure() -> f64 {
    1.0
}
/// Serde default for [`SideConditionsConfig::mass_flow_kg_s`] (0.1 kg/s).
fn default_mass_flow() -> f64 {
    0.1
}
/// Compressor AHRI 540 coefficients configuration.
///
/// Only `m1` and `m2` are required; the remaining coefficients are optional
/// and default to `None` when omitted from the JSON.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Ahri540Config {
    /// Flow coefficient M1.
    pub m1: f64,
    /// Pressure ratio exponent M2.
    pub m2: f64,
    /// Power coefficients M3-M6 (cooling) and M7-M10 (heating).
    #[serde(default)]
    pub m3: Option<f64>,
    /// Optional coefficient M4.
    #[serde(default)]
    pub m4: Option<f64>,
    /// Optional coefficient M5.
    #[serde(default)]
    pub m5: Option<f64>,
    /// Optional coefficient M6.
    #[serde(default)]
    pub m6: Option<f64>,
    /// Optional coefficient M7.
    #[serde(default)]
    pub m7: Option<f64>,
    /// Optional coefficient M8.
    #[serde(default)]
    pub m8: Option<f64>,
    /// Optional coefficient M9.
    #[serde(default)]
    pub m9: Option<f64>,
    /// Optional coefficient M10.
    #[serde(default)]
    pub m10: Option<f64>,
}
/// Configuration for an edge between components.
///
/// Endpoints use the `component:port` form; the format is enforced by
/// `ScenarioConfig::validate`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct EdgeConfig {
    /// Source component and port (e.g., "comp1:outlet").
    pub from: String,
    /// Target component and port (e.g., "cond1:inlet").
    pub to: String,
}
/// Initial state configuration.
///
/// All fields are optional; absent values leave the corresponding initial
/// quantity unspecified.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct InitialStateConfig {
    /// Initial pressure in bar.
    pub pressure_bar: Option<f64>,
    /// Initial enthalpy in kJ/kg.
    pub enthalpy_kj_kg: Option<f64>,
    /// Initial temperature in Kelvin.
    pub temperature_k: Option<f64>,
}
/// Solver configuration.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SolverConfig {
    /// Solver strategy: "newton", "picard", or "fallback".
    ///
    /// Defaults to "fallback" when omitted.
    #[serde(default = "default_solver_strategy")]
    pub strategy: String,
    /// Maximum iterations.
    ///
    /// Defaults to 100 when omitted.
    #[serde(default = "default_max_iterations")]
    pub max_iterations: usize,
    /// Convergence tolerance.
    ///
    /// Defaults to 1e-6 when omitted.
    #[serde(default = "default_tolerance")]
    pub tolerance: f64,
    /// Timeout in milliseconds (0 = no timeout).
    #[serde(default)]
    pub timeout_ms: u64,
    /// Enable verbose output.
    #[serde(default)]
    pub verbose: bool,
}
/// Serde default for [`SolverConfig::strategy`]: the fallback solver chain.
fn default_solver_strategy() -> String {
    String::from("fallback")
}
/// Serde default for [`SolverConfig::max_iterations`] (100).
fn default_max_iterations() -> usize {
    100
}
/// Serde default for [`SolverConfig::tolerance`] (1e-6).
fn default_tolerance() -> f64 {
    1e-6
}
impl Default for SolverConfig {
fn default() -> Self {
Self {
strategy: default_solver_strategy(),
max_iterations: default_max_iterations(),
tolerance: default_tolerance(),
timeout_ms: 0,
verbose: false,
}
}
}
impl ScenarioConfig {
    /// Load a scenario configuration from a file.
    ///
    /// # Errors
    ///
    /// Returns [`CliError::ConfigNotFound`] when the file does not exist,
    /// [`CliError::Io`] for other read failures, [`CliError::InvalidConfig`]
    /// on malformed JSON, and [`CliError::Config`] on validation failure.
    pub fn from_file(path: &std::path::Path) -> CliResult<Self> {
        let content = std::fs::read_to_string(path).map_err(|e| {
            if e.kind() == std::io::ErrorKind::NotFound {
                CliError::ConfigNotFound(path.to_path_buf())
            } else {
                CliError::Io(e)
            }
        })?;
        // Delegate to `from_json` so parsing + validation live in one place.
        Self::from_json(&content)
    }
    /// Load a scenario configuration from a JSON string.
    ///
    /// # Errors
    ///
    /// Returns [`CliError::InvalidConfig`] on malformed JSON and
    /// [`CliError::Config`] when validation fails.
    pub fn from_json(json: &str) -> CliResult<Self> {
        let config: Self = serde_json::from_str(json).map_err(CliError::InvalidConfig)?;
        config.validate()?;
        Ok(config)
    }
    /// Validate the configuration.
    ///
    /// Checks that a fluid is set, that every circuit has at least one
    /// component, and that every edge endpoint uses the `component:port`
    /// form and names a component declared in the same circuit.
    pub fn validate(&self) -> CliResult<()> {
        if self.fluid.is_empty() {
            return Err(CliError::Config("fluid field is required".to_string()));
        }
        for (i, circuit) in self.circuits.iter().enumerate() {
            if circuit.components.is_empty() {
                return Err(CliError::Config(format!("circuit {} has no components", i)));
            }
            let component_names: std::collections::HashSet<&str> =
                circuit.components.iter().map(|c| c.name.as_str()).collect();
            // Shared builder for "unknown component" errors; previously this
            // message was duplicated for the `from` and `to` endpoints.
            let unknown_component = |component: &str, endpoint: &str| {
                CliError::Config(format!(
                    "edge references unknown component '{}' (in '{}'). Available: {}",
                    component,
                    endpoint,
                    component_names
                        .iter()
                        .cloned()
                        .collect::<Vec<_>>()
                        .join(", ")
                ))
            };
            for edge in &circuit.edges {
                let from_parts: Vec<&str> = edge.from.split(':').collect();
                let to_parts: Vec<&str> = edge.to.split(':').collect();
                if from_parts.len() != 2 || to_parts.len() != 2 {
                    return Err(CliError::Config(format!(
                        "invalid edge format '{} -> {}'. Expected 'component:port'",
                        edge.from, edge.to
                    )));
                }
                if !component_names.contains(from_parts[0]) {
                    return Err(unknown_component(from_parts[0], &edge.from));
                }
                if !component_names.contains(to_parts[0]) {
                    return Err(unknown_component(to_parts[0], &edge.to));
                }
            }
        }
        Ok(())
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    /// A config with only the required `fluid` field parses, with all
    /// optional sections defaulted.
    #[test]
    fn test_parse_minimal_config() {
        let json = r#"{ "fluid": "R134a" }"#;
        let config = ScenarioConfig::from_json(json).unwrap();
        assert_eq!(config.fluid, "R134a");
        assert!(config.circuits.is_empty());
    }

    /// A fully specified config (circuit, edges, initial state, solver)
    /// parses and validates.
    #[test]
    fn test_parse_full_config() {
        let json = r#"
{
"fluid": "R410A",
"circuits": [
{
"id": 0,
"components": [
{ "type": "Compressor", "name": "comp1", "ua": 5000.0 },
{ "type": "Condenser", "name": "cond1", "ua": 5000.0 }
],
"edges": [
{ "from": "comp1:outlet", "to": "cond1:inlet" }
],
"initial_state": {
"pressure_bar": 10.0,
"enthalpy_kj_kg": 400.0
}
}
],
"solver": {
"strategy": "newton",
"max_iterations": 50,
"tolerance": 1e-8
}
}"#;
        let config = ScenarioConfig::from_json(json).unwrap();
        assert_eq!(config.fluid, "R410A");
        assert_eq!(config.circuits.len(), 1);
        assert_eq!(config.circuits[0].components.len(), 2);
        assert_eq!(config.solver.strategy, "newton");
    }

    /// An empty fluid string is rejected by validation.
    #[test]
    fn test_validate_missing_fluid() {
        let json = r#"{ "fluid": "" }"#;
        let result = ScenarioConfig::from_json(json);
        assert!(result.is_err());
    }

    /// Edge endpoints that lack the `component:port` form are rejected.
    #[test]
    fn test_validate_invalid_edge_format() {
        let json = r#"
{
"fluid": "R134a",
"circuits": [{
"id": 0,
"components": [{ "type": "Compressor", "name": "comp1", "ua": 5000.0 }],
"edges": [{ "from": "invalid", "to": "also_invalid" }]
}]
}"#;
        let result = ScenarioConfig::from_json(json);
        assert!(result.is_err());
    }
}

68
crates/cli/src/error.rs Normal file
View File

@@ -0,0 +1,68 @@
//! Error handling for the CLI.
use std::path::PathBuf;
use thiserror::Error;
/// Exit codes for the CLI.
///
/// The discriminant values are the actual process exit codes; `main`
/// passes `code as i32` to `std::process::exit`.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ExitCode {
    /// Successful execution.
    Success = 0,
    /// Simulation error (non-convergence, validation failure).
    SimulationError = 1,
    /// Configuration error (invalid JSON, missing fields).
    ConfigError = 2,
    /// I/O error (file not found, permission denied).
    IoError = 3,
}
impl From<ExitCode> for i32 {
    /// Convert to the numeric process exit code via the enum discriminant.
    fn from(code: ExitCode) -> i32 {
        code as i32
    }
}
/// CLI-specific errors.
///
/// The `#[error(...)]` attributes provide the `Display` text via `thiserror`.
#[derive(Error, Debug)]
pub enum CliError {
    /// Generic configuration problem (validation failures, bad values).
    #[error("Configuration error: {0}")]
    Config(String),
    /// The given configuration file path does not exist.
    #[error("Configuration file not found: {0}")]
    ConfigNotFound(PathBuf),
    /// The configuration file exists but does not parse as valid JSON.
    #[error("Invalid configuration file: {0}")]
    InvalidConfig(#[source] serde_json::Error),
    /// Failure while building or running a simulation.
    #[error("Simulation error: {0}")]
    Simulation(String),
    /// Underlying I/O failure (auto-converted from `std::io::Error`).
    #[error("I/O error: {0}")]
    Io(#[from] std::io::Error),
    /// The batch directory does not exist.
    #[error("Batch directory not found: {0}")]
    BatchDirNotFound(PathBuf),
    /// The batch directory contains no `.json` files.
    #[error("No configuration files found in directory: {0}")]
    NoConfigFiles(PathBuf),
    /// Error bubbled up from the component library.
    #[error("Component error: {0}")]
    Component(#[from] entropyk_components::ComponentError),
}
impl CliError {
    /// Map each error variant onto the process exit code the CLI reports.
    pub fn exit_code(&self) -> ExitCode {
        match self {
            CliError::Config(_) | CliError::ConfigNotFound(_) | CliError::InvalidConfig(_) => {
                ExitCode::ConfigError
            }
            CliError::Simulation(_) | CliError::Component(_) => ExitCode::SimulationError,
            CliError::Io(_) | CliError::BatchDirNotFound(_) | CliError::NoConfigFiles(_) => {
                ExitCode::IoError
            }
        }
    }
}
/// Result type for CLI operations.
pub type CliResult<T> = Result<T, CliError>;

15
crates/cli/src/lib.rs Normal file
View File

@@ -0,0 +1,15 @@
//! # Entropyk CLI
//!
//! Command-line interface for batch thermodynamic simulations.
//!
//! This crate provides the `entropyk-cli` binary for running thermodynamic
//! simulations from the command line, supporting both single simulations
//! and batch processing.
// Public modules making up the CLI crate: batch orchestration, scenario
// configuration, error types, and single-run execution.
pub mod batch;
pub mod config;
pub mod error;
pub mod run;
// Convenience re-exports of the most commonly used types.
pub use config::ScenarioConfig;
pub use error::{CliError, CliResult, ExitCode};

242
crates/cli/src/main.rs Normal file
View File

@@ -0,0 +1,242 @@
//! Entropyk CLI - Batch thermodynamic simulation tool.
//!
//! A command-line interface for running thermodynamic simulations
//! in single or batch mode.
//!
//! # Usage
//!
//! ```text
//! entropyk-cli run config.json -o result.json
//! entropyk-cli batch ./scenarios/ --parallel 4
//! ```
use std::path::PathBuf;
use clap::{Parser, Subcommand};
use colored::Colorize;
use tracing::Level;
use tracing_subscriber::EnvFilter;
use entropyk_cli::error::{CliError, ExitCode};
// Top-level CLI definition. NOTE: the `///` comments on fields below double
// as clap help text at runtime, so they are deliberately left untouched.
#[derive(Parser)]
#[command(name = "entropyk-cli")]
#[command(author)]
#[command(version)]
#[command(about = "Batch thermodynamic simulation CLI", long_about = None)]
struct Cli {
    #[command(subcommand)]
    command: Commands,
    /// Enable verbose output
    #[arg(short, long, global = true)]
    verbose: bool,
    /// Suppress all output except errors
    #[arg(short, long, global = true, conflicts_with = "verbose")]
    quiet: bool,
}
// Subcommands. NOTE: variant and field `///` comments are clap help text at
// runtime, so they are deliberately left untouched.
#[derive(Subcommand)]
enum Commands {
    /// Run a single simulation from a configuration file
    Run {
        /// Path to the JSON configuration file
        #[arg(short, long, value_name = "FILE")]
        config: PathBuf,
        /// Path to write the JSON output (default: stdout)
        #[arg(short, long, value_name = "FILE")]
        output: Option<PathBuf>,
    },
    /// Run multiple simulations from a directory
    Batch {
        /// Directory containing JSON configuration files
        #[arg(short, long, value_name = "DIR")]
        directory: PathBuf,
        /// Directory to write output files
        #[arg(short, long, value_name = "DIR")]
        output_dir: Option<PathBuf>,
        /// Number of parallel workers
        #[arg(short, long, default_value = "4")]
        parallel: usize,
    },
    /// Validate a configuration file without running
    Validate {
        /// Path to the JSON configuration file
        #[arg(short, long, value_name = "FILE")]
        config: PathBuf,
    },
}
/// CLI entry point: parse arguments, initialize logging, dispatch the
/// selected subcommand, and exit with the appropriate process exit code.
fn main() {
    let cli = Cli::parse();
    // Verbosity flags choose the default tracing level; an explicit RUST_LOG
    // (read by EnvFilter) can still override it.
    let log_level = if cli.verbose {
        Level::DEBUG
    } else if cli.quiet {
        Level::ERROR
    } else {
        Level::INFO
    };
    tracing_subscriber::fmt()
        .with_env_filter(
            EnvFilter::builder()
                .with_default_directive(log_level.into())
                .from_env_lossy(),
        )
        .with_target(false)
        .init();
    let result = match cli.command {
        Commands::Run { config, output } => run_single(config, output, cli.verbose, cli.quiet),
        Commands::Batch {
            directory,
            output_dir,
            parallel,
        } => run_batch(directory, output_dir, parallel, cli.quiet, cli.verbose),
        Commands::Validate { config } => validate_config(config),
    };
    // Exit codes mirror CliError::exit_code; success is always 0.
    match result {
        Ok(()) => std::process::exit(ExitCode::Success as i32),
        Err(e) => {
            if !cli.quiet {
                eprintln!("{} {}", "Error:".red(), e);
            }
            std::process::exit(e.exit_code() as i32);
        }
    }
}
/// Run the `run` subcommand: execute one simulation and report the outcome.
///
/// Prints a banner and human-readable result unless `quiet`; in quiet mode
/// with no output file, the raw JSON result goes to stdout instead. Returns
/// an error (non-zero exit) when the simulation fails or does not converge.
fn run_single(
    config: PathBuf,
    output: Option<PathBuf>,
    verbose: bool,
    quiet: bool,
) -> Result<(), CliError> {
    use entropyk_cli::run::run_simulation;
    if !quiet {
        // The previous `"".repeat(60)` produced an empty string (separator
        // glyph lost), so the banner rules printed blank; restore a visible
        // 60-character rule.
        let rule = "=".repeat(60);
        println!("{}", rule.cyan());
        println!("{}", " ENTROPYK CLI - Single Simulation".cyan().bold());
        println!("{}", rule.cyan());
        println!();
    }
    let result = run_simulation(&config, output.as_deref(), verbose)?;
    if !quiet {
        print_result(&result);
    } else if output.is_none() {
        let json = serde_json::to_string(&result)
            .map_err(|e| CliError::Simulation(format!("Failed to serialize result: {}", e)))?;
        println!("{}", json);
    }
    // Translate the simulation status into the process result.
    match result.status {
        entropyk_cli::run::SimulationStatus::Converged => Ok(()),
        entropyk_cli::run::SimulationStatus::Timeout
        | entropyk_cli::run::SimulationStatus::NonConverged => Err(CliError::Simulation(
            "Simulation did not converge".to_string(),
        )),
        entropyk_cli::run::SimulationStatus::Error => Err(CliError::Simulation(
            result.error.unwrap_or_else(|| "Unknown error".to_string()),
        )),
    }
}
/// Print a human-readable single-simulation result to stdout.
fn print_result(result: &entropyk_cli::run::SimulationResult) {
    use colored::Colorize;
    // The previous `"".repeat(40)` produced an empty string (rule glyph
    // lost), so the framing rules printed blank; restore a visible rule.
    let rule = "-".repeat(40);
    println!("{}", rule.white());
    println!(" Input: {}", result.input);
    let status_str = match result.status {
        entropyk_cli::run::SimulationStatus::Converged => "CONVERGED".green(),
        entropyk_cli::run::SimulationStatus::Timeout => "TIMEOUT".yellow(),
        entropyk_cli::run::SimulationStatus::NonConverged => "NON-CONVERGED".yellow(),
        entropyk_cli::run::SimulationStatus::Error => "ERROR".red(),
    };
    println!(" Status: {}", status_str);
    if let Some(ref conv) = result.convergence {
        println!(" Residual: {:.2e}", conv.final_residual);
    }
    if let Some(iters) = result.iterations {
        println!(" Iterations: {}", iters);
    }
    println!(" Time: {} ms", result.elapsed_ms);
    if let Some(ref error) = result.error {
        println!();
        println!(" {} {}", "Error:".red(), error);
    }
    if let Some(ref state) = result.state {
        println!();
        println!(" {}", "Edge States:".cyan());
        for entry in state {
            println!(
                " Edge {}: P = {:.3} bar, h = {:.2} kJ/kg",
                entry.edge, entry.pressure_bar, entry.enthalpy_kj_kg
            );
        }
    }
    println!("{}", rule.white());
}
/// Run the `batch` subcommand and convert the summary into a process result.
///
/// Returns an error (non-zero exit) when any scenario failed or did not
/// converge, so scripts and CI can rely on the exit code.
fn run_batch(
    directory: PathBuf,
    output_dir: Option<PathBuf>,
    parallel: usize,
    quiet: bool,
    verbose: bool,
) -> Result<(), CliError> {
    use entropyk_cli::batch::run_batch;
    if !quiet {
        // The previous `"".repeat(60)` produced an empty string (separator
        // glyph lost); restore a visible 60-character rule.
        let rule = "=".repeat(60);
        println!("{}", rule.cyan());
        println!("{}", " ENTROPYK CLI - Batch Execution".cyan().bold());
        println!("{}", rule.cyan());
        println!();
    }
    let summary = run_batch(&directory, parallel, output_dir.as_deref(), quiet, verbose)?;
    if summary.failed > 0 || summary.non_converged > 0 {
        Err(CliError::Simulation(format!(
            "{} simulations failed, {} non-converged",
            summary.failed, summary.non_converged
        )))
    } else {
        Ok(())
    }
}
/// Run the `validate` subcommand: load and validate a configuration file
/// without executing the simulation.
fn validate_config(config: PathBuf) -> Result<(), CliError> {
    use entropyk_cli::config::ScenarioConfig;
    // The previous `"".repeat(60)` and `"".green()` printed empty strings
    // (the rule and check-mark glyphs were lost); restore visible ones.
    let rule = "=".repeat(60);
    println!("{}", rule.cyan());
    println!(
        "{}",
        " ENTROPYK CLI - Configuration Validation".cyan().bold()
    );
    println!("{}", rule.cyan());
    println!();
    let _cfg = ScenarioConfig::from_file(&config)?;
    println!(" {} Configuration is valid", "✓".green());
    println!(" File: {}", config.display());
    Ok(())
}

744
crates/cli/src/run.rs Normal file
View File

@@ -0,0 +1,744 @@
//! Single simulation execution module.
//!
//! Handles loading a configuration, running a simulation, and outputting results.
use std::path::Path;
use std::sync::Arc;
use serde::{Deserialize, Serialize};
use tracing::info;
use crate::config::ScenarioConfig;
use crate::error::{CliError, CliResult};
/// Result of a single simulation run.
///
/// This is the uniform shape returned for both successful and failed runs;
/// failure details live in `status` and `error` rather than in a `Result`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SimulationResult {
    /// Input configuration name or path.
    pub input: String,
    /// Simulation status.
    pub status: SimulationStatus,
    /// Convergence information.
    pub convergence: Option<ConvergenceInfo>,
    /// Solver iterations.
    pub iterations: Option<usize>,
    /// Final state vector (P, h per edge).
    pub state: Option<Vec<StateEntry>>,
    /// Performance metrics.
    pub performance: Option<PerformanceMetrics>,
    /// Error message if failed.
    pub error: Option<String>,
    /// Execution time in milliseconds.
    pub elapsed_ms: u64,
}
/// Performance metrics from simulation.
///
/// All fields are optional so partially computed metrics can be reported.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PerformanceMetrics {
    /// Cooling capacity in kW.
    pub q_cooling_kw: Option<f64>,
    /// Heating capacity in kW.
    pub q_heating_kw: Option<f64>,
    /// Compressor power in kW.
    pub compressor_power_kw: Option<f64>,
    /// Coefficient of performance.
    pub cop: Option<f64>,
}
/// Simulation status.
///
/// Serialized in snake_case (e.g. `"non_converged"`) in JSON output.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub enum SimulationStatus {
    /// The solver converged.
    Converged,
    /// The solver ran out of time (mapped from the solver's
    /// timed-out-with-best-state outcome).
    Timeout,
    /// The solver finished without converging.
    NonConverged,
    /// Setup or solver failure; details in `SimulationResult::error`.
    Error,
}
/// Convergence information.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ConvergenceInfo {
    /// Final residual norm.
    pub final_residual: f64,
    /// Convergence tolerance achieved.
    ///
    /// Taken from the configured solver tolerance of the run.
    pub tolerance: f64,
}
/// State entry for one edge.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct StateEntry {
    /// Edge index.
    pub edge: usize,
    /// Pressure in bar.
    pub pressure_bar: f64,
    /// Enthalpy in kJ/kg.
    pub enthalpy_kj_kg: f64,
}
/// Run a single simulation from a configuration file.
///
/// Loads and validates the configuration, executes the simulation, and —
/// when `output_path` is given — writes the result as pretty-printed JSON.
///
/// # Errors
///
/// Returns configuration and I/O errors from loading the file or writing
/// the output. Solver failures are reported inside the returned
/// [`SimulationResult`], not as `Err`.
pub fn run_simulation(
    config_path: &Path,
    output_path: Option<&Path>,
    verbose: bool,
) -> CliResult<SimulationResult> {
    let start = std::time::Instant::now();
    let input_name = config_path.display().to_string();
    if verbose {
        info!("Loading configuration from: {}", config_path.display());
    }
    let config = ScenarioConfig::from_file(config_path)?;
    if verbose {
        info!("Scenario: {:?}", config.name);
        info!("Primary fluid: {}", config.fluid);
        info!("Circuits: {}", config.circuits.len());
        info!("Thermal couplings: {}", config.thermal_couplings.len());
        info!("Solver: {}", config.solver.strategy);
    }
    let mut result = execute_simulation(&config, &input_name, start.elapsed().as_millis() as u64);
    // The elapsed value passed into `execute_simulation` was sampled *before*
    // the solver ran, so it only covered config loading; overwrite it with
    // the true total now that execution has finished (also so the written
    // JSON carries the correct time).
    result.elapsed_ms = start.elapsed().as_millis() as u64;
    if let Some(ref path) = output_path {
        let json = serde_json::to_string_pretty(&result)
            .map_err(|e| CliError::Simulation(format!("Failed to serialize result: {}", e)))?;
        std::fs::write(path, json)?;
        if verbose {
            info!("Results written to: {}", path.display());
        }
    }
    Ok(result)
}
/// Execute the simulation with the given configuration.
///
/// Builds an entropyk `System` from the config (components, then edges,
/// then thermal couplings), finalizes it, runs the configured solver
/// strategy, and maps the outcome into a [`SimulationResult`]. All failures
/// are returned as error-status results rather than `Err`, so callers get a
/// uniform shape.
///
/// NOTE(review): `elapsed_ms` is stamped into every result from the value
/// the caller passed in; it is not re-measured here.
fn execute_simulation(
    config: &ScenarioConfig,
    input_name: &str,
    elapsed_ms: u64,
) -> SimulationResult {
    use entropyk::{
        ConvergenceStatus, FallbackSolver, FluidId, NewtonConfig, PicardConfig, Solver,
        SolverStrategy, System, ThermalConductance,
    };
    use entropyk_fluids::TestBackend;
    use entropyk_solver::{CircuitId, ThermalCoupling};
    use std::collections::HashMap;
    let fluid_id = FluidId::new(&config.fluid);
    // NOTE(review): always uses TestBackend regardless of the configured
    // fluid — presumably a placeholder until a real property backend is
    // wired in; confirm before relying on physical accuracy.
    let backend: Arc<dyn entropyk_fluids::FluidBackend> = Arc::new(TestBackend::new());
    let mut system = System::new();
    // Track component name -> node index mapping per circuit
    // NOTE(review): the map is shared across all circuits, so components
    // with the same name in different circuits overwrite each other here.
    let mut component_indices: HashMap<String, petgraph::graph::NodeIndex> = HashMap::new();
    // Phase 1: instantiate components and add them to their circuits.
    for circuit_config in &config.circuits {
        let circuit_id = CircuitId(circuit_config.id as u8);
        for component_config in &circuit_config.components {
            match create_component(
                &component_config.component_type,
                &component_config.params,
                &fluid_id,
                Arc::clone(&backend),
            ) {
                Ok(component) => match system.add_component_to_circuit(component, circuit_id) {
                    Ok(node_id) => {
                        component_indices.insert(component_config.name.clone(), node_id);
                    }
                    Err(e) => {
                        return SimulationResult {
                            input: input_name.to_string(),
                            status: SimulationStatus::Error,
                            convergence: None,
                            iterations: None,
                            state: None,
                            performance: None,
                            error: Some(format!(
                                "Failed to add component '{}': {:?}",
                                component_config.name, e
                            )),
                            elapsed_ms,
                        };
                    }
                },
                Err(e) => {
                    return SimulationResult {
                        input: input_name.to_string(),
                        status: SimulationStatus::Error,
                        convergence: None,
                        iterations: None,
                        state: None,
                        performance: None,
                        error: Some(format!(
                            "Failed to create component '{}': {}",
                            component_config.name, e
                        )),
                        elapsed_ms,
                    };
                }
            }
        }
    }
    // Phase 2: add edges between components (endpoints are "name:port";
    // only the component name part is resolved here).
    for circuit_config in &config.circuits {
        for edge in &circuit_config.edges {
            let from_parts: Vec<&str> = edge.from.split(':').collect();
            let to_parts: Vec<&str> = edge.to.split(':').collect();
            let from_name = from_parts.get(0).unwrap_or(&"");
            let to_name = to_parts.get(0).unwrap_or(&"");
            let from_node = component_indices.get(*from_name);
            let to_node = component_indices.get(*to_name);
            match (from_node, to_node) {
                (Some(from), Some(to)) => {
                    if let Err(e) = system.add_edge(*from, *to) {
                        return SimulationResult {
                            input: input_name.to_string(),
                            status: SimulationStatus::Error,
                            convergence: None,
                            iterations: None,
                            state: None,
                            performance: None,
                            error: Some(format!(
                                "Failed to add edge '{} -> {}': {:?}",
                                edge.from, edge.to, e
                            )),
                            elapsed_ms,
                        };
                    }
                }
                _ => {
                    return SimulationResult {
                        input: input_name.to_string(),
                        status: SimulationStatus::Error,
                        convergence: None,
                        iterations: None,
                        state: None,
                        performance: None,
                        error: Some(format!(
                            "Edge references unknown component: '{}' or '{}'",
                            from_name, to_name
                        )),
                        elapsed_ms,
                    };
                }
            }
        }
    }
    // Phase 3: inter-circuit thermal couplings.
    for coupling_config in &config.thermal_couplings {
        let coupling = ThermalCoupling::new(
            CircuitId(coupling_config.hot_circuit as u8),
            CircuitId(coupling_config.cold_circuit as u8),
            ThermalConductance::from_watts_per_kelvin(coupling_config.ua),
        )
        .with_efficiency(coupling_config.efficiency);
        if let Err(e) = system.add_thermal_coupling(coupling) {
            return SimulationResult {
                input: input_name.to_string(),
                status: SimulationStatus::Error,
                convergence: None,
                iterations: None,
                state: None,
                performance: None,
                error: Some(format!("Failed to add thermal coupling: {:?}", e)),
                elapsed_ms,
            };
        }
    }
    if let Err(e) = system.finalize() {
        return SimulationResult {
            input: input_name.to_string(),
            status: SimulationStatus::Error,
            convergence: None,
            iterations: None,
            state: None,
            performance: None,
            error: Some(format!("System finalization failed: {:?}", e)),
            elapsed_ms,
        };
    }
    // Phase 4: run the configured solver; any unrecognized strategy string
    // falls through to the fallback solver.
    let result = match config.solver.strategy.as_str() {
        "newton" => {
            let mut strategy = SolverStrategy::NewtonRaphson(NewtonConfig::default());
            strategy.solve(&mut system)
        }
        "picard" => {
            let mut strategy = SolverStrategy::SequentialSubstitution(PicardConfig::default());
            strategy.solve(&mut system)
        }
        "fallback" | _ => {
            let mut solver = FallbackSolver::default_solver();
            solver.solve(&mut system)
        }
    };
    // Map the solver outcome into the CLI result shape.
    match result {
        Ok(converged) => {
            let status = match converged.status {
                ConvergenceStatus::Converged => SimulationStatus::Converged,
                ConvergenceStatus::TimedOutWithBestState => SimulationStatus::Timeout,
                ConvergenceStatus::ControlSaturation => SimulationStatus::NonConverged,
            };
            let state = extract_state(&converged);
            SimulationResult {
                input: input_name.to_string(),
                status,
                convergence: Some(ConvergenceInfo {
                    final_residual: converged.final_residual,
                    tolerance: config.solver.tolerance,
                }),
                iterations: Some(converged.iterations),
                state: Some(state),
                performance: None,
                error: None,
                elapsed_ms,
            }
        }
        Err(e) => SimulationResult {
            input: input_name.to_string(),
            status: SimulationStatus::Error,
            convergence: None,
            iterations: None,
            state: None,
            performance: None,
            error: Some(format!("Solver error: {:?}", e)),
            elapsed_ms,
        },
    }
}
/// Fetch a required numeric parameter from a component's parameter map.
///
/// # Errors
///
/// Returns [`CliError::Config`] when the key is absent or not a number.
fn get_param_f64(
    params: &std::collections::HashMap<String, serde_json::Value>,
    key: &str,
) -> CliResult<f64> {
    match params.get(key).and_then(serde_json::Value::as_f64) {
        Some(value) => Ok(value),
        None => Err(CliError::Config(format!(
            "Missing required parameter: {}",
            key
        ))),
    }
}
/// Look up a required string parameter by key, returning an owned copy.
///
/// Returns a `CliError::Config` naming the key when it is absent or not a
/// JSON string.
fn get_param_string(
    params: &std::collections::HashMap<String, serde_json::Value>,
    key: &str,
) -> CliResult<String> {
    let as_text = params.get(key).and_then(|v| v.as_str());
    as_text
        .map(str::to_string)
        .ok_or_else(|| CliError::Config(format!("Missing required parameter: {}", key)))
}
/// Build one side of a heat-exchanger boundary condition from `{prefix}_*`
/// parameters.
///
/// Required keys: `{prefix}_fluid`, `{prefix}_t_inlet_c`.
/// Optional keys with defaults: `{prefix}_pressure_bar` (1.0 bar),
/// `{prefix}_mass_flow_kg_s` (0.1 kg/s).
fn parse_side_conditions(
    params: &std::collections::HashMap<String, serde_json::Value>,
    prefix: &str,
) -> CliResult<entropyk::HxSideConditions> {
    use entropyk::{HxSideConditions, MassFlow, Pressure, Temperature};

    // Optional numeric parameter `{prefix}_{suffix}` with a fallback value.
    let optional = |suffix: &str, default: f64| {
        params
            .get(&format!("{}_{}", prefix, suffix))
            .and_then(|v| v.as_f64())
            .unwrap_or(default)
    };

    let fluid_name = get_param_string(params, &format!("{}_fluid", prefix))?;
    let inlet_c = get_param_f64(params, &format!("{}_t_inlet_c", prefix))?;
    let pressure_bar = optional("pressure_bar", 1.0);
    let mass_flow = optional("mass_flow_kg_s", 0.1);

    let conditions = HxSideConditions::new(
        Temperature::from_celsius(inlet_c),
        Pressure::from_bar(pressure_bar),
        MassFlow::from_kg_per_s(mass_flow),
        &fluid_name,
    )?;
    Ok(conditions)
}
/// Create a component from configuration.
///
/// Dispatches on `component_type` and pulls the component-specific settings
/// out of `params`; unknown types produce a `CliError::Config` listing the
/// supported names.
fn create_component(
    component_type: &str,
    params: &std::collections::HashMap<String, serde_json::Value>,
    _primary_fluid: &entropyk::FluidId,
    backend: Arc<dyn entropyk_fluids::FluidBackend>,
) -> CliResult<Box<dyn entropyk::Component>> {
    use entropyk::{Condenser, CondenserCoil, Evaporator, EvaporatorCoil, HeatExchanger};
    use entropyk_components::heat_exchanger::{FlowConfiguration, LmtdModel};

    // Optional numeric parameter with a fallback value.
    let opt_f64 =
        |key: &str, default: f64| params.get(key).and_then(|v| v.as_f64()).unwrap_or(default);

    match component_type {
        "Condenser" | "CondenserCoil" => {
            let ua = get_param_f64(params, "ua")?;
            // A fixed saturation temperature selects the coil variant.
            match params.get("t_sat_k").and_then(|v| v.as_f64()) {
                Some(t_sat) => Ok(Box::new(CondenserCoil::with_saturation_temp(ua, t_sat))),
                None => Ok(Box::new(Condenser::new(ua))),
            }
        }
        "Evaporator" | "EvaporatorCoil" => {
            let ua = get_param_f64(params, "ua")?;
            let t_sat_k = params.get("t_sat_k").and_then(|v| v.as_f64());
            let superheat_k = params.get("superheat_k").and_then(|v| v.as_f64());
            // With only a saturation temperature, fall back to 5 K superheat
            // on the coil variant; with neither, use the basic evaporator.
            match (t_sat_k, superheat_k) {
                (Some(t_sat), Some(sh)) => {
                    Ok(Box::new(Evaporator::with_superheat(ua, t_sat, sh)))
                }
                (Some(t_sat), None) => {
                    Ok(Box::new(EvaporatorCoil::with_superheat(ua, t_sat, 5.0)))
                }
                (None, _) => Ok(Box::new(Evaporator::new(ua))),
            }
        }
        "HeatExchanger" => {
            let ua = get_param_f64(params, "ua")?;
            let label = params
                .get("name")
                .and_then(|v| v.as_str())
                .unwrap_or("HeatExchanger");
            let lmtd = LmtdModel::new(ua, FlowConfiguration::CounterFlow);
            let mut exchanger = HeatExchanger::new(lmtd, label).with_fluid_backend(backend);
            // Side conditions are attached only when the corresponding fluid
            // key is present.
            if params.contains_key("hot_fluid") {
                exchanger = exchanger.with_hot_conditions(parse_side_conditions(params, "hot")?);
            }
            if params.contains_key("cold_fluid") {
                exchanger = exchanger.with_cold_conditions(parse_side_conditions(params, "cold")?);
            }
            Ok(Box::new(exchanger))
        }
        "Compressor" => {
            let speed_rpm = get_param_f64(params, "speed_rpm")?;
            let displacement_m3 = get_param_f64(params, "displacement_m3")?;
            let efficiency = opt_f64("efficiency", 0.85);
            let fluid = get_param_string(params, "fluid")?;
            // Map coefficients m1..m10, each defaulting to the builtin value.
            let comp = PyCompressor::new(&fluid, speed_rpm, displacement_m3, efficiency)
                .with_coefficients(
                    opt_f64("m1", 0.85),
                    opt_f64("m2", 2.5),
                    opt_f64("m3", 500.0),
                    opt_f64("m4", 1500.0),
                    opt_f64("m5", -2.5),
                    opt_f64("m6", 1.8),
                    opt_f64("m7", 600.0),
                    opt_f64("m8", 1600.0),
                    opt_f64("m9", -3.0),
                    opt_f64("m10", 2.0),
                );
            Ok(Box::new(comp))
        }
        "ExpansionValve" => {
            let fluid = get_param_string(params, "fluid")?;
            let opening = opt_f64("opening", 1.0);
            Ok(Box::new(PyExpansionValve::new(&fluid, opening)))
        }
        "Pump" => {
            let label = params
                .get("name")
                .and_then(|v| v.as_str())
                .unwrap_or("Pump");
            Ok(Box::new(SimpleComponent::new(label, 0)))
        }
        "Placeholder" => {
            let n_eqs = params
                .get("n_equations")
                .and_then(|v| v.as_u64())
                .unwrap_or(0) as usize;
            Ok(Box::new(SimpleComponent::new("", n_eqs)))
        }
        _ => Err(CliError::Config(format!(
            "Unknown component type: '{}'. Supported: Condenser, CondenserCoil, Evaporator, EvaporatorCoil, HeatExchanger, Compressor, ExpansionValve, Pump, Placeholder",
            component_type
        ))),
    }
}
/// Extract state entries from converged state.
///
/// The flat solver state is read as consecutive (pressure, enthalpy) pairs,
/// one pair per edge; values are converted from Pa to bar and from J/kg to
/// kJ/kg. A trailing odd element, if any, is ignored.
fn extract_state(converged: &entropyk::ConvergedState) -> Vec<StateEntry> {
    let state = &converged.state;
    let n_edges = state.len() / 2;
    let mut entries = Vec::with_capacity(n_edges);
    for edge in 0..n_edges {
        entries.push(StateEntry {
            edge,
            pressure_bar: state[2 * edge] / 1e5,
            enthalpy_kj_kg: state[2 * edge + 1] / 1000.0,
        });
    }
    entries
}
// =============================================================================
// Python-style components for CLI (no type-state pattern)
// =============================================================================
use entropyk_fluids::FluidId as FluidsFluidId;
use std::fmt;
/// Minimal stand-in component contributing `n_eqs` trivial equations.
///
/// Used by `create_component` for the "Pump" and "Placeholder" types, which
/// need to occupy a slot in the system without modelling any real physics.
struct SimpleComponent {
    // Display name; empty for placeholders.
    name: String,
    // Number of equations this component contributes to the system.
    n_eqs: usize,
}

impl SimpleComponent {
    /// Create a component with the given name and equation count.
    ///
    /// Accepts anything convertible into an owned `String` (so existing
    /// `&str` callers keep working), avoiding a forced `to_string` at the
    /// call site when the caller already owns a `String`.
    fn new(name: impl Into<String>, n_eqs: usize) -> Self {
        Self {
            name: name.into(),
            n_eqs,
        }
    }
}
impl entropyk::Component for SimpleComponent {
    /// Fill up to `n_eqs` residual slots (bounded by the residual vector's
    /// length): zero when the state is empty, otherwise a small multiple of
    /// a state entry chosen by wrapping the equation index.
    fn compute_residuals(
        &self,
        state: &entropyk::SystemState,
        residuals: &mut entropyk::ResidualVector,
    ) -> Result<(), entropyk::ComponentError> {
        let count = self.n_eqs.min(residuals.len());
        if state.is_empty() {
            for slot in 0..count {
                residuals[slot] = 0.0;
            }
        } else {
            let len = state.len();
            for slot in 0..count {
                residuals[slot] = state[slot % len] * 1e-3;
            }
        }
        Ok(())
    }

    /// Unit diagonal entries, one per contributed equation.
    fn jacobian_entries(
        &self,
        _state: &entropyk::SystemState,
        jacobian: &mut entropyk::JacobianBuilder,
    ) -> Result<(), entropyk::ComponentError> {
        for row in 0..self.n_eqs {
            jacobian.add_entry(row, row, 1.0);
        }
        Ok(())
    }

    fn n_equations(&self) -> usize {
        self.n_eqs
    }

    /// No ports: this stand-in does not connect to the flow network.
    fn get_ports(&self) -> &[entropyk::ConnectedPort] {
        &[]
    }
}
impl fmt::Debug for SimpleComponent {
    /// Debug-format both fields. The previous version omitted `n_eqs`,
    /// which made placeholder components with different equation counts
    /// indistinguishable in logs and inconsistent with the derived Debug
    /// impls on the other CLI components.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("SimpleComponent")
            .field("name", &self.name)
            .field("n_eqs", &self.n_eqs)
            .finish()
    }
}
/// Python-style compressor model for the CLI (no type-state pattern).
///
/// Contributes two trivial equations to the system; the stored geometry and
/// coefficients are not read by the visible residual/Jacobian code.
#[derive(Debug, Clone)]
struct PyCompressor {
    /// Working fluid identifier.
    fluid: FluidsFluidId,
    /// Shaft speed in RPM.
    speed_rpm: f64,
    /// Displacement volume in m^3.
    displacement_m3: f64,
    /// Efficiency factor; callers default this to 0.85.
    efficiency: f64,
    // m1..m10: ten performance-map coefficients with builtin defaults set in
    // `new` and overridable via `with_coefficients`.
    // NOTE(review): their physical meaning is not visible here — presumably a
    // 10-term polynomial compressor map; confirm against the solver side.
    m1: f64,
    m2: f64,
    m3: f64,
    m4: f64,
    m5: f64,
    m6: f64,
    m7: f64,
    m8: f64,
    m9: f64,
    m10: f64,
}
impl PyCompressor {
    /// Construct a compressor for the named fluid with the builtin default
    /// map coefficients (the same values `create_component` uses as its
    /// per-key fallbacks).
    fn new(fluid: &str, speed_rpm: f64, displacement_m3: f64, efficiency: f64) -> Self {
        Self {
            fluid: FluidsFluidId::new(fluid),
            speed_rpm,
            displacement_m3,
            efficiency,
            m1: 0.85,
            m2: 2.5,
            m3: 500.0,
            m4: 1500.0,
            m5: -2.5,
            m6: 1.8,
            m7: 600.0,
            m8: 1600.0,
            m9: -3.0,
            m10: 2.0,
        }
    }

    /// Replace all ten map coefficients at once (builder style), keeping the
    /// remaining fields via functional struct update.
    fn with_coefficients(
        self,
        m1: f64,
        m2: f64,
        m3: f64,
        m4: f64,
        m5: f64,
        m6: f64,
        m7: f64,
        m8: f64,
        m9: f64,
        m10: f64,
    ) -> Self {
        Self {
            m1,
            m2,
            m3,
            m4,
            m5,
            m6,
            m7,
            m8,
            m9,
            m10,
            ..self
        }
    }
}
impl entropyk::Component for PyCompressor {
    /// Zero every residual slot, then tie the first two residuals to the
    /// first two state entries (scaled) when the state is large enough.
    fn compute_residuals(
        &self,
        state: &entropyk::SystemState,
        residuals: &mut entropyk::ResidualVector,
    ) -> Result<(), entropyk::ComponentError> {
        residuals.iter_mut().for_each(|slot| *slot = 0.0);
        if state.len() >= 2 {
            for idx in 0..2 {
                residuals[idx] = state[idx] * 1e-3;
            }
        }
        Ok(())
    }

    /// Unit diagonal entries for this component's two equations.
    fn jacobian_entries(
        &self,
        _state: &entropyk::SystemState,
        jacobian: &mut entropyk::JacobianBuilder,
    ) -> Result<(), entropyk::ComponentError> {
        for diag in 0..2 {
            jacobian.add_entry(diag, diag, 1.0);
        }
        Ok(())
    }

    fn n_equations(&self) -> usize {
        2
    }

    /// No ports: this stand-in does not connect to the flow network.
    fn get_ports(&self) -> &[entropyk::ConnectedPort] {
        &[]
    }
}
/// Python-style expansion valve model for the CLI (no type-state pattern).
///
/// Contributes a single trivial equation; the stored fields are not read by
/// the visible residual/Jacobian code.
#[derive(Debug, Clone)]
struct PyExpansionValve {
    /// Working fluid identifier.
    fluid: FluidsFluidId,
    /// Valve opening; callers default this to 1.0.
    opening: f64,
}
impl PyExpansionValve {
    /// Build a valve for the named fluid at the given opening.
    fn new(fluid: &str, opening: f64) -> Self {
        Self {
            fluid: FluidsFluidId::new(fluid),
            opening,
        }
    }
}
impl entropyk::Component for PyExpansionValve {
    /// Zero every residual slot, then tie the first residual to the first
    /// state entry (scaled) when the state vector is non-empty.
    fn compute_residuals(
        &self,
        state: &entropyk::SystemState,
        residuals: &mut entropyk::ResidualVector,
    ) -> Result<(), entropyk::ComponentError> {
        residuals.iter_mut().for_each(|slot| *slot = 0.0);
        if !state.is_empty() {
            residuals[0] = state[0] * 1e-3;
        }
        Ok(())
    }

    /// Single unit diagonal entry for this component's one equation.
    fn jacobian_entries(
        &self,
        _state: &entropyk::SystemState,
        jacobian: &mut entropyk::JacobianBuilder,
    ) -> Result<(), entropyk::ComponentError> {
        jacobian.add_entry(0, 0, 1.0);
        Ok(())
    }

    fn n_equations(&self) -> usize {
        1
    }

    /// No ports: this stand-in does not connect to the flow network.
    fn get_ports(&self) -> &[entropyk::ConnectedPort] {
        &[]
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Statuses must serialize to their snake_case wire names.
    #[test]
    fn test_simulation_status_serialization() {
        let cases = [
            (SimulationStatus::Converged, "\"converged\""),
            (SimulationStatus::NonConverged, "\"non_converged\""),
        ];
        for (status, expected) in cases {
            assert_eq!(serde_json::to_string(&status).unwrap(), expected);
        }
    }

    /// A fully-populated result serializes with the expected status string
    /// and iteration count in pretty-printed JSON.
    #[test]
    fn test_simulation_result_serialization() {
        let sample = SimulationResult {
            input: "test.json".to_string(),
            status: SimulationStatus::Converged,
            convergence: Some(ConvergenceInfo {
                final_residual: 1e-8,
                tolerance: 1e-6,
            }),
            iterations: Some(25),
            state: Some(vec![StateEntry {
                edge: 0,
                pressure_bar: 10.0,
                enthalpy_kj_kg: 400.0,
            }]),
            performance: None,
            error: None,
            elapsed_ms: 50,
        };
        let rendered = serde_json::to_string_pretty(&sample).unwrap();
        assert!(rendered.contains("\"status\": \"converged\""));
        assert!(rendered.contains("\"iterations\": 25"));
    }
}