//! Sequential Substitution (Picard iteration) solver implementation.
//!
//! Provides [`PicardConfig`] which implements Picard iteration for solving
//! systems of non-linear equations. Slower than Newton-Raphson but more robust.
|
use std::time::{Duration, Instant};
|
|
|
|
use crate::criteria::ConvergenceCriteria;
|
|
use crate::metadata::SimulationMetadata;
|
|
use crate::solver::{ConvergedState, ConvergenceStatus, Solver, SolverError, TimeoutConfig};
|
|
use crate::system::System;
|
|
|
|
/// Configuration for the Sequential Substitution (Picard iteration) solver.
///
/// Solves x = G(x) by iterating: x_{k+1} = (1-ω)·x_k + ω·G(x_k)
/// where ω ∈ (0,1] is the relaxation factor.
///
/// With the residual form F(x) = x - G(x), this is algebraically the same as
/// the update applied by the solver: x_{k+1} = x_k - ω·F(x_k).
#[derive(Debug, Clone, PartialEq)]
pub struct PicardConfig {
    /// Maximum iterations. Default: 100.
    pub max_iterations: usize,
    /// Convergence tolerance (L2 norm). Default: 1e-6.
    pub tolerance: f64,
    /// Relaxation factor ω ∈ (0,1]. Default: 0.5.
    pub relaxation_factor: f64,
    /// Optional time budget.
    pub timeout: Option<Duration>,
    /// Divergence threshold. Default: 1e10.
    pub divergence_threshold: f64,
    /// Consecutive increases before divergence. Default: 5.
    pub divergence_patience: usize,
    /// Timeout behavior configuration.
    pub timeout_config: TimeoutConfig,
    /// Previous state for ZOH fallback.
    pub previous_state: Option<Vec<f64>>,
    /// Residual for previous_state.
    pub previous_residual: Option<f64>,
    /// Smart initial state for cold-start.
    pub initial_state: Option<Vec<f64>>,
    /// Multi-circuit convergence criteria.
    pub convergence_criteria: Option<ConvergenceCriteria>,
}
|
|
|
|
impl Default for PicardConfig {
|
|
fn default() -> Self {
|
|
Self {
|
|
max_iterations: 100,
|
|
tolerance: 1e-6,
|
|
relaxation_factor: 0.5,
|
|
timeout: None,
|
|
divergence_threshold: 1e10,
|
|
divergence_patience: 5,
|
|
timeout_config: TimeoutConfig::default(),
|
|
previous_state: None,
|
|
previous_residual: None,
|
|
initial_state: None,
|
|
convergence_criteria: None,
|
|
}
|
|
}
|
|
}
|
|
|
|
impl PicardConfig {
|
|
/// Sets the initial state for cold-start solving (Story 4.6 — builder pattern).
|
|
///
|
|
/// The solver will start from `state` instead of the zero vector.
|
|
/// Use [`SmartInitializer::populate_state`] to generate a physically reasonable
|
|
/// initial guess.
|
|
pub fn with_initial_state(mut self, state: Vec<f64>) -> Self {
|
|
self.initial_state = Some(state);
|
|
self
|
|
}
|
|
|
|
/// Sets multi-circuit convergence criteria (Story 4.7 — builder pattern).
|
|
///
|
|
/// When set, the solver uses [`ConvergenceCriteria::check()`] instead of the
|
|
/// raw L2-norm `tolerance` check.
|
|
pub fn with_convergence_criteria(mut self, criteria: ConvergenceCriteria) -> Self {
|
|
self.convergence_criteria = Some(criteria);
|
|
self
|
|
}
|
|
|
|
/// Computes the residual norm (L2 norm of the residual vector).
|
|
fn residual_norm(residuals: &[f64]) -> f64 {
|
|
residuals.iter().map(|r| r * r).sum::<f64>().sqrt()
|
|
}
|
|
|
|
/// Handles timeout based on configuration (Story 4.5).
|
|
///
|
|
/// Returns either:
|
|
/// - `Ok(ConvergedState)` with `TimedOutWithBestState` status (default)
|
|
/// - `Err(SolverError::Timeout)` if `return_best_state_on_timeout` is false
|
|
/// - Previous state (ZOH) if `zoh_fallback` is true and previous state available
|
|
fn handle_timeout(
|
|
&self,
|
|
best_state: &[f64],
|
|
best_residual: f64,
|
|
iterations: usize,
|
|
timeout: Duration,
|
|
system: &System,
|
|
) -> Result<ConvergedState, SolverError> {
|
|
// If configured to return error on timeout
|
|
if !self.timeout_config.return_best_state_on_timeout {
|
|
return Err(SolverError::Timeout {
|
|
timeout_ms: timeout.as_millis() as u64,
|
|
});
|
|
}
|
|
|
|
// If ZOH fallback is enabled and previous state is available
|
|
if self.timeout_config.zoh_fallback {
|
|
if let Some(ref prev_state) = self.previous_state {
|
|
let residual = self.previous_residual.unwrap_or(best_residual);
|
|
tracing::info!(
|
|
iterations = iterations,
|
|
residual = residual,
|
|
"Returning previous state (ZOH fallback) on timeout"
|
|
);
|
|
return Ok(ConvergedState::new(
|
|
prev_state.clone(),
|
|
iterations,
|
|
residual,
|
|
ConvergenceStatus::TimedOutWithBestState,
|
|
SimulationMetadata::new(system.input_hash()),
|
|
));
|
|
}
|
|
}
|
|
|
|
// Default: return best state encountered during iteration
|
|
tracing::info!(
|
|
iterations = iterations,
|
|
best_residual = best_residual,
|
|
"Returning best state on timeout"
|
|
);
|
|
Ok(ConvergedState::new(
|
|
best_state.to_vec(),
|
|
iterations,
|
|
best_residual,
|
|
ConvergenceStatus::TimedOutWithBestState,
|
|
SimulationMetadata::new(system.input_hash()),
|
|
))
|
|
}
|
|
|
|
/// Checks for divergence based on residual growth pattern.
|
|
///
|
|
/// Returns `Some(SolverError::Divergence)` if:
|
|
/// - Residual norm exceeds `divergence_threshold`, or
|
|
/// - Residual has increased for `divergence_patience`+ consecutive iterations
|
|
fn check_divergence(
|
|
&self,
|
|
current_norm: f64,
|
|
previous_norm: f64,
|
|
divergence_count: &mut usize,
|
|
) -> Option<SolverError> {
|
|
// Check absolute threshold
|
|
if current_norm > self.divergence_threshold {
|
|
return Some(SolverError::Divergence {
|
|
reason: format!(
|
|
"Residual norm {} exceeds threshold {}",
|
|
current_norm, self.divergence_threshold
|
|
),
|
|
});
|
|
}
|
|
|
|
// Check consecutive increases
|
|
if current_norm > previous_norm {
|
|
*divergence_count += 1;
|
|
if *divergence_count >= self.divergence_patience {
|
|
return Some(SolverError::Divergence {
|
|
reason: format!(
|
|
"Residual increased for {} consecutive iterations: {:.6e} → {:.6e}",
|
|
self.divergence_patience, previous_norm, current_norm
|
|
),
|
|
});
|
|
}
|
|
} else {
|
|
*divergence_count = 0;
|
|
}
|
|
|
|
None
|
|
}
|
|
|
|
/// Applies relaxation to the state update.
|
|
///
|
|
/// Update formula: x_new = x_old - omega * residual
|
|
/// where residual = F(x_k) represents the equation residuals.
|
|
///
|
|
/// This is the standard Picard iteration: x_{k+1} = x_k - ω·F(x_k)
|
|
fn apply_relaxation(state: &mut [f64], residuals: &[f64], omega: f64) {
|
|
for (x, &r) in state.iter_mut().zip(residuals.iter()) {
|
|
*x -= omega * r;
|
|
}
|
|
}
|
|
}
|
|
|
|
impl Solver for PicardConfig {
|
|
    /// Runs the Sequential Substitution (Picard) iteration until convergence,
    /// divergence, timeout, or `max_iterations` is exhausted.
    ///
    /// # Errors
    ///
    /// - [`SolverError::InvalidSystem`] — empty system, state/equation count
    ///   mismatch, or a residual-evaluation failure.
    /// - [`SolverError::Timeout`] — budget exceeded with
    ///   `return_best_state_on_timeout == false` (see `handle_timeout`).
    /// - [`SolverError::Divergence`] — residual blow-up or sustained growth.
    /// - [`SolverError::NonConvergence`] — `max_iterations` reached.
    fn solve(&mut self, system: &mut System) -> Result<ConvergedState, SolverError> {
        let start_time = Instant::now();

        tracing::info!(
            max_iterations = self.max_iterations,
            tolerance = self.tolerance,
            relaxation_factor = self.relaxation_factor,
            divergence_threshold = self.divergence_threshold,
            divergence_patience = self.divergence_patience,
            "Sequential Substitution (Picard) solver starting"
        );

        // Get system dimensions. Equation count sums per-component equations,
        // constraints, and inter-circuit coupling residuals.
        let n_state = system.full_state_vector_len();
        let n_equations: usize = system
            .traverse_for_jacobian()
            .map(|(_, c, _)| c.n_equations())
            .sum::<usize>()
            + system.constraints().count()
            + system.coupling_residual_count();

        // Validate system
        if n_state == 0 || n_equations == 0 {
            return Err(SolverError::InvalidSystem {
                message: "Empty system has no state variables or equations".to_string(),
            });
        }

        // Validate state/equation dimensions (square system required).
        if n_state != n_equations {
            return Err(SolverError::InvalidSystem {
                message: format!(
                    "State dimension ({}) does not match equation count ({})",
                    n_state, n_equations
                ),
            });
        }

        // Pre-allocate all buffers (AC: #6 - no heap allocation in iteration loop)
        // Story 4.6 - AC: #8: Use initial_state if provided, else start from zeros.
        // NOTE(review): a length-mismatched initial_state panics only in debug
        // builds; in release it silently falls back to zeros — confirm intended.
        let mut state: Vec<f64> = self
            .initial_state
            .as_ref()
            .map(|s| {
                debug_assert_eq!(
                    s.len(),
                    n_state,
                    "initial_state length mismatch: expected {}, got {}",
                    n_state,
                    s.len()
                );
                if s.len() == n_state {
                    s.clone()
                } else {
                    vec![0.0; n_state]
                }
            })
            .unwrap_or_else(|| vec![0.0; n_state]);
        let mut prev_iteration_state: Vec<f64> = vec![0.0; n_state]; // For convergence delta check
        let mut residuals: Vec<f64> = vec![0.0; n_equations];
        let mut divergence_count: usize = 0;
        let mut previous_norm: f64;

        // Pre-allocate best-state tracking buffer (Story 4.5 - AC: #5)
        let mut best_state: Vec<f64> = vec![0.0; n_state];
        let mut best_residual: f64;

        // Initial residual computation
        system
            .compute_residuals(&state, &mut residuals)
            .map_err(|e| SolverError::InvalidSystem {
                message: format!("Failed to compute initial residuals: {:?}", e),
            })?;

        let mut current_norm = Self::residual_norm(&residuals);

        // Initialize best state tracking with initial state
        best_state.copy_from_slice(&state);
        best_residual = current_norm;

        tracing::debug!(iteration = 0, residual_norm = current_norm, "Initial state");

        // Check if already converged (raw tolerance check only — criteria,
        // when configured, are evaluated inside the loop).
        if current_norm < self.tolerance {
            tracing::info!(
                iterations = 0,
                final_residual = current_norm,
                "System already converged at initial state"
            );
            return Ok(ConvergedState::new(
                state,
                0,
                current_norm,
                ConvergenceStatus::Converged,
                SimulationMetadata::new(system.input_hash()),
            ));
        }

        // Main Picard iteration loop
        for iteration in 1..=self.max_iterations {
            // Save state before step for convergence criteria delta checks
            prev_iteration_state.copy_from_slice(&state);

            // Check timeout at iteration start (Story 4.5 - AC: #1)
            if let Some(timeout) = self.timeout {
                if start_time.elapsed() > timeout {
                    tracing::info!(
                        iteration = iteration,
                        elapsed_ms = start_time.elapsed().as_millis(),
                        timeout_ms = timeout.as_millis(),
                        best_residual = best_residual,
                        "Solver timed out"
                    );

                    // Story 4.5 - AC: #2, #6: Return best state or error based on config.
                    // `iteration - 1` = iterations actually completed.
                    return self.handle_timeout(
                        &best_state,
                        best_residual,
                        iteration - 1,
                        timeout,
                        system,
                    );
                }
            }

            // Apply relaxed update: x_new = x_old - omega * residual (AC: #2, #3)
            Self::apply_relaxation(&mut state, &residuals, self.relaxation_factor);

            // Compute new residuals
            system
                .compute_residuals(&state, &mut residuals)
                .map_err(|e| SolverError::InvalidSystem {
                    message: format!("Failed to compute residuals: {:?}", e),
                })?;

            previous_norm = current_norm;
            current_norm = Self::residual_norm(&residuals);

            // Update best state if residual improved (Story 4.5 - AC: #2)
            if current_norm < best_residual {
                best_state.copy_from_slice(&state);
                best_residual = current_norm;
                tracing::debug!(
                    iteration = iteration,
                    best_residual = best_residual,
                    "Best state updated"
                );
            }

            tracing::debug!(
                iteration = iteration,
                residual_norm = current_norm,
                relaxation_factor = self.relaxation_factor,
                "Picard iteration complete"
            );

            // Check convergence (AC: #1, Story 4.7 — criteria-aware).
            // When criteria are set they fully REPLACE the tolerance check.
            let converged = if let Some(ref criteria) = self.convergence_criteria {
                let report =
                    criteria.check(&state, Some(&prev_iteration_state), &residuals, system);
                if report.is_globally_converged() {
                    tracing::info!(
                        iterations = iteration,
                        final_residual = current_norm,
                        relaxation_factor = self.relaxation_factor,
                        "Sequential Substitution converged (criteria)"
                    );
                    return Ok(ConvergedState::with_report(
                        state,
                        iteration,
                        current_norm,
                        ConvergenceStatus::Converged,
                        report,
                        SimulationMetadata::new(system.input_hash()),
                    ));
                }
                false
            } else {
                current_norm < self.tolerance
            };

            if converged {
                tracing::info!(
                    iterations = iteration,
                    final_residual = current_norm,
                    relaxation_factor = self.relaxation_factor,
                    "Sequential Substitution converged"
                );
                return Ok(ConvergedState::new(
                    state,
                    iteration,
                    current_norm,
                    ConvergenceStatus::Converged,
                    SimulationMetadata::new(system.input_hash()),
                ));
            }

            // Check divergence (AC: #5) — after convergence so a converged
            // final step can never be reported as divergent.
            if let Some(err) =
                self.check_divergence(current_norm, previous_norm, &mut divergence_count)
            {
                tracing::warn!(
                    iteration = iteration,
                    residual_norm = current_norm,
                    "Divergence detected"
                );
                return Err(err);
            }
        }

        // Max iterations exceeded
        tracing::warn!(
            max_iterations = self.max_iterations,
            final_residual = current_norm,
            "Sequential Substitution did not converge"
        );
        Err(SolverError::NonConvergence {
            iterations: self.max_iterations,
            final_residual: current_norm,
        })
    }
|
|
|
|
fn with_timeout(mut self, timeout: Duration) -> Self {
|
|
self.timeout = Some(timeout);
|
|
self
|
|
}
|
|
}
|
|
|
|
#[cfg(test)]
mod tests {
    use super::*;
    use crate::solver::Solver;
    use crate::system::System;
    use std::time::Duration;

    // Builder sets the timeout field verbatim.
    #[test]
    fn test_picard_config_with_timeout() {
        let timeout = Duration::from_millis(250);
        let cfg = PicardConfig::default().with_timeout(timeout);
        assert_eq!(cfg.timeout, Some(timeout));
    }

    // Defaults must be in sane numeric ranges (tolerance positive and tight,
    // relaxation factor within (0, 1]).
    #[test]
    fn test_picard_config_default_sensible() {
        let cfg = PicardConfig::default();
        assert_eq!(cfg.max_iterations, 100);
        assert!(cfg.tolerance > 0.0 && cfg.tolerance < 1e-3);
        assert!(cfg.relaxation_factor > 0.0 && cfg.relaxation_factor <= 1.0);
    }

    // x_new = x_old - omega * residual, elementwise.
    #[test]
    fn test_picard_apply_relaxation_formula() {
        let mut state = vec![10.0, 20.0];
        let residuals = vec![1.0, 2.0];
        PicardConfig::apply_relaxation(&mut state, &residuals, 0.5);
        assert!((state[0] - 9.5).abs() < 1e-15);
        assert!((state[1] - 19.0).abs() < 1e-15);
    }

    // Classic 3-4-5 check of the L2 norm.
    #[test]
    fn test_picard_residual_norm() {
        let residuals = vec![3.0, 4.0];
        let norm = PicardConfig::residual_norm(&residuals);
        assert!((norm - 5.0).abs() < 1e-15);
    }

    // PicardConfig is usable through `dyn Solver`; an empty system is rejected.
    #[test]
    fn test_picard_solver_trait_object() {
        let mut boxed: Box<dyn Solver> = Box::new(PicardConfig::default());
        let mut system = System::new();
        system.finalize().unwrap();
        assert!(boxed.solve(&mut system).is_err());
    }
}
|