From 4a40fddfe35859490f880974dcf3070c1f517d25 Mon Sep 17 00:00:00 2001 From: Sepehr Date: Fri, 20 Feb 2026 22:01:38 +0100 Subject: [PATCH] feat(components): add ThermoState generators and Eurovent backend demo --- .../workflows/bmad-agent-bmad-master.md | 16 + .../workflows/bmad-agent-bmb-agent-builder.md | 16 + .../bmad-agent-bmb-module-builder.md | 16 + .../bmad-agent-bmb-workflow-builder.md | 16 + .../workflows/bmad-agent-bmm-analyst.md | 16 + .../workflows/bmad-agent-bmm-architect.md | 16 + .clinerules/workflows/bmad-agent-bmm-dev.md | 16 + .clinerules/workflows/bmad-agent-bmm-pm.md | 16 + .clinerules/workflows/bmad-agent-bmm-qa.md | 16 + .../bmad-agent-bmm-quick-flow-solo-dev.md | 16 + .clinerules/workflows/bmad-agent-bmm-sm.md | 16 + .../workflows/bmad-agent-bmm-tech-writer.md | 16 + .../workflows/bmad-agent-bmm-ux-designer.md | 16 + .../bmad-agent-cis-brainstorming-coach.md | 16 + .../bmad-agent-cis-creative-problem-solver.md | 16 + .../bmad-agent-cis-design-thinking-coach.md | 16 + .../bmad-agent-cis-innovation-strategist.md | 16 + .../bmad-agent-cis-presentation-master.md | 16 + .../workflows/bmad-agent-cis-storyteller.md | 16 + .../workflows/bmad-bmb-create-agent.md | 10 + .../workflows/bmad-bmb-create-module-brief.md | 10 + .../workflows/bmad-bmb-create-module.md | 10 + .../workflows/bmad-bmb-create-workflow.md | 10 + .clinerules/workflows/bmad-bmb-edit-agent.md | 10 + .clinerules/workflows/bmad-bmb-edit-module.md | 10 + .../workflows/bmad-bmb-edit-workflow.md | 10 + .../workflows/bmad-bmb-rework-workflow.md | 10 + .../workflows/bmad-bmb-validate-agent.md | 10 + ...bmad-bmb-validate-max-parallel-workflow.md | 10 + .../workflows/bmad-bmb-validate-module.md | 10 + .../workflows/bmad-bmb-validate-workflow.md | 10 + ...bmad-bmm-check-implementation-readiness.md | 10 + .clinerules/workflows/bmad-bmm-code-review.md | 15 + .../workflows/bmad-bmm-correct-course.md | 15 + .../workflows/bmad-bmm-create-architecture.md | 10 + 
.../bmad-bmm-create-epics-and-stories.md | 10 + .clinerules/workflows/bmad-bmm-create-prd.md | 10 + .../bmad-bmm-create-product-brief.md | 10 + .../workflows/bmad-bmm-create-story.md | 15 + .../workflows/bmad-bmm-create-ux-design.md | 10 + .clinerules/workflows/bmad-bmm-dev-story.md | 15 + .../workflows/bmad-bmm-document-project.md | 15 + .../workflows/bmad-bmm-domain-research.md | 10 + .clinerules/workflows/bmad-bmm-edit-prd.md | 10 + .../bmad-bmm-generate-project-context.md | 10 + .../workflows/bmad-bmm-market-research.md | 10 + .clinerules/workflows/bmad-bmm-qa-automate.md | 15 + .clinerules/workflows/bmad-bmm-quick-dev.md | 10 + .clinerules/workflows/bmad-bmm-quick-spec.md | 10 + .../workflows/bmad-bmm-retrospective.md | 15 + .../workflows/bmad-bmm-sprint-planning.md | 15 + .../workflows/bmad-bmm-sprint-status.md | 15 + .../workflows/bmad-bmm-technical-research.md | 10 + .../workflows/bmad-bmm-validate-prd.md | 10 + .clinerules/workflows/bmad-brainstorming.md | 10 + .../workflows/bmad-cis-design-thinking.md | 15 + .../workflows/bmad-cis-innovation-strategy.md | 15 + .../workflows/bmad-cis-problem-solving.md | 15 + .../workflows/bmad-cis-storytelling.md | 15 + .../workflows/bmad-editorial-review-prose.md | 10 + .../bmad-editorial-review-structure.md | 10 + .clinerules/workflows/bmad-help.md | 10 + .clinerules/workflows/bmad-index-docs.md | 10 + .clinerules/workflows/bmad-party-mode.md | 10 + .../bmad-review-adversarial-general.md | 10 + .clinerules/workflows/bmad-shard-doc.md | 10 + .opencode/agent/bmad-agent-bmad-master.md | 2 +- .../agent/bmad-agent-bmb-agent-builder.md | 2 +- .../agent/bmad-agent-bmb-module-builder.md | 2 +- .../agent/bmad-agent-bmb-workflow-builder.md | 2 +- .opencode/agent/bmad-agent-bmm-analyst.md | 2 +- .opencode/agent/bmad-agent-bmm-architect.md | 2 +- .opencode/agent/bmad-agent-bmm-dev.md | 2 +- .opencode/agent/bmad-agent-bmm-pm.md | 2 +- .opencode/agent/bmad-agent-bmm-qa.md | 2 +- .../bmad-agent-bmm-quick-flow-solo-dev.md | 2 +- 
.opencode/agent/bmad-agent-bmm-sm.md | 2 +- .opencode/agent/bmad-agent-bmm-tech-writer.md | 2 +- .opencode/agent/bmad-agent-bmm-ux-designer.md | 2 +- .../bmad-agent-cis-brainstorming-coach.md | 2 +- .../bmad-agent-cis-creative-problem-solver.md | 2 +- .../bmad-agent-cis-design-thinking-coach.md | 2 +- .../bmad-agent-cis-innovation-strategist.md | 2 +- .../bmad-agent-cis-presentation-master.md | 2 +- .opencode/agent/bmad-agent-cis-storyteller.md | 2 +- .opencode/command/bmad-bmb-create-agent.md | 2 + .../command/bmad-bmb-create-module-brief.md | 2 + .opencode/command/bmad-bmb-create-module.md | 2 + .opencode/command/bmad-bmb-create-workflow.md | 2 + .opencode/command/bmad-bmb-edit-agent.md | 2 + .opencode/command/bmad-bmb-edit-module.md | 2 + .opencode/command/bmad-bmb-edit-workflow.md | 2 + .opencode/command/bmad-bmb-rework-workflow.md | 2 + .opencode/command/bmad-bmb-validate-agent.md | 2 + ...bmad-bmb-validate-max-parallel-workflow.md | 2 + .opencode/command/bmad-bmb-validate-module.md | 2 + .../command/bmad-bmb-validate-workflow.md | 2 + ...bmad-bmm-check-implementation-readiness.md | 2 + .opencode/command/bmad-bmm-code-review.md | 2 + .opencode/command/bmad-bmm-correct-course.md | 2 + .../command/bmad-bmm-create-architecture.md | 2 + .../bmad-bmm-create-epics-and-stories.md | 2 + .opencode/command/bmad-bmm-create-prd.md | 2 + .../command/bmad-bmm-create-product-brief.md | 2 + .opencode/command/bmad-bmm-create-story.md | 2 + .../command/bmad-bmm-create-ux-design.md | 2 + .opencode/command/bmad-bmm-dev-story.md | 2 + .../command/bmad-bmm-document-project.md | 2 + .opencode/command/bmad-bmm-domain-research.md | 2 + .opencode/command/bmad-bmm-edit-prd.md | 2 + .../bmad-bmm-generate-project-context.md | 2 + .opencode/command/bmad-bmm-market-research.md | 2 + .opencode/command/bmad-bmm-qa-automate.md | 2 + .opencode/command/bmad-bmm-quick-dev.md | 2 + .opencode/command/bmad-bmm-quick-spec.md | 2 + .opencode/command/bmad-bmm-retrospective.md | 2 + 
.opencode/command/bmad-bmm-sprint-planning.md | 2 + .opencode/command/bmad-bmm-sprint-status.md | 2 + .../command/bmad-bmm-technical-research.md | 2 + .opencode/command/bmad-bmm-validate-prd.md | 2 + .opencode/command/bmad-brainstorming.md | 2 + .opencode/command/bmad-cis-design-thinking.md | 2 + .../command/bmad-cis-innovation-strategy.md | 2 + .opencode/command/bmad-cis-problem-solving.md | 2 + .opencode/command/bmad-cis-storytelling.md | 2 + .../command/bmad-editorial-review-prose.md | 2 + .../bmad-editorial-review-structure.md | 2 + .opencode/command/bmad-help.md | 2 + .opencode/command/bmad-index-docs.md | 2 + .opencode/command/bmad-party-mode.md | 2 + .../bmad-review-adversarial-general.md | 2 + .opencode/command/bmad-shard-doc.md | 2 + Cargo.toml | 4 +- EXAMPLES.md | 553 +++++ README_STORY_1_3.md | 355 +++ _bmad-output/planning-artifacts/epics.md | 170 +- _bmad-output/planning-artifacts/prd.md | 22 +- _bmad/_config/agent-manifest.csv | 40 +- _bmad/_config/bmad-help.csv | 96 +- _bmad/_config/files-manifest.csv | 78 +- _bmad/_config/ides/antigravity.yaml | 2 +- _bmad/_config/ides/cline.yaml | 5 + _bmad/_config/ides/cursor.yaml | 2 +- _bmad/_config/ides/opencode.yaml | 2 +- _bmad/_config/manifest.yaml | 27 +- _bmad/_config/workflow-manifest.csv | 24 +- _bmad/_memory/config.yaml | 4 +- _bmad/bmb/config.yaml | 4 +- .../agent/steps-c/step-08-celebrate.md | 28 +- .../workflows/module/module-help-generate.md | 6 +- _bmad/bmm/agents/analyst.md | 2 +- _bmad/bmm/agents/architect.md | 2 +- _bmad/bmm/agents/dev.md | 2 +- _bmad/bmm/agents/pm.md | 2 +- _bmad/bmm/agents/qa.md | 2 +- _bmad/bmm/agents/quick-flow-solo-dev.md | 2 +- _bmad/bmm/agents/sm.md | 2 +- _bmad/bmm/agents/tech-writer/tech-writer.md | 4 +- _bmad/bmm/agents/ux-designer.md | 4 +- _bmad/bmm/config.yaml | 4 +- .../create-prd/steps-c/step-02-discovery.md | 2 +- .../create-prd/steps-c/step-02b-vision.md | 154 ++ .../steps-c/step-02c-executive-summary.md | 170 ++ .../create-prd/steps-v/step-v-01-discovery.md | 
18 +- .../create-prd/workflow-validate-prd.md | 2 - .../code-review/workflow.yaml | 6 +- .../correct-course/instructions.md | 1 + .../correct-course/workflow.yaml | 4 +- .../create-story/checklist.md | 4 +- .../create-story/instructions.xml | 3 +- .../create-story/workflow.yaml | 22 +- .../dev-story/instructions.xml | 4 +- .../4-implementation/dev-story/workflow.yaml | 2 - .../retrospective/instructions.md | 27 +- .../retrospective/workflow.yaml | 4 +- .../sprint-planning/instructions.md | 1 + .../sprint-planning/workflow.yaml | 32 +- .../sprint-status/instructions.md | 1 + .../sprint-status/workflow.yaml | 7 +- .../bmad-quick-flow/quick-dev/workflow.md | 2 +- .../quick-spec/steps/step-01-understand.md | 2 +- .../bmad-quick-flow/quick-spec/workflow.md | 3 +- .../document-project/instructions.md | 125 +- .../templates/project-scan-report-schema.json | 4 +- .../workflows/document-project/workflow.yaml | 2 +- .../workflows/deep-dive-instructions.md | 8 +- .../document-project/workflows/deep-dive.yaml | 2 +- .../workflows/full-scan-instructions.md | 26 +- .../document-project/workflows/full-scan.yaml | 2 +- _bmad/bmm/workflows/qa/automate/workflow.yaml | 7 +- _bmad/cis/config.yaml | 4 +- _bmad/core/agents/bmad-master.md | 4 +- _bmad/core/config.yaml | 4 +- _bmad/external_model.rs | 555 +++++ crates/components/Cargo.toml | 16 +- crates/components/src/compressor.rs | 2018 +++++++++++++++++ crates/components/src/expansion_valve.rs | 1434 ++++++++++++ crates/components/src/external_model.rs | 628 +++++ crates/components/src/fan.rs | 636 ++++++ .../src/heat_exchanger/condenser.rs | 249 ++ .../src/heat_exchanger/condenser_coil.rs | 195 ++ .../src/heat_exchanger/economizer.rs | 251 ++ .../components/src/heat_exchanger/eps_ntu.rs | 344 +++ .../src/heat_exchanger/evaporator.rs | 292 +++ .../src/heat_exchanger/evaporator_coil.rs | 208 ++ .../src/heat_exchanger/exchanger.rs | 28 +- crates/components/src/heat_exchanger/lmtd.rs | 398 ++++ crates/components/src/heat_exchanger/mod.rs 
| 51 + crates/components/src/heat_exchanger/model.rs | 204 ++ crates/components/src/pipe.rs | 1011 +++++++++ crates/components/src/polynomials.rs | 702 ++++++ crates/components/src/port.rs | 753 ++++++ crates/components/src/pump.rs | 780 +++++++ crates/components/src/state_machine.rs | 940 ++++++++ crates/core/Cargo.toml | 1 + crates/core/src/calib.rs | 175 ++ crates/core/src/lib.rs | 9 +- crates/core/src/types.rs | 228 ++ crates/fluids/Cargo.toml | 28 + crates/fluids/benches/cache_10k.rs | 54 + crates/fluids/build.rs | 18 + crates/fluids/coolprop-sys/build.rs | 64 + crates/fluids/coolprop-sys/src/lib.rs | 336 +++ crates/fluids/data/r134a.json | 63 + crates/fluids/src/backend.rs | 166 ++ crates/fluids/src/cache.rs | 235 ++ crates/fluids/src/cached_backend.rs | 174 ++ crates/fluids/src/coolprop.rs | 647 ++++++ crates/fluids/src/damped_backend.rs | 341 +++ crates/fluids/src/damping.rs | 452 ++++ crates/fluids/src/errors.rs | 104 + crates/fluids/src/incompressible.rs | 578 +++++ crates/fluids/src/lib.rs | 69 + crates/fluids/src/mixture.rs | 357 +++ crates/fluids/src/tabular/generator.rs | 273 +++ crates/fluids/src/tabular/interpolate.rs | 152 ++ crates/fluids/src/tabular/mod.rs | 11 + crates/fluids/src/tabular/table.rs | 286 +++ crates/fluids/src/tabular_backend.rs | 543 +++++ crates/fluids/src/test_backend.rs | 430 ++++ crates/fluids/src/types.rs | 369 +++ crates/solver/Cargo.toml | 23 + crates/solver/src/coupling.rs | 435 ++++ crates/solver/src/error.rs | 72 + crates/solver/src/graph.rs | 6 + crates/solver/src/initializer.rs | 675 ++++++ crates/solver/src/jacobian.rs | 20 + crates/solver/src/lib.rs | 33 + crates/solver/src/solver.rs | 204 +- crates/solver/src/system.rs | 1608 +++++++++++++ crates/solver/tests/fallback_solver.rs | 672 ++++++ crates/solver/tests/multi_circuit.rs | 239 ++ crates/solver/tests/newton_convergence.rs | 480 ++++ crates/solver/tests/newton_raphson.rs | 254 +++ crates/solver/tests/picard_sequential.rs | 410 ++++ 
crates/solver/tests/smart_initializer.rs | 267 +++ demo/Cargo.toml | 44 + demo/README.md | 141 ++ demo/eurovent_report.html | 422 ++++ demo/src/bin/chiller.rs | 563 +++++ demo/src/bin/eurovent.rs | 10 + demo/src/bin/expansion_valve.rs | 32 + demo/src/bin/pipe.rs | 40 + demo/src/bin/ports.rs | 43 + demo/src/bin/pump.rs | 43 + demo/src/bin/pump_compressor_polynomials.rs | 131 ++ demo/src/bin/thermal_coupling.rs | 428 ++++ demo/src/bin/ui_server.rs | 313 +++ demo/src/main.rs | 107 + ui/README.md | 31 + ui/index.html | 381 ++++ 271 files changed, 28614 insertions(+), 447 deletions(-) create mode 100644 .clinerules/workflows/bmad-agent-bmad-master.md create mode 100644 .clinerules/workflows/bmad-agent-bmb-agent-builder.md create mode 100644 .clinerules/workflows/bmad-agent-bmb-module-builder.md create mode 100644 .clinerules/workflows/bmad-agent-bmb-workflow-builder.md create mode 100644 .clinerules/workflows/bmad-agent-bmm-analyst.md create mode 100644 .clinerules/workflows/bmad-agent-bmm-architect.md create mode 100644 .clinerules/workflows/bmad-agent-bmm-dev.md create mode 100644 .clinerules/workflows/bmad-agent-bmm-pm.md create mode 100644 .clinerules/workflows/bmad-agent-bmm-qa.md create mode 100644 .clinerules/workflows/bmad-agent-bmm-quick-flow-solo-dev.md create mode 100644 .clinerules/workflows/bmad-agent-bmm-sm.md create mode 100644 .clinerules/workflows/bmad-agent-bmm-tech-writer.md create mode 100644 .clinerules/workflows/bmad-agent-bmm-ux-designer.md create mode 100644 .clinerules/workflows/bmad-agent-cis-brainstorming-coach.md create mode 100644 .clinerules/workflows/bmad-agent-cis-creative-problem-solver.md create mode 100644 .clinerules/workflows/bmad-agent-cis-design-thinking-coach.md create mode 100644 .clinerules/workflows/bmad-agent-cis-innovation-strategist.md create mode 100644 .clinerules/workflows/bmad-agent-cis-presentation-master.md create mode 100644 .clinerules/workflows/bmad-agent-cis-storyteller.md create mode 100644 
.clinerules/workflows/bmad-bmb-create-agent.md create mode 100644 .clinerules/workflows/bmad-bmb-create-module-brief.md create mode 100644 .clinerules/workflows/bmad-bmb-create-module.md create mode 100644 .clinerules/workflows/bmad-bmb-create-workflow.md create mode 100644 .clinerules/workflows/bmad-bmb-edit-agent.md create mode 100644 .clinerules/workflows/bmad-bmb-edit-module.md create mode 100644 .clinerules/workflows/bmad-bmb-edit-workflow.md create mode 100644 .clinerules/workflows/bmad-bmb-rework-workflow.md create mode 100644 .clinerules/workflows/bmad-bmb-validate-agent.md create mode 100644 .clinerules/workflows/bmad-bmb-validate-max-parallel-workflow.md create mode 100644 .clinerules/workflows/bmad-bmb-validate-module.md create mode 100644 .clinerules/workflows/bmad-bmb-validate-workflow.md create mode 100644 .clinerules/workflows/bmad-bmm-check-implementation-readiness.md create mode 100644 .clinerules/workflows/bmad-bmm-code-review.md create mode 100644 .clinerules/workflows/bmad-bmm-correct-course.md create mode 100644 .clinerules/workflows/bmad-bmm-create-architecture.md create mode 100644 .clinerules/workflows/bmad-bmm-create-epics-and-stories.md create mode 100644 .clinerules/workflows/bmad-bmm-create-prd.md create mode 100644 .clinerules/workflows/bmad-bmm-create-product-brief.md create mode 100644 .clinerules/workflows/bmad-bmm-create-story.md create mode 100644 .clinerules/workflows/bmad-bmm-create-ux-design.md create mode 100644 .clinerules/workflows/bmad-bmm-dev-story.md create mode 100644 .clinerules/workflows/bmad-bmm-document-project.md create mode 100644 .clinerules/workflows/bmad-bmm-domain-research.md create mode 100644 .clinerules/workflows/bmad-bmm-edit-prd.md create mode 100644 .clinerules/workflows/bmad-bmm-generate-project-context.md create mode 100644 .clinerules/workflows/bmad-bmm-market-research.md create mode 100644 .clinerules/workflows/bmad-bmm-qa-automate.md create mode 100644 .clinerules/workflows/bmad-bmm-quick-dev.md 
create mode 100644 .clinerules/workflows/bmad-bmm-quick-spec.md create mode 100644 .clinerules/workflows/bmad-bmm-retrospective.md create mode 100644 .clinerules/workflows/bmad-bmm-sprint-planning.md create mode 100644 .clinerules/workflows/bmad-bmm-sprint-status.md create mode 100644 .clinerules/workflows/bmad-bmm-technical-research.md create mode 100644 .clinerules/workflows/bmad-bmm-validate-prd.md create mode 100644 .clinerules/workflows/bmad-brainstorming.md create mode 100644 .clinerules/workflows/bmad-cis-design-thinking.md create mode 100644 .clinerules/workflows/bmad-cis-innovation-strategy.md create mode 100644 .clinerules/workflows/bmad-cis-problem-solving.md create mode 100644 .clinerules/workflows/bmad-cis-storytelling.md create mode 100644 .clinerules/workflows/bmad-editorial-review-prose.md create mode 100644 .clinerules/workflows/bmad-editorial-review-structure.md create mode 100644 .clinerules/workflows/bmad-help.md create mode 100644 .clinerules/workflows/bmad-index-docs.md create mode 100644 .clinerules/workflows/bmad-party-mode.md create mode 100644 .clinerules/workflows/bmad-review-adversarial-general.md create mode 100644 .clinerules/workflows/bmad-shard-doc.md create mode 100644 EXAMPLES.md create mode 100644 README_STORY_1_3.md create mode 100644 _bmad/_config/ides/cline.yaml create mode 100644 _bmad/bmm/workflows/2-plan-workflows/create-prd/steps-c/step-02b-vision.md create mode 100644 _bmad/bmm/workflows/2-plan-workflows/create-prd/steps-c/step-02c-executive-summary.md create mode 100644 _bmad/external_model.rs create mode 100644 crates/components/src/compressor.rs create mode 100644 crates/components/src/expansion_valve.rs create mode 100644 crates/components/src/external_model.rs create mode 100644 crates/components/src/fan.rs create mode 100644 crates/components/src/heat_exchanger/condenser.rs create mode 100644 crates/components/src/heat_exchanger/condenser_coil.rs create mode 100644 crates/components/src/heat_exchanger/economizer.rs 
create mode 100644 crates/components/src/heat_exchanger/eps_ntu.rs create mode 100644 crates/components/src/heat_exchanger/evaporator.rs create mode 100644 crates/components/src/heat_exchanger/evaporator_coil.rs create mode 100644 crates/components/src/heat_exchanger/lmtd.rs create mode 100644 crates/components/src/heat_exchanger/mod.rs create mode 100644 crates/components/src/heat_exchanger/model.rs create mode 100644 crates/components/src/pipe.rs create mode 100644 crates/components/src/polynomials.rs create mode 100644 crates/components/src/port.rs create mode 100644 crates/components/src/pump.rs create mode 100644 crates/components/src/state_machine.rs create mode 100644 crates/core/src/calib.rs create mode 100644 crates/fluids/Cargo.toml create mode 100644 crates/fluids/benches/cache_10k.rs create mode 100644 crates/fluids/build.rs create mode 100644 crates/fluids/coolprop-sys/build.rs create mode 100644 crates/fluids/coolprop-sys/src/lib.rs create mode 100644 crates/fluids/data/r134a.json create mode 100644 crates/fluids/src/backend.rs create mode 100644 crates/fluids/src/cache.rs create mode 100644 crates/fluids/src/cached_backend.rs create mode 100644 crates/fluids/src/coolprop.rs create mode 100644 crates/fluids/src/damped_backend.rs create mode 100644 crates/fluids/src/damping.rs create mode 100644 crates/fluids/src/errors.rs create mode 100644 crates/fluids/src/incompressible.rs create mode 100644 crates/fluids/src/lib.rs create mode 100644 crates/fluids/src/mixture.rs create mode 100644 crates/fluids/src/tabular/generator.rs create mode 100644 crates/fluids/src/tabular/interpolate.rs create mode 100644 crates/fluids/src/tabular/mod.rs create mode 100644 crates/fluids/src/tabular/table.rs create mode 100644 crates/fluids/src/tabular_backend.rs create mode 100644 crates/fluids/src/test_backend.rs create mode 100644 crates/fluids/src/types.rs create mode 100644 crates/solver/Cargo.toml create mode 100644 crates/solver/src/coupling.rs create mode 100644 
crates/solver/src/error.rs create mode 100644 crates/solver/src/graph.rs create mode 100644 crates/solver/src/initializer.rs create mode 100644 crates/solver/src/lib.rs create mode 100644 crates/solver/src/system.rs create mode 100644 crates/solver/tests/fallback_solver.rs create mode 100644 crates/solver/tests/multi_circuit.rs create mode 100644 crates/solver/tests/newton_convergence.rs create mode 100644 crates/solver/tests/newton_raphson.rs create mode 100644 crates/solver/tests/picard_sequential.rs create mode 100644 crates/solver/tests/smart_initializer.rs create mode 100644 demo/Cargo.toml create mode 100644 demo/README.md create mode 100644 demo/eurovent_report.html create mode 100644 demo/src/bin/chiller.rs create mode 100644 demo/src/bin/expansion_valve.rs create mode 100644 demo/src/bin/pipe.rs create mode 100644 demo/src/bin/ports.rs create mode 100644 demo/src/bin/pump.rs create mode 100644 demo/src/bin/pump_compressor_polynomials.rs create mode 100644 demo/src/bin/thermal_coupling.rs create mode 100644 demo/src/bin/ui_server.rs create mode 100644 demo/src/main.rs create mode 100644 ui/README.md create mode 100644 ui/index.html diff --git a/.clinerules/workflows/bmad-agent-bmad-master.md b/.clinerules/workflows/bmad-agent-bmad-master.md new file mode 100644 index 0000000..fcf0a08 --- /dev/null +++ b/.clinerules/workflows/bmad-agent-bmad-master.md @@ -0,0 +1,16 @@ +--- +name: 'bmad-master' +description: 'bmad-master agent' +disable-model-invocation: true +--- + +You must fully embody this agent's persona and follow all activation instructions exactly as specified. NEVER break character until given an exit command. + + +1. LOAD the FULL agent file from {project-root}/_bmad/core/agents/bmad-master.md +2. READ its entire contents - this contains the complete agent persona, menu, and instructions +3. FOLLOW every step in the section precisely +4. DISPLAY the welcome/greeting as instructed +5. PRESENT the numbered menu +6. 
WAIT for user input before proceeding + diff --git a/.clinerules/workflows/bmad-agent-bmb-agent-builder.md b/.clinerules/workflows/bmad-agent-bmb-agent-builder.md new file mode 100644 index 0000000..33c52b8 --- /dev/null +++ b/.clinerules/workflows/bmad-agent-bmb-agent-builder.md @@ -0,0 +1,16 @@ +--- +name: 'agent-builder' +description: 'agent-builder agent' +disable-model-invocation: true +--- + +You must fully embody this agent's persona and follow all activation instructions exactly as specified. NEVER break character until given an exit command. + + +1. LOAD the FULL agent file from {project-root}/_bmad/bmb/agents/agent-builder.md +2. READ its entire contents - this contains the complete agent persona, menu, and instructions +3. FOLLOW every step in the section precisely +4. DISPLAY the welcome/greeting as instructed +5. PRESENT the numbered menu +6. WAIT for user input before proceeding + diff --git a/.clinerules/workflows/bmad-agent-bmb-module-builder.md b/.clinerules/workflows/bmad-agent-bmb-module-builder.md new file mode 100644 index 0000000..2960169 --- /dev/null +++ b/.clinerules/workflows/bmad-agent-bmb-module-builder.md @@ -0,0 +1,16 @@ +--- +name: 'module-builder' +description: 'module-builder agent' +disable-model-invocation: true +--- + +You must fully embody this agent's persona and follow all activation instructions exactly as specified. NEVER break character until given an exit command. + + +1. LOAD the FULL agent file from {project-root}/_bmad/bmb/agents/module-builder.md +2. READ its entire contents - this contains the complete agent persona, menu, and instructions +3. FOLLOW every step in the section precisely +4. DISPLAY the welcome/greeting as instructed +5. PRESENT the numbered menu +6. 
WAIT for user input before proceeding + diff --git a/.clinerules/workflows/bmad-agent-bmb-workflow-builder.md b/.clinerules/workflows/bmad-agent-bmb-workflow-builder.md new file mode 100644 index 0000000..c0c866d --- /dev/null +++ b/.clinerules/workflows/bmad-agent-bmb-workflow-builder.md @@ -0,0 +1,16 @@ +--- +name: 'workflow-builder' +description: 'workflow-builder agent' +disable-model-invocation: true +--- + +You must fully embody this agent's persona and follow all activation instructions exactly as specified. NEVER break character until given an exit command. + + +1. LOAD the FULL agent file from {project-root}/_bmad/bmb/agents/workflow-builder.md +2. READ its entire contents - this contains the complete agent persona, menu, and instructions +3. FOLLOW every step in the section precisely +4. DISPLAY the welcome/greeting as instructed +5. PRESENT the numbered menu +6. WAIT for user input before proceeding + diff --git a/.clinerules/workflows/bmad-agent-bmm-analyst.md b/.clinerules/workflows/bmad-agent-bmm-analyst.md new file mode 100644 index 0000000..bac849d --- /dev/null +++ b/.clinerules/workflows/bmad-agent-bmm-analyst.md @@ -0,0 +1,16 @@ +--- +name: 'analyst' +description: 'analyst agent' +disable-model-invocation: true +--- + +You must fully embody this agent's persona and follow all activation instructions exactly as specified. NEVER break character until given an exit command. + + +1. LOAD the FULL agent file from {project-root}/_bmad/bmm/agents/analyst.md +2. READ its entire contents - this contains the complete agent persona, menu, and instructions +3. FOLLOW every step in the section precisely +4. DISPLAY the welcome/greeting as instructed +5. PRESENT the numbered menu +6. 
WAIT for user input before proceeding + diff --git a/.clinerules/workflows/bmad-agent-bmm-architect.md b/.clinerules/workflows/bmad-agent-bmm-architect.md new file mode 100644 index 0000000..fc4ead3 --- /dev/null +++ b/.clinerules/workflows/bmad-agent-bmm-architect.md @@ -0,0 +1,16 @@ +--- +name: 'architect' +description: 'architect agent' +disable-model-invocation: true +--- + +You must fully embody this agent's persona and follow all activation instructions exactly as specified. NEVER break character until given an exit command. + + +1. LOAD the FULL agent file from {project-root}/_bmad/bmm/agents/architect.md +2. READ its entire contents - this contains the complete agent persona, menu, and instructions +3. FOLLOW every step in the section precisely +4. DISPLAY the welcome/greeting as instructed +5. PRESENT the numbered menu +6. WAIT for user input before proceeding + diff --git a/.clinerules/workflows/bmad-agent-bmm-dev.md b/.clinerules/workflows/bmad-agent-bmm-dev.md new file mode 100644 index 0000000..e2d927e --- /dev/null +++ b/.clinerules/workflows/bmad-agent-bmm-dev.md @@ -0,0 +1,16 @@ +--- +name: 'dev' +description: 'dev agent' +disable-model-invocation: true +--- + +You must fully embody this agent's persona and follow all activation instructions exactly as specified. NEVER break character until given an exit command. + + +1. LOAD the FULL agent file from {project-root}/_bmad/bmm/agents/dev.md +2. READ its entire contents - this contains the complete agent persona, menu, and instructions +3. FOLLOW every step in the section precisely +4. DISPLAY the welcome/greeting as instructed +5. PRESENT the numbered menu +6. 
WAIT for user input before proceeding + diff --git a/.clinerules/workflows/bmad-agent-bmm-pm.md b/.clinerules/workflows/bmad-agent-bmm-pm.md new file mode 100644 index 0000000..d1b8daa --- /dev/null +++ b/.clinerules/workflows/bmad-agent-bmm-pm.md @@ -0,0 +1,16 @@ +--- +name: 'pm' +description: 'pm agent' +disable-model-invocation: true +--- + +You must fully embody this agent's persona and follow all activation instructions exactly as specified. NEVER break character until given an exit command. + + +1. LOAD the FULL agent file from {project-root}/_bmad/bmm/agents/pm.md +2. READ its entire contents - this contains the complete agent persona, menu, and instructions +3. FOLLOW every step in the section precisely +4. DISPLAY the welcome/greeting as instructed +5. PRESENT the numbered menu +6. WAIT for user input before proceeding + diff --git a/.clinerules/workflows/bmad-agent-bmm-qa.md b/.clinerules/workflows/bmad-agent-bmm-qa.md new file mode 100644 index 0000000..d8fef81 --- /dev/null +++ b/.clinerules/workflows/bmad-agent-bmm-qa.md @@ -0,0 +1,16 @@ +--- +name: 'qa' +description: 'qa agent' +disable-model-invocation: true +--- + +You must fully embody this agent's persona and follow all activation instructions exactly as specified. NEVER break character until given an exit command. + + +1. LOAD the FULL agent file from {project-root}/_bmad/bmm/agents/qa.md +2. READ its entire contents - this contains the complete agent persona, menu, and instructions +3. FOLLOW every step in the section precisely +4. DISPLAY the welcome/greeting as instructed +5. PRESENT the numbered menu +6. 
WAIT for user input before proceeding + diff --git a/.clinerules/workflows/bmad-agent-bmm-quick-flow-solo-dev.md b/.clinerules/workflows/bmad-agent-bmm-quick-flow-solo-dev.md new file mode 100644 index 0000000..c8e1840 --- /dev/null +++ b/.clinerules/workflows/bmad-agent-bmm-quick-flow-solo-dev.md @@ -0,0 +1,16 @@ +--- +name: 'quick-flow-solo-dev' +description: 'quick-flow-solo-dev agent' +disable-model-invocation: true +--- + +You must fully embody this agent's persona and follow all activation instructions exactly as specified. NEVER break character until given an exit command. + + +1. LOAD the FULL agent file from {project-root}/_bmad/bmm/agents/quick-flow-solo-dev.md +2. READ its entire contents - this contains the complete agent persona, menu, and instructions +3. FOLLOW every step in the section precisely +4. DISPLAY the welcome/greeting as instructed +5. PRESENT the numbered menu +6. WAIT for user input before proceeding + diff --git a/.clinerules/workflows/bmad-agent-bmm-sm.md b/.clinerules/workflows/bmad-agent-bmm-sm.md new file mode 100644 index 0000000..c7ee7db --- /dev/null +++ b/.clinerules/workflows/bmad-agent-bmm-sm.md @@ -0,0 +1,16 @@ +--- +name: 'sm' +description: 'sm agent' +disable-model-invocation: true +--- + +You must fully embody this agent's persona and follow all activation instructions exactly as specified. NEVER break character until given an exit command. + + +1. LOAD the FULL agent file from {project-root}/_bmad/bmm/agents/sm.md +2. READ its entire contents - this contains the complete agent persona, menu, and instructions +3. FOLLOW every step in the section precisely +4. DISPLAY the welcome/greeting as instructed +5. PRESENT the numbered menu +6. 
WAIT for user input before proceeding + diff --git a/.clinerules/workflows/bmad-agent-bmm-tech-writer.md b/.clinerules/workflows/bmad-agent-bmm-tech-writer.md new file mode 100644 index 0000000..c020ac9 --- /dev/null +++ b/.clinerules/workflows/bmad-agent-bmm-tech-writer.md @@ -0,0 +1,16 @@ +--- +name: 'tech-writer' +description: 'tech-writer agent' +disable-model-invocation: true +--- + +You must fully embody this agent's persona and follow all activation instructions exactly as specified. NEVER break character until given an exit command. + + +1. LOAD the FULL agent file from {project-root}/_bmad/bmm/agents/tech-writer/tech-writer.md +2. READ its entire contents - this contains the complete agent persona, menu, and instructions +3. FOLLOW every step in the section precisely +4. DISPLAY the welcome/greeting as instructed +5. PRESENT the numbered menu +6. WAIT for user input before proceeding + diff --git a/.clinerules/workflows/bmad-agent-bmm-ux-designer.md b/.clinerules/workflows/bmad-agent-bmm-ux-designer.md new file mode 100644 index 0000000..5dbb42b --- /dev/null +++ b/.clinerules/workflows/bmad-agent-bmm-ux-designer.md @@ -0,0 +1,16 @@ +--- +name: 'ux-designer' +description: 'ux-designer agent' +disable-model-invocation: true +--- + +You must fully embody this agent's persona and follow all activation instructions exactly as specified. NEVER break character until given an exit command. + + +1. LOAD the FULL agent file from {project-root}/_bmad/bmm/agents/ux-designer.md +2. READ its entire contents - this contains the complete agent persona, menu, and instructions +3. FOLLOW every step in the section precisely +4. DISPLAY the welcome/greeting as instructed +5. PRESENT the numbered menu +6. 
WAIT for user input before proceeding + diff --git a/.clinerules/workflows/bmad-agent-cis-brainstorming-coach.md b/.clinerules/workflows/bmad-agent-cis-brainstorming-coach.md new file mode 100644 index 0000000..c581ebe --- /dev/null +++ b/.clinerules/workflows/bmad-agent-cis-brainstorming-coach.md @@ -0,0 +1,16 @@ +--- +name: 'brainstorming-coach' +description: 'brainstorming-coach agent' +disable-model-invocation: true +--- + +You must fully embody this agent's persona and follow all activation instructions exactly as specified. NEVER break character until given an exit command. + + +1. LOAD the FULL agent file from {project-root}/_bmad/cis/agents/brainstorming-coach.md +2. READ its entire contents - this contains the complete agent persona, menu, and instructions +3. FOLLOW every step in the section precisely +4. DISPLAY the welcome/greeting as instructed +5. PRESENT the numbered menu +6. WAIT for user input before proceeding + diff --git a/.clinerules/workflows/bmad-agent-cis-creative-problem-solver.md b/.clinerules/workflows/bmad-agent-cis-creative-problem-solver.md new file mode 100644 index 0000000..7974ff4 --- /dev/null +++ b/.clinerules/workflows/bmad-agent-cis-creative-problem-solver.md @@ -0,0 +1,16 @@ +--- +name: 'creative-problem-solver' +description: 'creative-problem-solver agent' +disable-model-invocation: true +--- + +You must fully embody this agent's persona and follow all activation instructions exactly as specified. NEVER break character until given an exit command. + + +1. LOAD the FULL agent file from {project-root}/_bmad/cis/agents/creative-problem-solver.md +2. READ its entire contents - this contains the complete agent persona, menu, and instructions +3. FOLLOW every step in the section precisely +4. DISPLAY the welcome/greeting as instructed +5. PRESENT the numbered menu +6. 
WAIT for user input before proceeding + diff --git a/.clinerules/workflows/bmad-agent-cis-design-thinking-coach.md b/.clinerules/workflows/bmad-agent-cis-design-thinking-coach.md new file mode 100644 index 0000000..fc2a589 --- /dev/null +++ b/.clinerules/workflows/bmad-agent-cis-design-thinking-coach.md @@ -0,0 +1,16 @@ +--- +name: 'design-thinking-coach' +description: 'design-thinking-coach agent' +disable-model-invocation: true +--- + +You must fully embody this agent's persona and follow all activation instructions exactly as specified. NEVER break character until given an exit command. + + +1. LOAD the FULL agent file from {project-root}/_bmad/cis/agents/design-thinking-coach.md +2. READ its entire contents - this contains the complete agent persona, menu, and instructions +3. FOLLOW every step in the section precisely +4. DISPLAY the welcome/greeting as instructed +5. PRESENT the numbered menu +6. WAIT for user input before proceeding + diff --git a/.clinerules/workflows/bmad-agent-cis-innovation-strategist.md b/.clinerules/workflows/bmad-agent-cis-innovation-strategist.md new file mode 100644 index 0000000..df0e4b2 --- /dev/null +++ b/.clinerules/workflows/bmad-agent-cis-innovation-strategist.md @@ -0,0 +1,16 @@ +--- +name: 'innovation-strategist' +description: 'innovation-strategist agent' +disable-model-invocation: true +--- + +You must fully embody this agent's persona and follow all activation instructions exactly as specified. NEVER break character until given an exit command. + + +1. LOAD the FULL agent file from {project-root}/_bmad/cis/agents/innovation-strategist.md +2. READ its entire contents - this contains the complete agent persona, menu, and instructions +3. FOLLOW every step in the section precisely +4. DISPLAY the welcome/greeting as instructed +5. PRESENT the numbered menu +6. 
WAIT for user input before proceeding + diff --git a/.clinerules/workflows/bmad-agent-cis-presentation-master.md b/.clinerules/workflows/bmad-agent-cis-presentation-master.md new file mode 100644 index 0000000..830129b --- /dev/null +++ b/.clinerules/workflows/bmad-agent-cis-presentation-master.md @@ -0,0 +1,16 @@ +--- +name: 'presentation-master' +description: 'presentation-master agent' +disable-model-invocation: true +--- + +You must fully embody this agent's persona and follow all activation instructions exactly as specified. NEVER break character until given an exit command. + + +1. LOAD the FULL agent file from {project-root}/_bmad/cis/agents/presentation-master.md +2. READ its entire contents - this contains the complete agent persona, menu, and instructions +3. FOLLOW every step in the section precisely +4. DISPLAY the welcome/greeting as instructed +5. PRESENT the numbered menu +6. WAIT for user input before proceeding + diff --git a/.clinerules/workflows/bmad-agent-cis-storyteller.md b/.clinerules/workflows/bmad-agent-cis-storyteller.md new file mode 100644 index 0000000..b6b2266 --- /dev/null +++ b/.clinerules/workflows/bmad-agent-cis-storyteller.md @@ -0,0 +1,16 @@ +--- +name: 'storyteller' +description: 'storyteller agent' +disable-model-invocation: true +--- + +You must fully embody this agent's persona and follow all activation instructions exactly as specified. NEVER break character until given an exit command. + + +1. LOAD the FULL agent file from {project-root}/_bmad/cis/agents/storyteller/storyteller.md +2. READ its entire contents - this contains the complete agent persona, menu, and instructions +3. FOLLOW every step in the section precisely +4. DISPLAY the welcome/greeting as instructed +5. PRESENT the numbered menu +6. 
WAIT for user input before proceeding + diff --git a/.clinerules/workflows/bmad-bmb-create-agent.md b/.clinerules/workflows/bmad-bmb-create-agent.md new file mode 100644 index 0000000..286091e --- /dev/null +++ b/.clinerules/workflows/bmad-bmb-create-agent.md @@ -0,0 +1,10 @@ +--- +description: 'Create a new BMAD agent with best practices and compliance' +auto_execution_mode: "iterate" +--- + +# create-agent + +Read the entire workflow file at {project-root}/_bmad/bmb/workflows/agent/workflow-create-agent.md + +Follow all instructions in the workflow file exactly as written. diff --git a/.clinerules/workflows/bmad-bmb-create-module-brief.md b/.clinerules/workflows/bmad-bmb-create-module-brief.md new file mode 100644 index 0000000..4278799 --- /dev/null +++ b/.clinerules/workflows/bmad-bmb-create-module-brief.md @@ -0,0 +1,10 @@ +--- +description: 'Create product brief for BMAD module development' +auto_execution_mode: "iterate" +--- + +# create-module-brief + +Read the entire workflow file at {project-root}/_bmad/bmb/workflows/module/workflow-create-module-brief.md + +Follow all instructions in the workflow file exactly as written. diff --git a/.clinerules/workflows/bmad-bmb-create-module.md b/.clinerules/workflows/bmad-bmb-create-module.md new file mode 100644 index 0000000..77f4ca8 --- /dev/null +++ b/.clinerules/workflows/bmad-bmb-create-module.md @@ -0,0 +1,10 @@ +--- +description: 'Create a complete BMAD module with agents, workflows, and infrastructure' +auto_execution_mode: "iterate" +--- + +# create-module + +Read the entire workflow file at {project-root}/_bmad/bmb/workflows/module/workflow-create-module.md + +Follow all instructions in the workflow file exactly as written. 
diff --git a/.clinerules/workflows/bmad-bmb-create-workflow.md b/.clinerules/workflows/bmad-bmb-create-workflow.md new file mode 100644 index 0000000..f9e03f5 --- /dev/null +++ b/.clinerules/workflows/bmad-bmb-create-workflow.md @@ -0,0 +1,10 @@ +--- +description: 'Create a new BMAD workflow with proper structure and best practices' +auto_execution_mode: "iterate" +--- + +# create-workflow + +Read the entire workflow file at {project-root}/_bmad/bmb/workflows/workflow/workflow-create-workflow.md + +Follow all instructions in the workflow file exactly as written. diff --git a/.clinerules/workflows/bmad-bmb-edit-agent.md b/.clinerules/workflows/bmad-bmb-edit-agent.md new file mode 100644 index 0000000..0c4db95 --- /dev/null +++ b/.clinerules/workflows/bmad-bmb-edit-agent.md @@ -0,0 +1,10 @@ +--- +description: 'Edit existing BMAD agents while maintaining compliance' +auto_execution_mode: "iterate" +--- + +# edit-agent + +Read the entire workflow file at {project-root}/_bmad/bmb/workflows/agent/workflow-edit-agent.md + +Follow all instructions in the workflow file exactly as written. diff --git a/.clinerules/workflows/bmad-bmb-edit-module.md b/.clinerules/workflows/bmad-bmb-edit-module.md new file mode 100644 index 0000000..3cf70a2 --- /dev/null +++ b/.clinerules/workflows/bmad-bmb-edit-module.md @@ -0,0 +1,10 @@ +--- +description: 'Edit existing BMAD modules while maintaining coherence' +auto_execution_mode: "iterate" +--- + +# edit-module + +Read the entire workflow file at {project-root}/_bmad/bmb/workflows/module/workflow-edit-module.md + +Follow all instructions in the workflow file exactly as written. 
diff --git a/.clinerules/workflows/bmad-bmb-edit-workflow.md b/.clinerules/workflows/bmad-bmb-edit-workflow.md new file mode 100644 index 0000000..e40c7e0 --- /dev/null +++ b/.clinerules/workflows/bmad-bmb-edit-workflow.md @@ -0,0 +1,10 @@ +--- +description: 'Edit existing BMAD workflows while maintaining integrity' +auto_execution_mode: "iterate" +--- + +# edit-workflow + +Read the entire workflow file at {project-root}/_bmad/bmb/workflows/workflow/workflow-edit-workflow.md + +Follow all instructions in the workflow file exactly as written. diff --git a/.clinerules/workflows/bmad-bmb-rework-workflow.md b/.clinerules/workflows/bmad-bmb-rework-workflow.md new file mode 100644 index 0000000..0f54b43 --- /dev/null +++ b/.clinerules/workflows/bmad-bmb-rework-workflow.md @@ -0,0 +1,10 @@ +--- +description: 'Rework a Workflow to a V6 Compliant Version' +auto_execution_mode: "iterate" +--- + +# rework-workflow + +Read the entire workflow file at {project-root}/_bmad/bmb/workflows/workflow/workflow-rework-workflow.md + +Follow all instructions in the workflow file exactly as written. diff --git a/.clinerules/workflows/bmad-bmb-validate-agent.md b/.clinerules/workflows/bmad-bmb-validate-agent.md new file mode 100644 index 0000000..d4f62fa --- /dev/null +++ b/.clinerules/workflows/bmad-bmb-validate-agent.md @@ -0,0 +1,10 @@ +--- +description: 'Validate existing BMAD agents and offer to improve deficiencies' +auto_execution_mode: "iterate" +--- + +# validate-agent + +Read the entire workflow file at {project-root}/_bmad/bmb/workflows/agent/workflow-validate-agent.md + +Follow all instructions in the workflow file exactly as written. 
diff --git a/.clinerules/workflows/bmad-bmb-validate-max-parallel-workflow.md b/.clinerules/workflows/bmad-bmb-validate-max-parallel-workflow.md new file mode 100644 index 0000000..221ae9d --- /dev/null +++ b/.clinerules/workflows/bmad-bmb-validate-max-parallel-workflow.md @@ -0,0 +1,10 @@ +--- +description: 'Run validation checks in MAX-PARALLEL mode against a workflow; requires a tool that supports Parallel Sub-Processes' +auto_execution_mode: "iterate" +--- + +# validate-max-parallel-workflow + +Read the entire workflow file at {project-root}/_bmad/bmb/workflows/workflow/workflow-validate-max-parallel-workflow.md + +Follow all instructions in the workflow file exactly as written. diff --git a/.clinerules/workflows/bmad-bmb-validate-module.md b/.clinerules/workflows/bmad-bmb-validate-module.md new file mode 100644 index 0000000..0111214 --- /dev/null +++ b/.clinerules/workflows/bmad-bmb-validate-module.md @@ -0,0 +1,10 @@ +--- +description: 'Run compliance check on BMAD modules against best practices' +auto_execution_mode: "iterate" +--- + +# validate-module + +Read the entire workflow file at {project-root}/_bmad/bmb/workflows/module/workflow-validate-module.md + +Follow all instructions in the workflow file exactly as written. diff --git a/.clinerules/workflows/bmad-bmb-validate-workflow.md b/.clinerules/workflows/bmad-bmb-validate-workflow.md new file mode 100644 index 0000000..558a503 --- /dev/null +++ b/.clinerules/workflows/bmad-bmb-validate-workflow.md @@ -0,0 +1,10 @@ +--- +description: 'Run validation check on BMAD workflows against best practices' +auto_execution_mode: "iterate" +--- + +# validate-workflow + +Read the entire workflow file at {project-root}/_bmad/bmb/workflows/workflow/workflow-validate-workflow.md + +Follow all instructions in the workflow file exactly as written. 
diff --git a/.clinerules/workflows/bmad-bmm-check-implementation-readiness.md b/.clinerules/workflows/bmad-bmm-check-implementation-readiness.md new file mode 100644 index 0000000..f70e8b5 --- /dev/null +++ b/.clinerules/workflows/bmad-bmm-check-implementation-readiness.md @@ -0,0 +1,10 @@ +--- +description: 'Critical validation workflow that assesses PRD, Architecture, and Epics & Stories for completeness and alignment before implementation. Uses adversarial review approach to find gaps and issues.' +auto_execution_mode: "iterate" +--- + +# check-implementation-readiness + +Read the entire workflow file at {project-root}/_bmad/bmm/workflows/3-solutioning/check-implementation-readiness/workflow.md + +Follow all instructions in the workflow file exactly as written. diff --git a/.clinerules/workflows/bmad-bmm-code-review.md b/.clinerules/workflows/bmad-bmm-code-review.md new file mode 100644 index 0000000..83b5a6a --- /dev/null +++ b/.clinerules/workflows/bmad-bmm-code-review.md @@ -0,0 +1,15 @@ +--- +name: 'code-review' +description: 'Perform an ADVERSARIAL Senior Developer code review that finds 3-10 specific problems in every story. Challenges everything: code quality, test coverage, architecture compliance, security, performance. NEVER accepts `looks good` - must find minimum issues and can auto-fix with user approval.' +disable-model-invocation: true +--- + +IT IS CRITICAL THAT YOU FOLLOW THESE STEPS - while staying in character as the current agent persona you may have loaded: + + +1. Always LOAD the FULL @{project-root}/_bmad/core/tasks/workflow.xml +2. READ its entire contents - this is the CORE OS for EXECUTING the specific workflow-config @{project-root}/_bmad/bmm/workflows/4-implementation/code-review/workflow.yaml +3. Pass the yaml path @{project-root}/_bmad/bmm/workflows/4-implementation/code-review/workflow.yaml as 'workflow-config' parameter to the workflow.xml instructions +4. 
Follow workflow.xml instructions EXACTLY as written to process and follow the specific workflow config and its instructions +5. Save outputs after EACH section when generating any documents from templates + diff --git a/.clinerules/workflows/bmad-bmm-correct-course.md b/.clinerules/workflows/bmad-bmm-correct-course.md new file mode 100644 index 0000000..90359d0 --- /dev/null +++ b/.clinerules/workflows/bmad-bmm-correct-course.md @@ -0,0 +1,15 @@ +--- +name: 'correct-course' +description: 'Navigate significant changes during sprint execution by analyzing impact, proposing solutions, and routing for implementation' +disable-model-invocation: true +--- + +IT IS CRITICAL THAT YOU FOLLOW THESE STEPS - while staying in character as the current agent persona you may have loaded: + + +1. Always LOAD the FULL @{project-root}/_bmad/core/tasks/workflow.xml +2. READ its entire contents - this is the CORE OS for EXECUTING the specific workflow-config @{project-root}/_bmad/bmm/workflows/4-implementation/correct-course/workflow.yaml +3. Pass the yaml path @{project-root}/_bmad/bmm/workflows/4-implementation/correct-course/workflow.yaml as 'workflow-config' parameter to the workflow.xml instructions +4. Follow workflow.xml instructions EXACTLY as written to process and follow the specific workflow config and its instructions +5. Save outputs after EACH section when generating any documents from templates + diff --git a/.clinerules/workflows/bmad-bmm-create-architecture.md b/.clinerules/workflows/bmad-bmm-create-architecture.md new file mode 100644 index 0000000..86351ff --- /dev/null +++ b/.clinerules/workflows/bmad-bmm-create-architecture.md @@ -0,0 +1,10 @@ +--- +description: 'Collaborative architectural decision facilitation for AI-agent consistency. Replaces template-driven architecture with intelligent, adaptive conversation that produces a decision-focused architecture document optimized for preventing agent conflicts.' 
+auto_execution_mode: "iterate" +--- + +# create-architecture + +Read the entire workflow file at {project-root}/_bmad/bmm/workflows/3-solutioning/create-architecture/workflow.md + +Follow all instructions in the workflow file exactly as written. diff --git a/.clinerules/workflows/bmad-bmm-create-epics-and-stories.md b/.clinerules/workflows/bmad-bmm-create-epics-and-stories.md new file mode 100644 index 0000000..7c12ed0 --- /dev/null +++ b/.clinerules/workflows/bmad-bmm-create-epics-and-stories.md @@ -0,0 +1,10 @@ +--- +description: 'Transform PRD requirements and Architecture decisions into comprehensive stories organized by user value. This workflow requires completed PRD + Architecture documents (UX recommended if UI exists) and breaks down requirements into implementation-ready epics and user stories that incorporate all available technical and design context. Creates detailed, actionable stories with complete acceptance criteria for development teams.' +auto_execution_mode: "iterate" +--- + +# create-epics-and-stories + +Read the entire workflow file at {project-root}/_bmad/bmm/workflows/3-solutioning/create-epics-and-stories/workflow.md + +Follow all instructions in the workflow file exactly as written. diff --git a/.clinerules/workflows/bmad-bmm-create-prd.md b/.clinerules/workflows/bmad-bmm-create-prd.md new file mode 100644 index 0000000..1b246b4 --- /dev/null +++ b/.clinerules/workflows/bmad-bmm-create-prd.md @@ -0,0 +1,10 @@ +--- +description: 'Create a comprehensive PRD (Product Requirements Document) through structured workflow facilitation' +auto_execution_mode: "iterate" +--- + +# create-prd + +Read the entire workflow file at {project-root}/_bmad/bmm/workflows/2-plan-workflows/create-prd/workflow-create-prd.md + +Follow all instructions in the workflow file exactly as written. 
diff --git a/.clinerules/workflows/bmad-bmm-create-product-brief.md b/.clinerules/workflows/bmad-bmm-create-product-brief.md new file mode 100644 index 0000000..9d49d49 --- /dev/null +++ b/.clinerules/workflows/bmad-bmm-create-product-brief.md @@ -0,0 +1,10 @@ +--- +description: 'Create comprehensive product briefs through collaborative step-by-step discovery as creative Business Analyst working with the user as peers.' +auto_execution_mode: "iterate" +--- + +# create-product-brief + +Read the entire workflow file at {project-root}/_bmad/bmm/workflows/1-analysis/create-product-brief/workflow.md + +Follow all instructions in the workflow file exactly as written. diff --git a/.clinerules/workflows/bmad-bmm-create-story.md b/.clinerules/workflows/bmad-bmm-create-story.md new file mode 100644 index 0000000..b6ed179 --- /dev/null +++ b/.clinerules/workflows/bmad-bmm-create-story.md @@ -0,0 +1,15 @@ +--- +name: 'create-story' +description: 'Create the next user story from epics+stories with enhanced context analysis and direct ready-for-dev marking' +disable-model-invocation: true +--- + +IT IS CRITICAL THAT YOU FOLLOW THESE STEPS - while staying in character as the current agent persona you may have loaded: + + +1. Always LOAD the FULL @{project-root}/_bmad/core/tasks/workflow.xml +2. READ its entire contents - this is the CORE OS for EXECUTING the specific workflow-config @{project-root}/_bmad/bmm/workflows/4-implementation/create-story/workflow.yaml +3. Pass the yaml path @{project-root}/_bmad/bmm/workflows/4-implementation/create-story/workflow.yaml as 'workflow-config' parameter to the workflow.xml instructions +4. Follow workflow.xml instructions EXACTLY as written to process and follow the specific workflow config and its instructions +5. 
Save outputs after EACH section when generating any documents from templates + diff --git a/.clinerules/workflows/bmad-bmm-create-ux-design.md b/.clinerules/workflows/bmad-bmm-create-ux-design.md new file mode 100644 index 0000000..10b2be0 --- /dev/null +++ b/.clinerules/workflows/bmad-bmm-create-ux-design.md @@ -0,0 +1,10 @@ +--- +description: 'Work with a peer UX Design expert to plan your application's UX patterns, look and feel.' +auto_execution_mode: "iterate" +--- + +# create-ux-design + +Read the entire workflow file at {project-root}/_bmad/bmm/workflows/2-plan-workflows/create-ux-design/workflow.md + +Follow all instructions in the workflow file exactly as written. diff --git a/.clinerules/workflows/bmad-bmm-dev-story.md b/.clinerules/workflows/bmad-bmm-dev-story.md new file mode 100644 index 0000000..c3b073f --- /dev/null +++ b/.clinerules/workflows/bmad-bmm-dev-story.md @@ -0,0 +1,15 @@ +--- +name: 'dev-story' +description: 'Execute a story by implementing tasks/subtasks, writing tests, validating, and updating the story file per acceptance criteria' +disable-model-invocation: true +--- + +IT IS CRITICAL THAT YOU FOLLOW THESE STEPS - while staying in character as the current agent persona you may have loaded: + + +1. Always LOAD the FULL @{project-root}/_bmad/core/tasks/workflow.xml +2. READ its entire contents - this is the CORE OS for EXECUTING the specific workflow-config @{project-root}/_bmad/bmm/workflows/4-implementation/dev-story/workflow.yaml +3. Pass the yaml path @{project-root}/_bmad/bmm/workflows/4-implementation/dev-story/workflow.yaml as 'workflow-config' parameter to the workflow.xml instructions +4. Follow workflow.xml instructions EXACTLY as written to process and follow the specific workflow config and its instructions +5. 
Save outputs after EACH section when generating any documents from templates + diff --git a/.clinerules/workflows/bmad-bmm-document-project.md b/.clinerules/workflows/bmad-bmm-document-project.md new file mode 100644 index 0000000..3de1703 --- /dev/null +++ b/.clinerules/workflows/bmad-bmm-document-project.md @@ -0,0 +1,15 @@ +--- +name: 'document-project' +description: 'Analyzes and documents brownfield projects by scanning codebase, architecture, and patterns to create comprehensive reference documentation for AI-assisted development' +disable-model-invocation: true +--- + +IT IS CRITICAL THAT YOU FOLLOW THESE STEPS - while staying in character as the current agent persona you may have loaded: + + +1. Always LOAD the FULL @{project-root}/_bmad/core/tasks/workflow.xml +2. READ its entire contents - this is the CORE OS for EXECUTING the specific workflow-config @{project-root}/_bmad/bmm/workflows/document-project/workflow.yaml +3. Pass the yaml path @{project-root}/_bmad/bmm/workflows/document-project/workflow.yaml as 'workflow-config' parameter to the workflow.xml instructions +4. Follow workflow.xml instructions EXACTLY as written to process and follow the specific workflow config and its instructions +5. Save outputs after EACH section when generating any documents from templates + diff --git a/.clinerules/workflows/bmad-bmm-domain-research.md b/.clinerules/workflows/bmad-bmm-domain-research.md new file mode 100644 index 0000000..e4712ae --- /dev/null +++ b/.clinerules/workflows/bmad-bmm-domain-research.md @@ -0,0 +1,10 @@ +--- +description: 'Conduct domain research covering industry analysis, regulations, technology trends, and ecosystem dynamics using current web data and verified sources.' +auto_execution_mode: "iterate" +--- + +# domain-research + +Read the entire workflow file at {project-root}/_bmad/bmm/workflows/1-analysis/research/workflow-domain-research.md + +Follow all instructions in the workflow file exactly as written. 
diff --git a/.clinerules/workflows/bmad-bmm-edit-prd.md b/.clinerules/workflows/bmad-bmm-edit-prd.md new file mode 100644 index 0000000..dbcc4cf --- /dev/null +++ b/.clinerules/workflows/bmad-bmm-edit-prd.md @@ -0,0 +1,10 @@ +--- +description: 'Edit and improve an existing PRD - enhance clarity, completeness, and quality' +auto_execution_mode: "iterate" +--- + +# edit-prd + +Read the entire workflow file at {project-root}/_bmad/bmm/workflows/2-plan-workflows/create-prd/workflow-edit-prd.md + +Follow all instructions in the workflow file exactly as written. diff --git a/.clinerules/workflows/bmad-bmm-generate-project-context.md b/.clinerules/workflows/bmad-bmm-generate-project-context.md new file mode 100644 index 0000000..8708db4 --- /dev/null +++ b/.clinerules/workflows/bmad-bmm-generate-project-context.md @@ -0,0 +1,10 @@ +--- +description: 'Creates a concise project-context.md file with critical rules and patterns that AI agents must follow when implementing code. Optimized for LLM context efficiency.' +auto_execution_mode: "iterate" +--- + +# generate-project-context + +Read the entire workflow file at {project-root}/_bmad/bmm/workflows/generate-project-context/workflow.md + +Follow all instructions in the workflow file exactly as written. diff --git a/.clinerules/workflows/bmad-bmm-market-research.md b/.clinerules/workflows/bmad-bmm-market-research.md new file mode 100644 index 0000000..a605886 --- /dev/null +++ b/.clinerules/workflows/bmad-bmm-market-research.md @@ -0,0 +1,10 @@ +--- +description: 'Conduct market research covering market size, growth, competition, and customer insights using current web data and verified sources.' +auto_execution_mode: "iterate" +--- + +# market-research + +Read the entire workflow file at {project-root}/_bmad/bmm/workflows/1-analysis/research/workflow-market-research.md + +Follow all instructions in the workflow file exactly as written. 
diff --git a/.clinerules/workflows/bmad-bmm-qa-automate.md b/.clinerules/workflows/bmad-bmm-qa-automate.md new file mode 100644 index 0000000..8fd7c01 --- /dev/null +++ b/.clinerules/workflows/bmad-bmm-qa-automate.md @@ -0,0 +1,15 @@ +--- +name: 'qa-automate' +description: 'Generate tests quickly for existing features using standard test patterns' +disable-model-invocation: true +--- + +IT IS CRITICAL THAT YOU FOLLOW THESE STEPS - while staying in character as the current agent persona you may have loaded: + + +1. Always LOAD the FULL @{project-root}/_bmad/core/tasks/workflow.xml +2. READ its entire contents - this is the CORE OS for EXECUTING the specific workflow-config @{project-root}/_bmad/bmm/workflows/qa/automate/workflow.yaml +3. Pass the yaml path @{project-root}/_bmad/bmm/workflows/qa/automate/workflow.yaml as 'workflow-config' parameter to the workflow.xml instructions +4. Follow workflow.xml instructions EXACTLY as written to process and follow the specific workflow config and its instructions +5. Save outputs after EACH section when generating any documents from templates + diff --git a/.clinerules/workflows/bmad-bmm-quick-dev.md b/.clinerules/workflows/bmad-bmm-quick-dev.md new file mode 100644 index 0000000..90c5545 --- /dev/null +++ b/.clinerules/workflows/bmad-bmm-quick-dev.md @@ -0,0 +1,10 @@ +--- +description: 'Flexible development - execute tech-specs OR direct instructions with optional planning.' +auto_execution_mode: "iterate" +--- + +# quick-dev + +Read the entire workflow file at {project-root}/_bmad/bmm/workflows/bmad-quick-flow/quick-dev/workflow.md + +Follow all instructions in the workflow file exactly as written. 
diff --git a/.clinerules/workflows/bmad-bmm-quick-spec.md b/.clinerules/workflows/bmad-bmm-quick-spec.md new file mode 100644 index 0000000..139608b --- /dev/null +++ b/.clinerules/workflows/bmad-bmm-quick-spec.md @@ -0,0 +1,10 @@ +--- +description: 'Conversational spec engineering - ask questions, investigate code, produce implementation-ready tech-spec.' +auto_execution_mode: "iterate" +--- + +# quick-spec + +Read the entire workflow file at {project-root}/_bmad/bmm/workflows/bmad-quick-flow/quick-spec/workflow.md + +Follow all instructions in the workflow file exactly as written. diff --git a/.clinerules/workflows/bmad-bmm-retrospective.md b/.clinerules/workflows/bmad-bmm-retrospective.md new file mode 100644 index 0000000..ae16aaf --- /dev/null +++ b/.clinerules/workflows/bmad-bmm-retrospective.md @@ -0,0 +1,15 @@ +--- +name: 'retrospective' +description: 'Run after epic completion to review overall success, extract lessons learned, and explore if new information emerged that might impact the next epic' +disable-model-invocation: true +--- + +IT IS CRITICAL THAT YOU FOLLOW THESE STEPS - while staying in character as the current agent persona you may have loaded: + + +1. Always LOAD the FULL @{project-root}/_bmad/core/tasks/workflow.xml +2. READ its entire contents - this is the CORE OS for EXECUTING the specific workflow-config @{project-root}/_bmad/bmm/workflows/4-implementation/retrospective/workflow.yaml +3. Pass the yaml path @{project-root}/_bmad/bmm/workflows/4-implementation/retrospective/workflow.yaml as 'workflow-config' parameter to the workflow.xml instructions +4. Follow workflow.xml instructions EXACTLY as written to process and follow the specific workflow config and its instructions +5. 
Save outputs after EACH section when generating any documents from templates + diff --git a/.clinerules/workflows/bmad-bmm-sprint-planning.md b/.clinerules/workflows/bmad-bmm-sprint-planning.md new file mode 100644 index 0000000..1026360 --- /dev/null +++ b/.clinerules/workflows/bmad-bmm-sprint-planning.md @@ -0,0 +1,15 @@ +--- +name: 'sprint-planning' +description: 'Generate and manage the sprint status tracking file for Phase 4 implementation, extracting all epics and stories from epic files and tracking their status through the development lifecycle' +disable-model-invocation: true +--- + +IT IS CRITICAL THAT YOU FOLLOW THESE STEPS - while staying in character as the current agent persona you may have loaded: + + +1. Always LOAD the FULL @{project-root}/_bmad/core/tasks/workflow.xml +2. READ its entire contents - this is the CORE OS for EXECUTING the specific workflow-config @{project-root}/_bmad/bmm/workflows/4-implementation/sprint-planning/workflow.yaml +3. Pass the yaml path @{project-root}/_bmad/bmm/workflows/4-implementation/sprint-planning/workflow.yaml as 'workflow-config' parameter to the workflow.xml instructions +4. Follow workflow.xml instructions EXACTLY as written to process and follow the specific workflow config and its instructions +5. Save outputs after EACH section when generating any documents from templates + diff --git a/.clinerules/workflows/bmad-bmm-sprint-status.md b/.clinerules/workflows/bmad-bmm-sprint-status.md new file mode 100644 index 0000000..edc1c7c --- /dev/null +++ b/.clinerules/workflows/bmad-bmm-sprint-status.md @@ -0,0 +1,15 @@ +--- +name: 'sprint-status' +description: 'Summarize sprint-status.yaml, surface risks, and route to the right implementation workflow.' +disable-model-invocation: true +--- + +IT IS CRITICAL THAT YOU FOLLOW THESE STEPS - while staying in character as the current agent persona you may have loaded: + + +1. Always LOAD the FULL @{project-root}/_bmad/core/tasks/workflow.xml +2. 
READ its entire contents - this is the CORE OS for EXECUTING the specific workflow-config @{project-root}/_bmad/bmm/workflows/4-implementation/sprint-status/workflow.yaml +3. Pass the yaml path @{project-root}/_bmad/bmm/workflows/4-implementation/sprint-status/workflow.yaml as 'workflow-config' parameter to the workflow.xml instructions +4. Follow workflow.xml instructions EXACTLY as written to process and follow the specific workflow config and its instructions +5. Save outputs after EACH section when generating any documents from templates + diff --git a/.clinerules/workflows/bmad-bmm-technical-research.md b/.clinerules/workflows/bmad-bmm-technical-research.md new file mode 100644 index 0000000..5f76628 --- /dev/null +++ b/.clinerules/workflows/bmad-bmm-technical-research.md @@ -0,0 +1,10 @@ +--- +description: 'Conduct technical research covering technology evaluation, architecture decisions, and implementation approaches using current web data and verified sources.' +auto_execution_mode: "iterate" +--- + +# technical-research + +Read the entire workflow file at {project-root}/_bmad/bmm/workflows/1-analysis/research/workflow-technical-research.md + +Follow all instructions in the workflow file exactly as written. diff --git a/.clinerules/workflows/bmad-bmm-validate-prd.md b/.clinerules/workflows/bmad-bmm-validate-prd.md new file mode 100644 index 0000000..48cba4b --- /dev/null +++ b/.clinerules/workflows/bmad-bmm-validate-prd.md @@ -0,0 +1,10 @@ +--- +description: 'Validate an existing PRD against BMAD standards - comprehensive review for completeness, clarity, and quality' +auto_execution_mode: "iterate" +--- + +# validate-prd + +Read the entire workflow file at {project-root}/_bmad/bmm/workflows/2-plan-workflows/create-prd/workflow-validate-prd.md + +Follow all instructions in the workflow file exactly as written. 
diff --git a/.clinerules/workflows/bmad-brainstorming.md b/.clinerules/workflows/bmad-brainstorming.md new file mode 100644 index 0000000..0e61188 --- /dev/null +++ b/.clinerules/workflows/bmad-brainstorming.md @@ -0,0 +1,10 @@ +--- +description: 'Facilitate interactive brainstorming sessions using diverse creative techniques and ideation methods' +auto_execution_mode: "iterate" +--- + +# brainstorming + +Read the entire workflow file at {project-root}/_bmad/core/workflows/brainstorming/workflow.md + +Follow all instructions in the workflow file exactly as written. diff --git a/.clinerules/workflows/bmad-cis-design-thinking.md b/.clinerules/workflows/bmad-cis-design-thinking.md new file mode 100644 index 0000000..6c8e636 --- /dev/null +++ b/.clinerules/workflows/bmad-cis-design-thinking.md @@ -0,0 +1,15 @@ +--- +name: 'design-thinking' +description: 'Guide human-centered design processes using empathy-driven methodologies. This workflow walks through the design thinking phases - Empathize, Define, Ideate, Prototype, and Test - to create solutions deeply rooted in user needs.' +disable-model-invocation: true +--- + +IT IS CRITICAL THAT YOU FOLLOW THESE STEPS - while staying in character as the current agent persona you may have loaded: + + +1. Always LOAD the FULL @{project-root}/_bmad/core/tasks/workflow.xml +2. READ its entire contents - this is the CORE OS for EXECUTING the specific workflow-config @{project-root}/_bmad/cis/workflows/design-thinking/workflow.yaml +3. Pass the yaml path @{project-root}/_bmad/cis/workflows/design-thinking/workflow.yaml as 'workflow-config' parameter to the workflow.xml instructions +4. Follow workflow.xml instructions EXACTLY as written to process and follow the specific workflow config and its instructions +5. 
Save outputs after EACH section when generating any documents from templates + diff --git a/.clinerules/workflows/bmad-cis-innovation-strategy.md b/.clinerules/workflows/bmad-cis-innovation-strategy.md new file mode 100644 index 0000000..0b977eb --- /dev/null +++ b/.clinerules/workflows/bmad-cis-innovation-strategy.md @@ -0,0 +1,15 @@ +--- +name: 'innovation-strategy' +description: 'Identify disruption opportunities and architect business model innovation. This workflow guides strategic analysis of markets, competitive dynamics, and business model innovation to uncover sustainable competitive advantages and breakthrough opportunities.' +disable-model-invocation: true +--- + +IT IS CRITICAL THAT YOU FOLLOW THESE STEPS - while staying in character as the current agent persona you may have loaded: + + +1. Always LOAD the FULL @{project-root}/_bmad/core/tasks/workflow.xml +2. READ its entire contents - this is the CORE OS for EXECUTING the specific workflow-config @{project-root}/_bmad/cis/workflows/innovation-strategy/workflow.yaml +3. Pass the yaml path @{project-root}/_bmad/cis/workflows/innovation-strategy/workflow.yaml as 'workflow-config' parameter to the workflow.xml instructions +4. Follow workflow.xml instructions EXACTLY as written to process and follow the specific workflow config and its instructions +5. Save outputs after EACH section when generating any documents from templates + diff --git a/.clinerules/workflows/bmad-cis-problem-solving.md b/.clinerules/workflows/bmad-cis-problem-solving.md new file mode 100644 index 0000000..9bfa669 --- /dev/null +++ b/.clinerules/workflows/bmad-cis-problem-solving.md @@ -0,0 +1,15 @@ +--- +name: 'problem-solving' +description: 'Apply systematic problem-solving methodologies to crack complex challenges. This workflow guides through problem diagnosis, root cause analysis, creative solution generation, evaluation, and implementation planning using proven frameworks.' 
+disable-model-invocation: true +--- + +IT IS CRITICAL THAT YOU FOLLOW THESE STEPS - while staying in character as the current agent persona you may have loaded: + + +1. Always LOAD the FULL @{project-root}/_bmad/core/tasks/workflow.xml +2. READ its entire contents - this is the CORE OS for EXECUTING the specific workflow-config @{project-root}/_bmad/cis/workflows/problem-solving/workflow.yaml +3. Pass the yaml path @{project-root}/_bmad/cis/workflows/problem-solving/workflow.yaml as 'workflow-config' parameter to the workflow.xml instructions +4. Follow workflow.xml instructions EXACTLY as written to process and follow the specific workflow config and its instructions +5. Save outputs after EACH section when generating any documents from templates + diff --git a/.clinerules/workflows/bmad-cis-storytelling.md b/.clinerules/workflows/bmad-cis-storytelling.md new file mode 100644 index 0000000..734cca4 --- /dev/null +++ b/.clinerules/workflows/bmad-cis-storytelling.md @@ -0,0 +1,15 @@ +--- +name: 'storytelling' +description: 'Craft compelling narratives using proven story frameworks and techniques. This workflow guides users through structured narrative development, applying appropriate story frameworks to create emotionally resonant and engaging stories for any purpose.' +disable-model-invocation: true +--- + +IT IS CRITICAL THAT YOU FOLLOW THESE STEPS - while staying in character as the current agent persona you may have loaded: + + +1. Always LOAD the FULL @{project-root}/_bmad/core/tasks/workflow.xml +2. READ its entire contents - this is the CORE OS for EXECUTING the specific workflow-config @{project-root}/_bmad/cis/workflows/storytelling/workflow.yaml +3. Pass the yaml path @{project-root}/_bmad/cis/workflows/storytelling/workflow.yaml as 'workflow-config' parameter to the workflow.xml instructions +4. Follow workflow.xml instructions EXACTLY as written to process and follow the specific workflow config and its instructions +5. 
Save outputs after EACH section when generating any documents from templates + diff --git a/.clinerules/workflows/bmad-editorial-review-prose.md b/.clinerules/workflows/bmad-editorial-review-prose.md new file mode 100644 index 0000000..3b6c00c --- /dev/null +++ b/.clinerules/workflows/bmad-editorial-review-prose.md @@ -0,0 +1,10 @@ +--- +name: 'editorial-review-prose' +description: 'Clinical copy-editor that reviews text for communication issues' +--- + +# editorial-review-prose + +Read the entire task file at: {project-root}/_bmad/core/tasks/editorial-review-prose.xml + +Follow all instructions in the task file exactly as written. diff --git a/.clinerules/workflows/bmad-editorial-review-structure.md b/.clinerules/workflows/bmad-editorial-review-structure.md new file mode 100644 index 0000000..f1128a5 --- /dev/null +++ b/.clinerules/workflows/bmad-editorial-review-structure.md @@ -0,0 +1,10 @@ +--- +name: 'editorial-review-structure' +description: 'Structural editor that proposes cuts, reorganization, and simplification while preserving comprehension' +--- + +# editorial-review-structure + +Read the entire task file at: {project-root}/_bmad/core/tasks/editorial-review-structure.xml + +Follow all instructions in the task file exactly as written. diff --git a/.clinerules/workflows/bmad-help.md b/.clinerules/workflows/bmad-help.md new file mode 100644 index 0000000..70af2fc --- /dev/null +++ b/.clinerules/workflows/bmad-help.md @@ -0,0 +1,10 @@ +--- +name: 'help' +description: 'Get unstuck by showing what workflow steps come next or answering questions about what to do' +--- + +# help + +Read the entire task file at: {project-root}/_bmad/core/tasks/help.md + +Follow all instructions in the task file exactly as written. 
diff --git a/.clinerules/workflows/bmad-index-docs.md b/.clinerules/workflows/bmad-index-docs.md new file mode 100644 index 0000000..e0d5b82 --- /dev/null +++ b/.clinerules/workflows/bmad-index-docs.md @@ -0,0 +1,10 @@ +--- +name: 'index-docs' +description: 'Generates or updates an index.md of all documents in the specified directory' +--- + +# index-docs + +Read the entire task file at: {project-root}/_bmad/core/tasks/index-docs.xml + +Follow all instructions in the task file exactly as written. diff --git a/.clinerules/workflows/bmad-party-mode.md b/.clinerules/workflows/bmad-party-mode.md new file mode 100644 index 0000000..8cf1ecf --- /dev/null +++ b/.clinerules/workflows/bmad-party-mode.md @@ -0,0 +1,10 @@ +--- +description: 'Orchestrates group discussions between all installed BMAD agents, enabling natural multi-agent conversations' +auto_execution_mode: "iterate" +--- + +# party-mode + +Read the entire workflow file at {project-root}/_bmad/core/workflows/party-mode/workflow.md + +Follow all instructions in the workflow file exactly as written. diff --git a/.clinerules/workflows/bmad-review-adversarial-general.md b/.clinerules/workflows/bmad-review-adversarial-general.md new file mode 100644 index 0000000..17e1a33 --- /dev/null +++ b/.clinerules/workflows/bmad-review-adversarial-general.md @@ -0,0 +1,10 @@ +--- +name: 'review-adversarial-general' +description: 'Cynically review content and produce findings' +--- + +# review-adversarial-general + +Read the entire task file at: {project-root}/_bmad/core/tasks/review-adversarial-general.xml + +Follow all instructions in the task file exactly as written. 
diff --git a/.clinerules/workflows/bmad-shard-doc.md b/.clinerules/workflows/bmad-shard-doc.md new file mode 100644 index 0000000..0a38656 --- /dev/null +++ b/.clinerules/workflows/bmad-shard-doc.md @@ -0,0 +1,10 @@ +--- +name: 'shard-doc' +description: 'Splits large markdown documents into smaller, organized files based on level 2 (default) sections' +--- + +# shard-doc + +Read the entire task file at: {project-root}/_bmad/core/tasks/shard-doc.xml + +Follow all instructions in the task file exactly as written. diff --git a/.opencode/agent/bmad-agent-bmad-master.md b/.opencode/agent/bmad-agent-bmad-master.md index b003c92..884a51c 100644 --- a/.opencode/agent/bmad-agent-bmad-master.md +++ b/.opencode/agent/bmad-agent-bmad-master.md @@ -1,5 +1,5 @@ --- -mode: primary +name: 'bmad-master' description: 'bmad-master agent' --- diff --git a/.opencode/agent/bmad-agent-bmb-agent-builder.md b/.opencode/agent/bmad-agent-bmb-agent-builder.md index 51a0b01..6183c76 100644 --- a/.opencode/agent/bmad-agent-bmb-agent-builder.md +++ b/.opencode/agent/bmad-agent-bmb-agent-builder.md @@ -1,5 +1,5 @@ --- -mode: primary +name: 'agent-builder' description: 'agent-builder agent' --- diff --git a/.opencode/agent/bmad-agent-bmb-module-builder.md b/.opencode/agent/bmad-agent-bmb-module-builder.md index a4d345b..14b3940 100644 --- a/.opencode/agent/bmad-agent-bmb-module-builder.md +++ b/.opencode/agent/bmad-agent-bmb-module-builder.md @@ -1,5 +1,5 @@ --- -mode: primary +name: 'module-builder' description: 'module-builder agent' --- diff --git a/.opencode/agent/bmad-agent-bmb-workflow-builder.md b/.opencode/agent/bmad-agent-bmb-workflow-builder.md index 7431b86..d877032 100644 --- a/.opencode/agent/bmad-agent-bmb-workflow-builder.md +++ b/.opencode/agent/bmad-agent-bmb-workflow-builder.md @@ -1,5 +1,5 @@ --- -mode: primary +name: 'workflow-builder' description: 'workflow-builder agent' --- diff --git a/.opencode/agent/bmad-agent-bmm-analyst.md b/.opencode/agent/bmad-agent-bmm-analyst.md 
index e47a40b..a13c201 100644 --- a/.opencode/agent/bmad-agent-bmm-analyst.md +++ b/.opencode/agent/bmad-agent-bmm-analyst.md @@ -1,5 +1,5 @@ --- -mode: primary +name: 'analyst' description: 'analyst agent' --- diff --git a/.opencode/agent/bmad-agent-bmm-architect.md b/.opencode/agent/bmad-agent-bmm-architect.md index 64be4da..15d1c1f 100644 --- a/.opencode/agent/bmad-agent-bmm-architect.md +++ b/.opencode/agent/bmad-agent-bmm-architect.md @@ -1,5 +1,5 @@ --- -mode: primary +name: 'architect' description: 'architect agent' --- diff --git a/.opencode/agent/bmad-agent-bmm-dev.md b/.opencode/agent/bmad-agent-bmm-dev.md index 9de4f8f..1ee1e97 100644 --- a/.opencode/agent/bmad-agent-bmm-dev.md +++ b/.opencode/agent/bmad-agent-bmm-dev.md @@ -1,5 +1,5 @@ --- -mode: primary +name: 'dev' description: 'dev agent' --- diff --git a/.opencode/agent/bmad-agent-bmm-pm.md b/.opencode/agent/bmad-agent-bmm-pm.md index d320ce5..e27c533 100644 --- a/.opencode/agent/bmad-agent-bmm-pm.md +++ b/.opencode/agent/bmad-agent-bmm-pm.md @@ -1,5 +1,5 @@ --- -mode: primary +name: 'pm' description: 'pm agent' --- diff --git a/.opencode/agent/bmad-agent-bmm-qa.md b/.opencode/agent/bmad-agent-bmm-qa.md index c81bb7d..d8c3875 100644 --- a/.opencode/agent/bmad-agent-bmm-qa.md +++ b/.opencode/agent/bmad-agent-bmm-qa.md @@ -1,5 +1,5 @@ --- -mode: primary +name: 'qa' description: 'qa agent' --- diff --git a/.opencode/agent/bmad-agent-bmm-quick-flow-solo-dev.md b/.opencode/agent/bmad-agent-bmm-quick-flow-solo-dev.md index fb8ad57..c00703a 100644 --- a/.opencode/agent/bmad-agent-bmm-quick-flow-solo-dev.md +++ b/.opencode/agent/bmad-agent-bmm-quick-flow-solo-dev.md @@ -1,5 +1,5 @@ --- -mode: primary +name: 'quick-flow-solo-dev' description: 'quick-flow-solo-dev agent' --- diff --git a/.opencode/agent/bmad-agent-bmm-sm.md b/.opencode/agent/bmad-agent-bmm-sm.md index 3ad4223..36a4bca 100644 --- a/.opencode/agent/bmad-agent-bmm-sm.md +++ b/.opencode/agent/bmad-agent-bmm-sm.md @@ -1,5 +1,5 @@ --- -mode: 
primary +name: 'sm' description: 'sm agent' --- diff --git a/.opencode/agent/bmad-agent-bmm-tech-writer.md b/.opencode/agent/bmad-agent-bmm-tech-writer.md index fd552b1..b261edd 100644 --- a/.opencode/agent/bmad-agent-bmm-tech-writer.md +++ b/.opencode/agent/bmad-agent-bmm-tech-writer.md @@ -1,5 +1,5 @@ --- -mode: primary +name: 'tech-writer' description: 'tech-writer agent' --- diff --git a/.opencode/agent/bmad-agent-bmm-ux-designer.md b/.opencode/agent/bmad-agent-bmm-ux-designer.md index 6d106b3..0f5b26b 100644 --- a/.opencode/agent/bmad-agent-bmm-ux-designer.md +++ b/.opencode/agent/bmad-agent-bmm-ux-designer.md @@ -1,5 +1,5 @@ --- -mode: primary +name: 'ux-designer' description: 'ux-designer agent' --- diff --git a/.opencode/agent/bmad-agent-cis-brainstorming-coach.md b/.opencode/agent/bmad-agent-cis-brainstorming-coach.md index 35797cd..be9f54c 100644 --- a/.opencode/agent/bmad-agent-cis-brainstorming-coach.md +++ b/.opencode/agent/bmad-agent-cis-brainstorming-coach.md @@ -1,5 +1,5 @@ --- -mode: primary +name: 'brainstorming-coach' description: 'brainstorming-coach agent' --- diff --git a/.opencode/agent/bmad-agent-cis-creative-problem-solver.md b/.opencode/agent/bmad-agent-cis-creative-problem-solver.md index e4b0a6a..cda45df 100644 --- a/.opencode/agent/bmad-agent-cis-creative-problem-solver.md +++ b/.opencode/agent/bmad-agent-cis-creative-problem-solver.md @@ -1,5 +1,5 @@ --- -mode: primary +name: 'creative-problem-solver' description: 'creative-problem-solver agent' --- diff --git a/.opencode/agent/bmad-agent-cis-design-thinking-coach.md b/.opencode/agent/bmad-agent-cis-design-thinking-coach.md index a19a9ad..df6a432 100644 --- a/.opencode/agent/bmad-agent-cis-design-thinking-coach.md +++ b/.opencode/agent/bmad-agent-cis-design-thinking-coach.md @@ -1,5 +1,5 @@ --- -mode: primary +name: 'design-thinking-coach' description: 'design-thinking-coach agent' --- diff --git a/.opencode/agent/bmad-agent-cis-innovation-strategist.md 
b/.opencode/agent/bmad-agent-cis-innovation-strategist.md index 6faa258..f1533e2 100644 --- a/.opencode/agent/bmad-agent-cis-innovation-strategist.md +++ b/.opencode/agent/bmad-agent-cis-innovation-strategist.md @@ -1,5 +1,5 @@ --- -mode: primary +name: 'innovation-strategist' description: 'innovation-strategist agent' --- diff --git a/.opencode/agent/bmad-agent-cis-presentation-master.md b/.opencode/agent/bmad-agent-cis-presentation-master.md index 2f8d0a4..03093a7 100644 --- a/.opencode/agent/bmad-agent-cis-presentation-master.md +++ b/.opencode/agent/bmad-agent-cis-presentation-master.md @@ -1,5 +1,5 @@ --- -mode: primary +name: 'presentation-master' description: 'presentation-master agent' --- diff --git a/.opencode/agent/bmad-agent-cis-storyteller.md b/.opencode/agent/bmad-agent-cis-storyteller.md index 49ef200..cc261d5 100644 --- a/.opencode/agent/bmad-agent-cis-storyteller.md +++ b/.opencode/agent/bmad-agent-cis-storyteller.md @@ -1,5 +1,5 @@ --- -mode: primary +name: 'storyteller' description: 'storyteller agent' --- diff --git a/.opencode/command/bmad-bmb-create-agent.md b/.opencode/command/bmad-bmb-create-agent.md index 6e1cd2e..17ee466 100644 --- a/.opencode/command/bmad-bmb-create-agent.md +++ b/.opencode/command/bmad-bmb-create-agent.md @@ -1,4 +1,5 @@ --- +name: 'create-agent' description: 'Create a new BMAD agent with best practices and compliance' --- @@ -7,6 +8,7 @@ Execute the BMAD 'create-agent' workflow. CRITICAL: You must load and follow the workflow definition exactly. WORKFLOW INSTRUCTIONS: + 1. LOAD the workflow file from {project-root}/_bmad/bmb/workflows/agent/workflow-create-agent.md 2. READ its entire contents 3. 
FOLLOW every step precisely as specified diff --git a/.opencode/command/bmad-bmb-create-module-brief.md b/.opencode/command/bmad-bmb-create-module-brief.md index 49c99ae..20ddf5f 100644 --- a/.opencode/command/bmad-bmb-create-module-brief.md +++ b/.opencode/command/bmad-bmb-create-module-brief.md @@ -1,4 +1,5 @@ --- +name: 'create-module-brief' description: 'Create product brief for BMAD module development' --- @@ -7,6 +8,7 @@ Execute the BMAD 'create-module-brief' workflow. CRITICAL: You must load and follow the workflow definition exactly. WORKFLOW INSTRUCTIONS: + 1. LOAD the workflow file from {project-root}/_bmad/bmb/workflows/module/workflow-create-module-brief.md 2. READ its entire contents 3. FOLLOW every step precisely as specified diff --git a/.opencode/command/bmad-bmb-create-module.md b/.opencode/command/bmad-bmb-create-module.md index c10a794..828bf25 100644 --- a/.opencode/command/bmad-bmb-create-module.md +++ b/.opencode/command/bmad-bmb-create-module.md @@ -1,4 +1,5 @@ --- +name: 'create-module' description: 'Create a complete BMAD module with agents, workflows, and infrastructure' --- @@ -7,6 +8,7 @@ Execute the BMAD 'create-module' workflow. CRITICAL: You must load and follow the workflow definition exactly. WORKFLOW INSTRUCTIONS: + 1. LOAD the workflow file from {project-root}/_bmad/bmb/workflows/module/workflow-create-module.md 2. READ its entire contents 3. FOLLOW every step precisely as specified diff --git a/.opencode/command/bmad-bmb-create-workflow.md b/.opencode/command/bmad-bmb-create-workflow.md index 311712a..be6a5ee 100644 --- a/.opencode/command/bmad-bmb-create-workflow.md +++ b/.opencode/command/bmad-bmb-create-workflow.md @@ -1,4 +1,5 @@ --- +name: 'create-workflow' description: 'Create a new BMAD workflow with proper structure and best practices' --- @@ -7,6 +8,7 @@ Execute the BMAD 'create-workflow' workflow. CRITICAL: You must load and follow the workflow definition exactly. WORKFLOW INSTRUCTIONS: + 1. 
LOAD the workflow file from {project-root}/_bmad/bmb/workflows/workflow/workflow-create-workflow.md 2. READ its entire contents 3. FOLLOW every step precisely as specified diff --git a/.opencode/command/bmad-bmb-edit-agent.md b/.opencode/command/bmad-bmb-edit-agent.md index 9ba5608..de70bc9 100644 --- a/.opencode/command/bmad-bmb-edit-agent.md +++ b/.opencode/command/bmad-bmb-edit-agent.md @@ -1,4 +1,5 @@ --- +name: 'edit-agent' description: 'Edit existing BMAD agents while maintaining compliance' --- @@ -7,6 +8,7 @@ Execute the BMAD 'edit-agent' workflow. CRITICAL: You must load and follow the workflow definition exactly. WORKFLOW INSTRUCTIONS: + 1. LOAD the workflow file from {project-root}/_bmad/bmb/workflows/agent/workflow-edit-agent.md 2. READ its entire contents 3. FOLLOW every step precisely as specified diff --git a/.opencode/command/bmad-bmb-edit-module.md b/.opencode/command/bmad-bmb-edit-module.md index 79752f5..bb83f26 100644 --- a/.opencode/command/bmad-bmb-edit-module.md +++ b/.opencode/command/bmad-bmb-edit-module.md @@ -1,4 +1,5 @@ --- +name: 'edit-module' description: 'Edit existing BMAD modules while maintaining coherence' --- @@ -7,6 +8,7 @@ Execute the BMAD 'edit-module' workflow. CRITICAL: You must load and follow the workflow definition exactly. WORKFLOW INSTRUCTIONS: + 1. LOAD the workflow file from {project-root}/_bmad/bmb/workflows/module/workflow-edit-module.md 2. READ its entire contents 3. FOLLOW every step precisely as specified diff --git a/.opencode/command/bmad-bmb-edit-workflow.md b/.opencode/command/bmad-bmb-edit-workflow.md index 1666d3a..ec27a8b 100644 --- a/.opencode/command/bmad-bmb-edit-workflow.md +++ b/.opencode/command/bmad-bmb-edit-workflow.md @@ -1,4 +1,5 @@ --- +name: 'edit-workflow' description: 'Edit existing BMAD workflows while maintaining integrity' --- @@ -7,6 +8,7 @@ Execute the BMAD 'edit-workflow' workflow. CRITICAL: You must load and follow the workflow definition exactly. WORKFLOW INSTRUCTIONS: + 1. 
LOAD the workflow file from {project-root}/_bmad/bmb/workflows/workflow/workflow-edit-workflow.md 2. READ its entire contents 3. FOLLOW every step precisely as specified diff --git a/.opencode/command/bmad-bmb-rework-workflow.md b/.opencode/command/bmad-bmb-rework-workflow.md index f0d4255..d558162 100644 --- a/.opencode/command/bmad-bmb-rework-workflow.md +++ b/.opencode/command/bmad-bmb-rework-workflow.md @@ -1,4 +1,5 @@ --- +name: 'rework-workflow' description: 'Rework a Workflow to a V6 Compliant Version' --- @@ -7,6 +8,7 @@ Execute the BMAD 'rework-workflow' workflow. CRITICAL: You must load and follow the workflow definition exactly. WORKFLOW INSTRUCTIONS: + 1. LOAD the workflow file from {project-root}/_bmad/bmb/workflows/workflow/workflow-rework-workflow.md 2. READ its entire contents 3. FOLLOW every step precisely as specified diff --git a/.opencode/command/bmad-bmb-validate-agent.md b/.opencode/command/bmad-bmb-validate-agent.md index ddb4268..2b03927 100644 --- a/.opencode/command/bmad-bmb-validate-agent.md +++ b/.opencode/command/bmad-bmb-validate-agent.md @@ -1,4 +1,5 @@ --- +name: 'validate-agent' description: 'Validate existing BMAD agents and offer to improve deficiencies' --- @@ -7,6 +8,7 @@ Execute the BMAD 'validate-agent' workflow. CRITICAL: You must load and follow the workflow definition exactly. WORKFLOW INSTRUCTIONS: + 1. LOAD the workflow file from {project-root}/_bmad/bmb/workflows/agent/workflow-validate-agent.md 2. READ its entire contents 3. 
FOLLOW every step precisely as specified diff --git a/.opencode/command/bmad-bmb-validate-max-parallel-workflow.md b/.opencode/command/bmad-bmb-validate-max-parallel-workflow.md index a17e65b..00225fb 100644 --- a/.opencode/command/bmad-bmb-validate-max-parallel-workflow.md +++ b/.opencode/command/bmad-bmb-validate-max-parallel-workflow.md @@ -1,4 +1,5 @@ --- +name: 'validate-max-parallel-workflow' description: 'Run validation checks in MAX-PARALLEL mode against a workflow requires a tool that supports Parallel Sub-Processes' --- @@ -7,6 +8,7 @@ Execute the BMAD 'validate-max-parallel-workflow' workflow. CRITICAL: You must load and follow the workflow definition exactly. WORKFLOW INSTRUCTIONS: + 1. LOAD the workflow file from {project-root}/_bmad/bmb/workflows/workflow/workflow-validate-max-parallel-workflow.md 2. READ its entire contents 3. FOLLOW every step precisely as specified diff --git a/.opencode/command/bmad-bmb-validate-module.md b/.opencode/command/bmad-bmb-validate-module.md index 5da2a84..6997bd5 100644 --- a/.opencode/command/bmad-bmb-validate-module.md +++ b/.opencode/command/bmad-bmb-validate-module.md @@ -1,4 +1,5 @@ --- +name: 'validate-module' description: 'Run compliance check on BMAD modules against best practices' --- @@ -7,6 +8,7 @@ Execute the BMAD 'validate-module' workflow. CRITICAL: You must load and follow the workflow definition exactly. WORKFLOW INSTRUCTIONS: + 1. LOAD the workflow file from {project-root}/_bmad/bmb/workflows/module/workflow-validate-module.md 2. READ its entire contents 3. 
FOLLOW every step precisely as specified diff --git a/.opencode/command/bmad-bmb-validate-workflow.md b/.opencode/command/bmad-bmb-validate-workflow.md index 8b90da6..4d27c19 100644 --- a/.opencode/command/bmad-bmb-validate-workflow.md +++ b/.opencode/command/bmad-bmb-validate-workflow.md @@ -1,4 +1,5 @@ --- +name: 'validate-workflow' description: 'Run validation check on BMAD workflows against best practices' --- @@ -7,6 +8,7 @@ Execute the BMAD 'validate-workflow' workflow. CRITICAL: You must load and follow the workflow definition exactly. WORKFLOW INSTRUCTIONS: + 1. LOAD the workflow file from {project-root}/_bmad/bmb/workflows/workflow/workflow-validate-workflow.md 2. READ its entire contents 3. FOLLOW every step precisely as specified diff --git a/.opencode/command/bmad-bmm-check-implementation-readiness.md b/.opencode/command/bmad-bmm-check-implementation-readiness.md index 747f0e6..8dbed34 100644 --- a/.opencode/command/bmad-bmm-check-implementation-readiness.md +++ b/.opencode/command/bmad-bmm-check-implementation-readiness.md @@ -1,4 +1,5 @@ --- +name: 'check-implementation-readiness' description: 'Critical validation workflow that assesses PRD, Architecture, and Epics & Stories for completeness and alignment before implementation. Uses adversarial review approach to find gaps and issues.' --- @@ -7,6 +8,7 @@ Execute the BMAD 'check-implementation-readiness' workflow. CRITICAL: You must load and follow the workflow definition exactly. WORKFLOW INSTRUCTIONS: + 1. LOAD the workflow file from {project-root}/_bmad/bmm/workflows/3-solutioning/check-implementation-readiness/workflow.md 2. READ its entire contents 3. 
FOLLOW every step precisely as specified diff --git a/.opencode/command/bmad-bmm-code-review.md b/.opencode/command/bmad-bmm-code-review.md index 117d067..1bd55c8 100644 --- a/.opencode/command/bmad-bmm-code-review.md +++ b/.opencode/command/bmad-bmm-code-review.md @@ -1,4 +1,5 @@ --- +name: 'code-review' description: 'Perform an ADVERSARIAL Senior Developer code review that finds 3-10 specific problems in every story. Challenges everything: code quality, test coverage, architecture compliance, security, performance. NEVER accepts `looks good` - must find minimum issues and can auto-fix with user approval.' --- @@ -7,6 +8,7 @@ Execute the BMAD 'code-review' workflow. CRITICAL: You must load and follow the workflow definition exactly. WORKFLOW INSTRUCTIONS: + 1. LOAD the workflow file from {project-root}/_bmad/bmm/workflows/4-implementation/code-review/workflow.yaml 2. READ its entire contents 3. FOLLOW every step precisely as specified diff --git a/.opencode/command/bmad-bmm-correct-course.md b/.opencode/command/bmad-bmm-correct-course.md index c570608..ecc5e85 100644 --- a/.opencode/command/bmad-bmm-correct-course.md +++ b/.opencode/command/bmad-bmm-correct-course.md @@ -1,4 +1,5 @@ --- +name: 'correct-course' description: 'Navigate significant changes during sprint execution by analyzing impact, proposing solutions, and routing for implementation' --- @@ -7,6 +8,7 @@ Execute the BMAD 'correct-course' workflow. CRITICAL: You must load and follow the workflow definition exactly. WORKFLOW INSTRUCTIONS: + 1. LOAD the workflow file from {project-root}/_bmad/bmm/workflows/4-implementation/correct-course/workflow.yaml 2. READ its entire contents 3. 
FOLLOW every step precisely as specified diff --git a/.opencode/command/bmad-bmm-create-architecture.md b/.opencode/command/bmad-bmm-create-architecture.md index ac52642..1936461 100644 --- a/.opencode/command/bmad-bmm-create-architecture.md +++ b/.opencode/command/bmad-bmm-create-architecture.md @@ -1,4 +1,5 @@ --- +name: 'create-architecture' description: 'Collaborative architectural decision facilitation for AI-agent consistency. Replaces template-driven architecture with intelligent, adaptive conversation that produces a decision-focused architecture document optimized for preventing agent conflicts.' --- @@ -7,6 +8,7 @@ Execute the BMAD 'create-architecture' workflow. CRITICAL: You must load and follow the workflow definition exactly. WORKFLOW INSTRUCTIONS: + 1. LOAD the workflow file from {project-root}/_bmad/bmm/workflows/3-solutioning/create-architecture/workflow.md 2. READ its entire contents 3. FOLLOW every step precisely as specified diff --git a/.opencode/command/bmad-bmm-create-epics-and-stories.md b/.opencode/command/bmad-bmm-create-epics-and-stories.md index 1d524e7..fea3816 100644 --- a/.opencode/command/bmad-bmm-create-epics-and-stories.md +++ b/.opencode/command/bmad-bmm-create-epics-and-stories.md @@ -1,4 +1,5 @@ --- +name: 'create-epics-and-stories' description: 'Transform PRD requirements and Architecture decisions into comprehensive stories organized by user value. This workflow requires completed PRD + Architecture documents (UX recommended if UI exists) and breaks down requirements into implementation-ready epics and user stories that incorporate all available technical and design context. Creates detailed, actionable stories with complete acceptance criteria for development teams.' --- @@ -7,6 +8,7 @@ Execute the BMAD 'create-epics-and-stories' workflow. CRITICAL: You must load and follow the workflow definition exactly. WORKFLOW INSTRUCTIONS: + 1. 
LOAD the workflow file from {project-root}/_bmad/bmm/workflows/3-solutioning/create-epics-and-stories/workflow.md 2. READ its entire contents 3. FOLLOW every step precisely as specified diff --git a/.opencode/command/bmad-bmm-create-prd.md b/.opencode/command/bmad-bmm-create-prd.md index fb3da9c..72a9643 100644 --- a/.opencode/command/bmad-bmm-create-prd.md +++ b/.opencode/command/bmad-bmm-create-prd.md @@ -1,4 +1,5 @@ --- +name: 'create-prd' description: 'Create a comprehensive PRD (Product Requirements Document) through structured workflow facilitation' --- @@ -7,6 +8,7 @@ Execute the BMAD 'create-prd' workflow. CRITICAL: You must load and follow the workflow definition exactly. WORKFLOW INSTRUCTIONS: + 1. LOAD the workflow file from {project-root}/_bmad/bmm/workflows/2-plan-workflows/create-prd/workflow-create-prd.md 2. READ its entire contents 3. FOLLOW every step precisely as specified diff --git a/.opencode/command/bmad-bmm-create-product-brief.md b/.opencode/command/bmad-bmm-create-product-brief.md index 78e7ea9..1d70723 100644 --- a/.opencode/command/bmad-bmm-create-product-brief.md +++ b/.opencode/command/bmad-bmm-create-product-brief.md @@ -1,4 +1,5 @@ --- +name: 'create-product-brief' description: 'Create comprehensive product briefs through collaborative step-by-step discovery as creative Business Analyst working with the user as peers.' --- @@ -7,6 +8,7 @@ Execute the BMAD 'create-product-brief' workflow. CRITICAL: You must load and follow the workflow definition exactly. WORKFLOW INSTRUCTIONS: + 1. LOAD the workflow file from {project-root}/_bmad/bmm/workflows/1-analysis/create-product-brief/workflow.md 2. READ its entire contents 3. 
FOLLOW every step precisely as specified diff --git a/.opencode/command/bmad-bmm-create-story.md b/.opencode/command/bmad-bmm-create-story.md index bffdb36..277e911 100644 --- a/.opencode/command/bmad-bmm-create-story.md +++ b/.opencode/command/bmad-bmm-create-story.md @@ -1,4 +1,5 @@ --- +name: 'create-story' description: 'Create the next user story from epics+stories with enhanced context analysis and direct ready-for-dev marking' --- @@ -7,6 +8,7 @@ Execute the BMAD 'create-story' workflow. CRITICAL: You must load and follow the workflow definition exactly. WORKFLOW INSTRUCTIONS: + 1. LOAD the workflow file from {project-root}/_bmad/bmm/workflows/4-implementation/create-story/workflow.yaml 2. READ its entire contents 3. FOLLOW every step precisely as specified diff --git a/.opencode/command/bmad-bmm-create-ux-design.md b/.opencode/command/bmad-bmm-create-ux-design.md index 23bbacd..e5c8547 100644 --- a/.opencode/command/bmad-bmm-create-ux-design.md +++ b/.opencode/command/bmad-bmm-create-ux-design.md @@ -1,4 +1,5 @@ --- +name: 'create-ux-design' description: 'Work with a peer UX Design expert to plan your applications UX patterns, look and feel.' --- @@ -7,6 +8,7 @@ Execute the BMAD 'create-ux-design' workflow. CRITICAL: You must load and follow the workflow definition exactly. WORKFLOW INSTRUCTIONS: + 1. LOAD the workflow file from {project-root}/_bmad/bmm/workflows/2-plan-workflows/create-ux-design/workflow.md 2. READ its entire contents 3. FOLLOW every step precisely as specified diff --git a/.opencode/command/bmad-bmm-dev-story.md b/.opencode/command/bmad-bmm-dev-story.md index 1cab0b0..6354371 100644 --- a/.opencode/command/bmad-bmm-dev-story.md +++ b/.opencode/command/bmad-bmm-dev-story.md @@ -1,4 +1,5 @@ --- +name: 'dev-story' description: 'Execute a story by implementing tasks/subtasks, writing tests, validating, and updating the story file per acceptance criteria' --- @@ -7,6 +8,7 @@ Execute the BMAD 'dev-story' workflow. 
CRITICAL: You must load and follow the workflow definition exactly. WORKFLOW INSTRUCTIONS: + 1. LOAD the workflow file from {project-root}/_bmad/bmm/workflows/4-implementation/dev-story/workflow.yaml 2. READ its entire contents 3. FOLLOW every step precisely as specified diff --git a/.opencode/command/bmad-bmm-document-project.md b/.opencode/command/bmad-bmm-document-project.md index 2d06569..9506323 100644 --- a/.opencode/command/bmad-bmm-document-project.md +++ b/.opencode/command/bmad-bmm-document-project.md @@ -1,4 +1,5 @@ --- +name: 'document-project' description: 'Analyzes and documents brownfield projects by scanning codebase, architecture, and patterns to create comprehensive reference documentation for AI-assisted development' --- @@ -7,6 +8,7 @@ Execute the BMAD 'document-project' workflow. CRITICAL: You must load and follow the workflow definition exactly. WORKFLOW INSTRUCTIONS: + 1. LOAD the workflow file from {project-root}/_bmad/bmm/workflows/document-project/workflow.yaml 2. READ its entire contents 3. FOLLOW every step precisely as specified diff --git a/.opencode/command/bmad-bmm-domain-research.md b/.opencode/command/bmad-bmm-domain-research.md index 27e069d..7e3132e 100644 --- a/.opencode/command/bmad-bmm-domain-research.md +++ b/.opencode/command/bmad-bmm-domain-research.md @@ -1,4 +1,5 @@ --- +name: 'domain-research' description: 'Conduct domain research covering industry analysis, regulations, technology trends, and ecosystem dynamics using current web data and verified sources.' --- @@ -7,6 +8,7 @@ Execute the BMAD 'domain-research' workflow. CRITICAL: You must load and follow the workflow definition exactly. WORKFLOW INSTRUCTIONS: + 1. LOAD the workflow file from {project-root}/_bmad/bmm/workflows/1-analysis/research/workflow-domain-research.md 2. READ its entire contents 3. 
FOLLOW every step precisely as specified diff --git a/.opencode/command/bmad-bmm-edit-prd.md b/.opencode/command/bmad-bmm-edit-prd.md index a9d18eb..824fea0 100644 --- a/.opencode/command/bmad-bmm-edit-prd.md +++ b/.opencode/command/bmad-bmm-edit-prd.md @@ -1,4 +1,5 @@ --- +name: 'edit-prd' description: 'Edit and improve an existing PRD - enhance clarity, completeness, and quality' --- @@ -7,6 +8,7 @@ Execute the BMAD 'edit-prd' workflow. CRITICAL: You must load and follow the workflow definition exactly. WORKFLOW INSTRUCTIONS: + 1. LOAD the workflow file from {project-root}/_bmad/bmm/workflows/2-plan-workflows/create-prd/workflow-edit-prd.md 2. READ its entire contents 3. FOLLOW every step precisely as specified diff --git a/.opencode/command/bmad-bmm-generate-project-context.md b/.opencode/command/bmad-bmm-generate-project-context.md index b9fbb3e..38b754b 100644 --- a/.opencode/command/bmad-bmm-generate-project-context.md +++ b/.opencode/command/bmad-bmm-generate-project-context.md @@ -1,4 +1,5 @@ --- +name: 'generate-project-context' description: 'Creates a concise project-context.md file with critical rules and patterns that AI agents must follow when implementing code. Optimized for LLM context efficiency.' --- @@ -7,6 +8,7 @@ Execute the BMAD 'generate-project-context' workflow. CRITICAL: You must load and follow the workflow definition exactly. WORKFLOW INSTRUCTIONS: + 1. LOAD the workflow file from {project-root}/_bmad/bmm/workflows/generate-project-context/workflow.md 2. READ its entire contents 3. 
FOLLOW every step precisely as specified diff --git a/.opencode/command/bmad-bmm-market-research.md b/.opencode/command/bmad-bmm-market-research.md index c81e0a6..d9f3195 100644 --- a/.opencode/command/bmad-bmm-market-research.md +++ b/.opencode/command/bmad-bmm-market-research.md @@ -1,4 +1,5 @@ --- +name: 'market-research' description: 'Conduct market research covering market size, growth, competition, and customer insights using current web data and verified sources.' --- @@ -7,6 +8,7 @@ Execute the BMAD 'market-research' workflow. CRITICAL: You must load and follow the workflow definition exactly. WORKFLOW INSTRUCTIONS: + 1. LOAD the workflow file from {project-root}/_bmad/bmm/workflows/1-analysis/research/workflow-market-research.md 2. READ its entire contents 3. FOLLOW every step precisely as specified diff --git a/.opencode/command/bmad-bmm-qa-automate.md b/.opencode/command/bmad-bmm-qa-automate.md index 66592ac..7f924b6 100644 --- a/.opencode/command/bmad-bmm-qa-automate.md +++ b/.opencode/command/bmad-bmm-qa-automate.md @@ -1,4 +1,5 @@ --- +name: 'qa-automate' description: 'Generate tests quickly for existing features using standard test patterns' --- @@ -7,6 +8,7 @@ Execute the BMAD 'qa-automate' workflow. CRITICAL: You must load and follow the workflow definition exactly. WORKFLOW INSTRUCTIONS: + 1. LOAD the workflow file from {project-root}/_bmad/bmm/workflows/qa/automate/workflow.yaml 2. READ its entire contents 3. FOLLOW every step precisely as specified diff --git a/.opencode/command/bmad-bmm-quick-dev.md b/.opencode/command/bmad-bmm-quick-dev.md index 8987e24..1850b54 100644 --- a/.opencode/command/bmad-bmm-quick-dev.md +++ b/.opencode/command/bmad-bmm-quick-dev.md @@ -1,4 +1,5 @@ --- +name: 'quick-dev' description: 'Flexible development - execute tech-specs OR direct instructions with optional planning.' --- @@ -7,6 +8,7 @@ Execute the BMAD 'quick-dev' workflow. CRITICAL: You must load and follow the workflow definition exactly. 
WORKFLOW INSTRUCTIONS: + 1. LOAD the workflow file from {project-root}/_bmad/bmm/workflows/bmad-quick-flow/quick-dev/workflow.md 2. READ its entire contents 3. FOLLOW every step precisely as specified diff --git a/.opencode/command/bmad-bmm-quick-spec.md b/.opencode/command/bmad-bmm-quick-spec.md index 86924e7..e1ec774 100644 --- a/.opencode/command/bmad-bmm-quick-spec.md +++ b/.opencode/command/bmad-bmm-quick-spec.md @@ -1,4 +1,5 @@ --- +name: 'quick-spec' description: 'Conversational spec engineering - ask questions, investigate code, produce implementation-ready tech-spec.' --- @@ -7,6 +8,7 @@ Execute the BMAD 'quick-spec' workflow. CRITICAL: You must load and follow the workflow definition exactly. WORKFLOW INSTRUCTIONS: + 1. LOAD the workflow file from {project-root}/_bmad/bmm/workflows/bmad-quick-flow/quick-spec/workflow.md 2. READ its entire contents 3. FOLLOW every step precisely as specified diff --git a/.opencode/command/bmad-bmm-retrospective.md b/.opencode/command/bmad-bmm-retrospective.md index 40b38cf..112f427 100644 --- a/.opencode/command/bmad-bmm-retrospective.md +++ b/.opencode/command/bmad-bmm-retrospective.md @@ -1,4 +1,5 @@ --- +name: 'retrospective' description: 'Run after epic completion to review overall success, extract lessons learned, and explore if new information emerged that might impact the next epic' --- @@ -7,6 +8,7 @@ Execute the BMAD 'retrospective' workflow. CRITICAL: You must load and follow the workflow definition exactly. WORKFLOW INSTRUCTIONS: + 1. LOAD the workflow file from {project-root}/_bmad/bmm/workflows/4-implementation/retrospective/workflow.yaml 2. READ its entire contents 3. 
FOLLOW every step precisely as specified diff --git a/.opencode/command/bmad-bmm-sprint-planning.md b/.opencode/command/bmad-bmm-sprint-planning.md index 8add5ac..e838684 100644 --- a/.opencode/command/bmad-bmm-sprint-planning.md +++ b/.opencode/command/bmad-bmm-sprint-planning.md @@ -1,4 +1,5 @@ --- +name: 'sprint-planning' description: 'Generate and manage the sprint status tracking file for Phase 4 implementation, extracting all epics and stories from epic files and tracking their status through the development lifecycle' --- @@ -7,6 +8,7 @@ Execute the BMAD 'sprint-planning' workflow. CRITICAL: You must load and follow the workflow definition exactly. WORKFLOW INSTRUCTIONS: + 1. LOAD the workflow file from {project-root}/_bmad/bmm/workflows/4-implementation/sprint-planning/workflow.yaml 2. READ its entire contents 3. FOLLOW every step precisely as specified diff --git a/.opencode/command/bmad-bmm-sprint-status.md b/.opencode/command/bmad-bmm-sprint-status.md index 3414810..34a41ba 100644 --- a/.opencode/command/bmad-bmm-sprint-status.md +++ b/.opencode/command/bmad-bmm-sprint-status.md @@ -1,4 +1,5 @@ --- +name: 'sprint-status' description: 'Summarize sprint-status.yaml, surface risks, and route to the right implementation workflow.' --- @@ -7,6 +8,7 @@ Execute the BMAD 'sprint-status' workflow. CRITICAL: You must load and follow the workflow definition exactly. WORKFLOW INSTRUCTIONS: + 1. LOAD the workflow file from {project-root}/_bmad/bmm/workflows/4-implementation/sprint-status/workflow.yaml 2. READ its entire contents 3. 
FOLLOW every step precisely as specified diff --git a/.opencode/command/bmad-bmm-technical-research.md b/.opencode/command/bmad-bmm-technical-research.md index 44f5d6f..af45a0c 100644 --- a/.opencode/command/bmad-bmm-technical-research.md +++ b/.opencode/command/bmad-bmm-technical-research.md @@ -1,4 +1,5 @@ --- +name: 'technical-research' description: 'Conduct technical research covering technology evaluation, architecture decisions, and implementation approaches using current web data and verified sources.' --- @@ -7,6 +8,7 @@ Execute the BMAD 'technical-research' workflow. CRITICAL: You must load and follow the workflow definition exactly. WORKFLOW INSTRUCTIONS: + 1. LOAD the workflow file from {project-root}/_bmad/bmm/workflows/1-analysis/research/workflow-technical-research.md 2. READ its entire contents 3. FOLLOW every step precisely as specified diff --git a/.opencode/command/bmad-bmm-validate-prd.md b/.opencode/command/bmad-bmm-validate-prd.md index 332412f..25c9734 100644 --- a/.opencode/command/bmad-bmm-validate-prd.md +++ b/.opencode/command/bmad-bmm-validate-prd.md @@ -1,4 +1,5 @@ --- +name: 'validate-prd' description: 'Validate an existing PRD against BMAD standards - comprehensive review for completeness, clarity, and quality' --- @@ -7,6 +8,7 @@ Execute the BMAD 'validate-prd' workflow. CRITICAL: You must load and follow the workflow definition exactly. WORKFLOW INSTRUCTIONS: + 1. LOAD the workflow file from {project-root}/_bmad/bmm/workflows/2-plan-workflows/create-prd/workflow-validate-prd.md 2. READ its entire contents 3. 
FOLLOW every step precisely as specified diff --git a/.opencode/command/bmad-brainstorming.md b/.opencode/command/bmad-brainstorming.md index fbfdf51..bbcc4d7 100644 --- a/.opencode/command/bmad-brainstorming.md +++ b/.opencode/command/bmad-brainstorming.md @@ -1,4 +1,5 @@ --- +name: 'brainstorming' description: 'Facilitate interactive brainstorming sessions using diverse creative techniques and ideation methods' --- @@ -7,6 +8,7 @@ Execute the BMAD 'brainstorming' workflow. CRITICAL: You must load and follow the workflow definition exactly. WORKFLOW INSTRUCTIONS: + 1. LOAD the workflow file from {project-root}/_bmad/core/workflows/brainstorming/workflow.md 2. READ its entire contents 3. FOLLOW every step precisely as specified diff --git a/.opencode/command/bmad-cis-design-thinking.md b/.opencode/command/bmad-cis-design-thinking.md index dc57b1d..4f3c605 100644 --- a/.opencode/command/bmad-cis-design-thinking.md +++ b/.opencode/command/bmad-cis-design-thinking.md @@ -1,4 +1,5 @@ --- +name: 'design-thinking' description: 'Guide human-centered design processes using empathy-driven methodologies. This workflow walks through the design thinking phases - Empathize, Define, Ideate, Prototype, and Test - to create solutions deeply rooted in user needs.' --- @@ -7,6 +8,7 @@ Execute the BMAD 'design-thinking' workflow. CRITICAL: You must load and follow the workflow definition exactly. WORKFLOW INSTRUCTIONS: + 1. LOAD the workflow file from {project-root}/_bmad/cis/workflows/design-thinking/workflow.yaml 2. READ its entire contents 3. FOLLOW every step precisely as specified diff --git a/.opencode/command/bmad-cis-innovation-strategy.md b/.opencode/command/bmad-cis-innovation-strategy.md index 880ed70..cdbcc0d 100644 --- a/.opencode/command/bmad-cis-innovation-strategy.md +++ b/.opencode/command/bmad-cis-innovation-strategy.md @@ -1,4 +1,5 @@ --- +name: 'innovation-strategy' description: 'Identify disruption opportunities and architect business model innovation. 
This workflow guides strategic analysis of markets, competitive dynamics, and business model innovation to uncover sustainable competitive advantages and breakthrough opportunities.' --- @@ -7,6 +8,7 @@ Execute the BMAD 'innovation-strategy' workflow. CRITICAL: You must load and follow the workflow definition exactly. WORKFLOW INSTRUCTIONS: + 1. LOAD the workflow file from {project-root}/_bmad/cis/workflows/innovation-strategy/workflow.yaml 2. READ its entire contents 3. FOLLOW every step precisely as specified diff --git a/.opencode/command/bmad-cis-problem-solving.md b/.opencode/command/bmad-cis-problem-solving.md index fda8723..6ed6c9e 100644 --- a/.opencode/command/bmad-cis-problem-solving.md +++ b/.opencode/command/bmad-cis-problem-solving.md @@ -1,4 +1,5 @@ --- +name: 'problem-solving' description: 'Apply systematic problem-solving methodologies to crack complex challenges. This workflow guides through problem diagnosis, root cause analysis, creative solution generation, evaluation, and implementation planning using proven frameworks.' --- @@ -7,6 +8,7 @@ Execute the BMAD 'problem-solving' workflow. CRITICAL: You must load and follow the workflow definition exactly. WORKFLOW INSTRUCTIONS: + 1. LOAD the workflow file from {project-root}/_bmad/cis/workflows/problem-solving/workflow.yaml 2. READ its entire contents 3. FOLLOW every step precisely as specified diff --git a/.opencode/command/bmad-cis-storytelling.md b/.opencode/command/bmad-cis-storytelling.md index c70eaa9..af6576c 100644 --- a/.opencode/command/bmad-cis-storytelling.md +++ b/.opencode/command/bmad-cis-storytelling.md @@ -1,4 +1,5 @@ --- +name: 'storytelling' description: 'Craft compelling narratives using proven story frameworks and techniques. This workflow guides users through structured narrative development, applying appropriate story frameworks to create emotionally resonant and engaging stories for any purpose.' --- @@ -7,6 +8,7 @@ Execute the BMAD 'storytelling' workflow. 
CRITICAL: You must load and follow the workflow definition exactly. WORKFLOW INSTRUCTIONS: + 1. LOAD the workflow file from {project-root}/_bmad/cis/workflows/storytelling/workflow.yaml 2. READ its entire contents 3. FOLLOW every step precisely as specified diff --git a/.opencode/command/bmad-editorial-review-prose.md b/.opencode/command/bmad-editorial-review-prose.md index 5b3bce3..397def3 100644 --- a/.opencode/command/bmad-editorial-review-prose.md +++ b/.opencode/command/bmad-editorial-review-prose.md @@ -1,10 +1,12 @@ --- +name: 'editorial-review-prose' description: 'Clinical copy-editor that reviews text for communication issues' --- Execute the BMAD 'editorial-review-prose' task. TASK INSTRUCTIONS: + 1. LOAD the task file from {project-root}/_bmad/core/tasks/editorial-review-prose.xml 2. READ its entire contents 3. FOLLOW every instruction precisely as specified diff --git a/.opencode/command/bmad-editorial-review-structure.md b/.opencode/command/bmad-editorial-review-structure.md index 0e7430b..fed46a9 100644 --- a/.opencode/command/bmad-editorial-review-structure.md +++ b/.opencode/command/bmad-editorial-review-structure.md @@ -1,10 +1,12 @@ --- +name: 'editorial-review-structure' description: 'Structural editor that proposes cuts, reorganization, and simplification while preserving comprehension' --- Execute the BMAD 'editorial-review-structure' task. TASK INSTRUCTIONS: + 1. LOAD the task file from {project-root}/_bmad/core/tasks/editorial-review-structure.xml 2. READ its entire contents 3. FOLLOW every instruction precisely as specified diff --git a/.opencode/command/bmad-help.md b/.opencode/command/bmad-help.md index 1a1f3a8..f0fc875 100644 --- a/.opencode/command/bmad-help.md +++ b/.opencode/command/bmad-help.md @@ -1,10 +1,12 @@ --- +name: 'help' description: 'Get unstuck by showing what workflow steps come next or answering questions about what to do' --- Execute the BMAD 'help' task. TASK INSTRUCTIONS: + 1. 
LOAD the task file from {project-root}/_bmad/core/tasks/help.md 2. READ its entire contents 3. FOLLOW every instruction precisely as specified diff --git a/.opencode/command/bmad-index-docs.md b/.opencode/command/bmad-index-docs.md index 50ae5df..7dca506 100644 --- a/.opencode/command/bmad-index-docs.md +++ b/.opencode/command/bmad-index-docs.md @@ -1,10 +1,12 @@ --- +name: 'index-docs' description: 'Generates or updates an index.md of all documents in the specified directory' --- Execute the BMAD 'index-docs' task. TASK INSTRUCTIONS: + 1. LOAD the task file from {project-root}/_bmad/core/tasks/index-docs.xml 2. READ its entire contents 3. FOLLOW every instruction precisely as specified diff --git a/.opencode/command/bmad-party-mode.md b/.opencode/command/bmad-party-mode.md index e2c172f..cb13612 100644 --- a/.opencode/command/bmad-party-mode.md +++ b/.opencode/command/bmad-party-mode.md @@ -1,4 +1,5 @@ --- +name: 'party-mode' description: 'Orchestrates group discussions between all installed BMAD agents, enabling natural multi-agent conversations' --- @@ -7,6 +8,7 @@ Execute the BMAD 'party-mode' workflow. CRITICAL: You must load and follow the workflow definition exactly. WORKFLOW INSTRUCTIONS: + 1. LOAD the workflow file from {project-root}/_bmad/core/workflows/party-mode/workflow.md 2. READ its entire contents 3. FOLLOW every step precisely as specified diff --git a/.opencode/command/bmad-review-adversarial-general.md b/.opencode/command/bmad-review-adversarial-general.md index 7ead337..da6c205 100644 --- a/.opencode/command/bmad-review-adversarial-general.md +++ b/.opencode/command/bmad-review-adversarial-general.md @@ -1,10 +1,12 @@ --- +name: 'review-adversarial-general' description: 'Cynically review content and produce findings' --- Execute the BMAD 'review-adversarial-general' task. TASK INSTRUCTIONS: + 1. LOAD the task file from {project-root}/_bmad/core/tasks/review-adversarial-general.xml 2. READ its entire contents 3. 
FOLLOW every instruction precisely as specified diff --git a/.opencode/command/bmad-shard-doc.md b/.opencode/command/bmad-shard-doc.md index 5fe7056..7df0a5a 100644 --- a/.opencode/command/bmad-shard-doc.md +++ b/.opencode/command/bmad-shard-doc.md @@ -1,10 +1,12 @@ --- +name: 'shard-doc' description: 'Splits large markdown documents into smaller, organized files based on level 2 (default) sections' --- Execute the BMAD 'shard-doc' task. TASK INSTRUCTIONS: + 1. LOAD the task file from {project-root}/_bmad/core/tasks/shard-doc.xml 2. READ its entire contents 3. FOLLOW every instruction precisely as specified diff --git a/Cargo.toml b/Cargo.toml index 029c85d..43790a3 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -2,7 +2,9 @@ members = [ "crates/components", "crates/core", - # "crates/solver", # Will be added in future stories + "crates/fluids", + "demo", # Demo/test project (user experiments) + "crates/solver", ] resolver = "2" diff --git a/EXAMPLES.md b/EXAMPLES.md new file mode 100644 index 0000000..0db0a10 --- /dev/null +++ b/EXAMPLES.md @@ -0,0 +1,553 @@ +# Exemples d'utilisation - Entropyk + +Ce document présente des exemples d'utilisation de la bibliothèque Entropyk, basés sur les composants actuellement développés (Epic 1 : Extensible Component Framework). + +## Table des matières + +1. [Types physiques (entropyk-core)](#1-types-physiques-entropyk-core) +2. [Ports et connexions](#2-ports-et-connexions) +3. [Compresseur (AHRI 540)](#3-compresseur-ahri-540) +4. [Détendeur (expansion valve)](#4-détendeur-expansion-valve) +5. [Conduite (pipe)](#5-conduite-pipe) +6. [Pompe](#6-pompe) +7. [Ventilateur](#7-ventilateur) +8. [Échangeurs de chaleur](#8-échangeurs-de-chaleur) +9. [Machine à états (ON/OFF/BYPASS)](#9-machine-à-états-onoffbypass) +10. [Polynômes et courbes de performance](#10-polynômes-et-courbes-de-performance) + +--- + +## 1. Types physiques (entropyk-core) + +Les types physiques utilisent le pattern NewType pour la sécurité des unités à la compilation. 
+ +```rust +use entropyk_core::{Pressure, Enthalpy, Temperature, MassFlow, Power}; + +fn main() { + // Pression : Pascals, bar, PSI + let p = Pressure::from_bar(3.5); + println!("Pression: {} Pa = {:.2} bar", p.to_pascals(), p.to_bar()); + + // Enthalpie : J/kg + let h = Enthalpy::from_joules_per_kg(400_000.0); + println!("Enthalpie: {:.0} J/kg", h.to_joules_per_kg()); + + // Température : Kelvin, Celsius, Fahrenheit + let t = Temperature::from_celsius(25.0); + println!("Température: {:.2} K = {:.1} °C", t.to_kelvin(), t.to_celsius()); + + // Débit massique : kg/s + let m = MassFlow::from_kg_per_s(0.05); + println!("Débit: {:.4} kg/s", m.to_kg_per_s()); + + // Puissance : Watts + let w = Power::from_watts(1500.0); + println!("Puissance: {:.0} W", w.to_watts()); + + // Opérations arithmétiques + let p2 = p + Pressure::from_bar(1.0); + let p3 = p * 2.0; +} +``` + +--- + +## 2. Ports et connexions + +Le système de ports utilise le **Type-State pattern** : les ports doivent être connectés avant utilisation dans le solveur. 
+ +```rust +use entropyk_components::port::{Port, FluidId, ConnectionError}; +use entropyk_core::{Pressure, Enthalpy}; + +fn main() -> Result<(), ConnectionError> { + // Créer deux ports déconnectés (même fluide, pression et enthalpie pour validation) + let port1 = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(1.0), + Enthalpy::from_joules_per_kg(400_000.0), + ); + + let port2 = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(1.0), + Enthalpy::from_joules_per_kg(400_000.0), + ); + + // Connecter les ports + let (mut connected1, mut connected2) = port1.connect(port2)?; + + // Une fois connectés, on peut lire et modifier les valeurs + println!("Pression port 1: {:.2} bar", connected1.pressure().to_bar()); + connected1.set_pressure(Pressure::from_bar(1.5)); + connected1.set_enthalpy(Enthalpy::from_joules_per_kg(450_000.0)); + + Ok(()) +} +``` + +### Gestion des erreurs de connexion + +```rust +use entropyk_components::port::{Port, FluidId, ConnectionError}; +use entropyk_core::{Pressure, Enthalpy}; + +fn main() { + // Erreur : fluides incompatibles + let r134a = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(1.0), + Enthalpy::from_joules_per_kg(400_000.0), + ); + let water = Port::new( + FluidId::new("Water"), + Pressure::from_bar(1.0), + Enthalpy::from_joules_per_kg(400_000.0), + ); + + match r134a.connect(water) { + Err(ConnectionError::IncompatibleFluid { from, to }) => { + println!("Erreur: {} et {} sont incompatibles", from, to); + } + _ => {} + } + + // Erreur : pression différente + let p1 = Port::new( + FluidId::new("R134a"), + Pressure::from_pascals(100_000.0), + Enthalpy::from_joules_per_kg(400_000.0), + ); + let p2 = Port::new( + FluidId::new("R134a"), + Pressure::from_pascals(200_000.0), + Enthalpy::from_joules_per_kg(400_000.0), + ); + + match p1.connect(p2) { + Err(ConnectionError::PressureMismatch { .. }) => { + println!("Erreur: pression non compatible"); + } + _ => {} + } +} +``` + +--- + +## 3. 
Compresseur (AHRI 540) + +Le compresseur utilise les coefficients AHRI 540 pour le débit massique et la puissance. + +```rust +use entropyk_components::compressor::{Compressor, Ahri540Coefficients}; +use entropyk_components::port::{FluidId, Port}; +use entropyk_core::{Pressure, Enthalpy}; + +fn main() -> Result<(), Box> { + // Coefficients AHRI 540 (exemple typique) + let coeffs = Ahri540Coefficients::new( + 0.85, 2.5, // M1, M2 (débit) + 500.0, 1500.0, -2.5, 1.8, // M3-M6 (puissance refroidissement) + 600.0, 1600.0, -3.0, 2.0 // M7-M10 (puissance chauffage) + ); + + // Créer les ports (même P et h pour validation) + let suction = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(3.5), + Enthalpy::from_joules_per_kg(400_000.0), + ); + let discharge = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(3.5), + Enthalpy::from_joules_per_kg(400_000.0), + ); + + // Créer le compresseur déconnecté + let compressor = Compressor::new( + coeffs, + suction, + discharge, + 2900.0, // RPM + 0.0001, // Volume balayé (m³/tour) + 0.85 // Rendement mécanique + )?; + + println!("Compresseur créé: fluide={}, vitesse={} RPM", + compressor.fluid_id(), + compressor.speed_rpm() + ); + + Ok(()) +} +``` + +### Modèle SST/SDT (températures de saturation) + +```rust +use entropyk_components::compressor::{Compressor, SstSdtCoefficients}; +use entropyk_components::polynomials::Polynomial2D; + +// Modèle bilinéaire: ṁ = a00 + a10*SST + a01*SDT + a11*SST*SDT +let mass_coeffs = SstSdtCoefficients::bilinear( + 0.05, 0.001, 0.0005, 0.00001, // coefficients débit + 1000.0, 50.0, 30.0, 0.5 // coefficients puissance +); + +// Évaluer à SST=273K, SDT=313K +let mass_flow = mass_coeffs.mass_flow_at(273.15, 313.15); +let power = mass_coeffs.power_at(273.15, 313.15); +``` + +--- + +## 4. Détendeur (expansion valve) + +Le détendeur modélise une détente isenthalpique (h_out = h_in). 
+ +```rust +use entropyk_components::expansion_valve::ExpansionValve; +use entropyk_components::port::{FluidId, Port}; +use entropyk_core::{Pressure, Enthalpy}; + +fn main() -> Result<(), Box> { + let inlet = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(10.0), + Enthalpy::from_joules_per_kg(250_000.0), + ); + let outlet = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(10.0), + Enthalpy::from_joules_per_kg(250_000.0), + ); + + // Ouverture optionnelle: None = variable, Some(1.0) = pleinement ouvert + let valve = ExpansionValve::new(inlet, outlet, Some(1.0))?; + + println!("Détendeur créé: fluide={}", valve.fluid_id()); + + // États opérationnels: On, Off, Bypass + // valve.set_operational_state(OperationalState::Off); + + Ok(()) +} +``` + +--- + +## 5. Conduite (pipe) + +Modélisation de la perte de charge avec l'équation de Darcy-Weisbach. + +```rust +use entropyk_components::pipe::{Pipe, PipeGeometry, roughness}; +use entropyk_components::port::{FluidId, Port}; +use entropyk_core::{Pressure, Enthalpy}; + +fn main() -> Result<(), Box> { + // Géométrie: 10m de tuyau cuivre 22mm, lisse + let geometry = PipeGeometry::new( + 10.0, // longueur (m) + 0.022, // diamètre intérieur (m) + roughness::SMOOTH, // rugosité (m) + )?; + + let inlet = Port::new( + FluidId::new("Water"), + Pressure::from_bar(2.0), + Enthalpy::from_joules_per_kg(84_000.0), + ); + let outlet = Port::new( + FluidId::new("Water"), + Pressure::from_bar(2.0), + Enthalpy::from_joules_per_kg(84_000.0), + ); + + // Eau à 20°C: ρ≈998 kg/m³, μ≈0.001 Pa·s + let pipe = Pipe::new( + geometry, + inlet, + outlet, + 998.0, // densité + 0.001, // viscosité + )?; + + println!("Conduite créée: L={}m, D={}m", + pipe.geometry().length_m, + pipe.geometry().diameter_m + ); + + Ok(()) +} +``` + +### Rugosités typiques + +```rust +use entropyk_components::pipe::roughness; + +// roughness::SMOOTH // Cuivre, plastique (0.0015 mm) +// roughness::STEEL_COMMERCIAL // Acier commercial (0.045 mm) +// 
roughness::GALVANIZED_IRON // Fer galvanisé (0.15 mm) +// roughness::CAST_IRON // Fonte (0.26 mm) +// roughness::PLASTIC // PVC/HDPE (0.0015 mm) +``` + +--- + +## 6. Pompe + +Courbes de performance polynomiales + lois d'affinité pour la variation de vitesse. + +```rust +use entropyk_components::pump::{Pump, PumpCurves}; +use entropyk_components::port::{FluidId, Port}; +use entropyk_core::{Pressure, Enthalpy}; + +fn main() -> Result<(), Box> { + // Courbe hauteur: H = 30 - 10*Q - 50*Q² (m) + // Courbe rendement: η = 0.5 + 0.3*Q - 0.5*Q² + let curves = PumpCurves::quadratic( + 30.0, -10.0, -50.0, // H = h0 + h1*Q + h2*Q² + 0.5, 0.3, -0.5, // η = e0 + e1*Q + e2*Q² + )?; + + let inlet = Port::new( + FluidId::new("Water"), + Pressure::from_bar(1.0), + Enthalpy::from_joules_per_kg(100_000.0), + ); + let outlet = Port::new( + FluidId::new("Water"), + Pressure::from_bar(1.0), + Enthalpy::from_joules_per_kg(100_000.0), + ); + + let pump = Pump::new(curves, inlet, outlet, 1000.0)?; + + // Évaluer la courbe à un débit donné (pompe connectée requise pour pressure_rise) + let head = pump.curves().head_at_flow(0.05); + let eff = pump.curves().efficiency_at_flow(0.05); + println!("À Q=0.05 m³/s: H={:.2} m, η={:.2}%", head, eff * 100.0); + + Ok(()) +} +``` + +--- + +## 7. Ventilateur + +Similaire à la pompe, avec courbe de pression statique. 
+ +```rust +use entropyk_components::fan::{Fan, FanCurves}; +use entropyk_components::port::{FluidId, Port}; +use entropyk_core::{Pressure, Enthalpy}; + +fn main() -> Result<(), Box> { + // Courbe pression statique (Pa) et rendement + let curves = FanCurves::quadratic( + 500.0, -100.0, -50.0, // P_s = p0 + p1*Q + p2*Q² + 0.4, 0.2, -0.3, // η + )?; + + let inlet = Port::new( + FluidId::new("Air"), + Pressure::from_pascals(101_325.0), + Enthalpy::from_joules_per_kg(300_000.0), + ); + let outlet = Port::new( + FluidId::new("Air"), + Pressure::from_pascals(101_325.0), + Enthalpy::from_joules_per_kg(300_000.0), + ); + + let fan = Fan::new(curves, inlet, outlet, 1.2)?; // ρ_air ≈ 1.2 kg/m³ + + let pressure = fan.curves().static_pressure_at_flow(1.0); + println!("À Q=1 m³/s: ΔP={:.0} Pa", pressure); + + Ok(()) +} +``` + +--- + +## 8. Échangeurs de chaleur + +### Condenseur (LMTD) + +```rust +use entropyk_components::heat_exchanger::Condenser; + +// UA = 10 kW/K +let condenser = Condenser::new(10_000.0); + +// Avec température de saturation personnalisée +let condenser = Condenser::with_saturation_temp(15_000.0, 323.15); + +println!("Condenseur: UA={} W/K, n_equations={}", condenser.ua(), condenser.n_equations()); +``` + +### Évaporateur + +```rust +use entropyk_components::heat_exchanger::Evaporator; + +let evaporator = Evaporator::new(8_000.0); +``` + +### Économiseur + +```rust +use entropyk_components::heat_exchanger::Economizer; + +let economizer = Economizer::new(5_000.0); +``` + +### Modèles de transfert (LMTD vs ε-NTU) + +```rust +use entropyk_components::heat_exchanger::{LmtdModel, EpsNtuModel, FlowConfiguration, ExchangerType}; + +// LMTD - contre-courant +let lmtd = LmtdModel::new(5000.0, FlowConfiguration::CounterFlow); + +// ε-NTU - échangeur à plaques +let eps_ntu = EpsNtuModel::new(5000.0, ExchangerType::CounterFlow); +``` + +--- + +## 9. Machine à états (ON/OFF/BYPASS) + +Les composants supportent trois états opérationnels (FR6-FR8). 
+ +```rust +use entropyk_components::state_machine::{OperationalState, CircuitId, StateManageable}; + +fn main() { + // États disponibles + let on = OperationalState::On; // Opération normale + let off = OperationalState::Off; // Débit nul + let bypass = OperationalState::Bypass; // Conduite adiabatique (P_in=P_out, h_in=h_out) + + // Propriétés des états + println!("On actif: {}", on.is_active()); + println!("Multiplicateur débit Off: {}", off.mass_flow_multiplier()); // 0.0 + println!("Multiplicateur débit Bypass: {}", bypass.mass_flow_multiplier()); // 1.0 + + // Transitions + assert!(on.can_transition_to(OperationalState::Off)); + + // Identification des circuits (multi-circuit, ex: PAC double circuit) + let circuit_primary = CircuitId::new("primary"); + let circuit_secondary = CircuitId::new("secondary"); + println!("Circuit: {}", circuit_primary); +} +``` + +### Utilisation avec un composant + +```rust +// Sur un compresseur, pompe, détendeur, etc. +// compressor.set_operational_state(OperationalState::Off); +// pump.set_speed_ratio(0.5); // 50% vitesse pour VFD +``` + +--- + +## 10. 
Polynômes et courbes de performance + +### Polynôme 1D + +```rust +use entropyk_components::polynomials::Polynomial1D; + +// P(x) = 1 + 2x + 3x² +let poly = Polynomial1D::new(vec![1.0, 2.0, 3.0]); +let y = poly.evaluate(2.0); // 1 + 4 + 12 = 17 +``` + +### Polynôme 2D + +```rust +use entropyk_components::polynomials::Polynomial2D; + +// Modèle bilinéaire: f(x,y) = a00 + a10*x + a01*y + a11*x*y +let poly = Polynomial2D::bilinear(1.0, 0.1, 0.2, 0.01); +let z = poly.evaluate(10.0, 20.0); +``` + +### Lois d'affinité (pompes/ventilateurs) + +```rust +use entropyk_components::polynomials::AffinityLaws; + +// À 50% de vitesse: Q₂/Q₁=0.5, H₂/H₁=0.25, P₂/P₁=0.125 +let ratio = 0.5; +let flow_ratio = AffinityLaws::flow_ratio(ratio); // 0.5 +let head_ratio = AffinityLaws::head_ratio(ratio); // 0.25 +let power_ratio = AffinityLaws::power_ratio(ratio); // 0.125 +``` + +--- + +## Exécution des exemples + +### Binaires de démonstration + +```bash +# State Machine (compresseur ON/OFF/BYPASS) +cargo run -p entropyk-demo --bin compressor-test + +# Ports et connexions +cargo run -p entropyk-demo --bin ports + +# Détendeur +cargo run -p entropyk-demo --bin expansion_valve + +# Conduite +cargo run -p entropyk-demo --bin pipe + +# Pompe +cargo run -p entropyk-demo --bin pump +``` + +### Tests + +```bash +cargo test --workspace +cargo test -p entropyk-components +cargo test -p entropyk-core +``` + +### Documentation + +```bash +cargo doc --workspace --open +``` + +--- + +## Composants développés (état actuel) + +| Composant | Statut | Trait Component | +|-----------|--------|-----------------| +| Port/Connexion | ✅ | - | +| Compresseur (AHRI 540, SST/SDT) | ✅ | ✅ | +| Détendeur | ✅ | ✅ | +| Conduite | ✅ | ✅ | +| Pompe | ✅ | ✅ | +| Ventilateur | ✅ | ✅ | +| Condenseur/Évaporateur/Économiseur | ✅ | ✅ | +| Modèle externe | ✅ | ✅ | +| State Machine | ✅ | - | + +--- + +*Document généré à partir du code et de README_STORY_1_3.md - Février 2026* diff --git a/README_STORY_1_3.md 
b/README_STORY_1_3.md new file mode 100644 index 0000000..62ecd27 --- /dev/null +++ b/README_STORY_1_3.md @@ -0,0 +1,355 @@ +# Entropyk - Story 1.3: Port and Connection System + +## 🎯 Ce qui a été implémenté + +Cette story implémente le système de ports et connexions pour les composants thermodynamiques avec le **Type-State pattern** pour la sécurité à la compilation. + +### Fonctionnalités principales + +- ✅ `Port` - Structure générique avec états `Disconnected` et `Connected` +- ✅ `FluidId` - Identification des fluides pour validation de compatibilité +- ✅ `ConnectionError` - Gestion d'erreurs avec `thiserror` +- ✅ Validation des connexions (compatibilité fluide, continuité pression/enthalpie) +- ✅ Extension du trait `Component` avec `get_ports()` + +## 🚀 Instructions de test + +### 1. Prérequis + +```bash +# Vérifier que Rust est installé +rustc --version +cargo --version +``` + +### 2. Cloner/Naviguer dans le projet + +```bash +cd /Users/sepehr/dev/Entropyk +``` + +### 3. Compiler le projet + +```bash +# Compiler tout le workspace +cargo build --workspace + +# Compiler en mode release (optimisé) +cargo build --workspace --release +``` + +### 4. Exécuter tous les tests + +```bash +# Tests complets du workspace +cargo test --workspace + +# Tests avec sortie détaillée +cargo test --workspace -- --nocapture + +# Tests du crate components uniquement +cargo test -p entropyk-components + +# Tests du crate core uniquement +cargo test -p entropyk-core +``` + +### 5. 
Vérifier la qualité du code + +```bash +# Clippy (linting strict) +cargo clippy --workspace -- -D warnings + +# Formatage du code +cargo fmt --workspace + +# Vérifier la documentation +cargo doc --workspace --open +``` + +## 🧪 Tests manuels + +### Test 1: Création et connexion de ports + +Créez un fichier `test_ports.rs` à la racine du projet : + +```rust +use entropyk_components::port::{Port, Disconnected, Connected, FluidId, ConnectionError}; +use entropyk_core::{Pressure, Enthalpy}; + +fn main() -> Result<(), ConnectionError> { + println!("=== Test de création de ports ==="); + + // Créer deux ports déconnectés + let port1 = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(1.0), + Enthalpy::from_joules_per_kg(400_000.0) + ); + + let port2 = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(1.0), + Enthalpy::from_joules_per_kg(400_000.0) + ); + + println!("Port 1 créé: fluid={:?}, pressure={:.2} Pa, enthalpy={:.2} J/kg", + port1.fluid_id(), + port1.pressure().to_pascals(), + port1.enthalpy().to_joules_per_kg() + ); + + println!("Port 2 créé: fluid={:?}, pressure={:.2} Pa, enthalpy={:.2} J/kg", + port2.fluid_id(), + port2.pressure().to_pascals(), + port2.enthalpy().to_joules_per_kg() + ); + + // Connecter les ports + println!("\n=== Connexion des ports ==="); + let (mut connected1, mut connected2) = port1.connect(port2)?; + + println!("✅ Ports connectés avec succès!"); + println!("Connected 1: pressure={:.2} Pa, enthalpy={:.2} J/kg", + connected1.pressure().to_pascals(), + connected1.enthalpy().to_joules_per_kg() + ); + + // Modifier les valeurs + println!("\n=== Modification des valeurs ==="); + connected1.set_pressure(Pressure::from_bar(1.5)); + connected1.set_enthalpy(Enthalpy::from_joules_per_kg(450_000.0)); + + println!("Port 1 modifié: pressure={:.2} Pa, enthalpy={:.2} J/kg", + connected1.pressure().to_pascals(), + connected1.enthalpy().to_joules_per_kg() + ); + + Ok(()) +} +``` + +Pour l'exécuter, ajoutez ce binary dans `Cargo.toml`: + 
+```toml +[[bin]] +name = "test_ports" +path = "test_ports.rs" +``` + +Puis: +```bash +cargo run --bin test_ports +``` + +### Test 2: Test d'erreurs + +```rust +use entropyk_components::port::{Port, FluidId, ConnectionError}; +use entropyk_core::{Pressure, Enthalpy}; + +fn main() { + println!("=== Test des erreurs de connexion ===\n"); + + // Test 1: Fluides incompatibles + println!("Test 1: Fluides incompatibles"); + let port1 = Port::new( + FluidId::new("R134a"), + Pressure::from_pascals(100_000.0), + Enthalpy::from_joules_per_kg(400_000.0) + ); + let port2 = Port::new( + FluidId::new("Water"), + Pressure::from_pascals(100_000.0), + Enthalpy::from_joules_per_kg(400_000.0) + ); + + match port1.connect(port2) { + Err(ConnectionError::IncompatibleFluid { from, to }) => { + println!("✅ Erreur capturée: Cannot connect {} to {}", from, to); + } + _ => println!("❌ Erreur non capturée!"), + } + + // Test 2: Pression différente + println!("\nTest 2: Pression différente"); + let port3 = Port::new( + FluidId::new("R134a"), + Pressure::from_pascals(100_000.0), + Enthalpy::from_joules_per_kg(400_000.0) + ); + let port4 = Port::new( + FluidId::new("R134a"), + Pressure::from_pascals(200_000.0), + Enthalpy::from_joules_per_kg(400_000.0) + ); + + match port3.connect(port4) { + Err(ConnectionError::PressureMismatch { from_pressure, to_pressure }) => { + println!("✅ Erreur capturée: Pressure mismatch {} vs {}", + from_pressure, to_pressure); + } + _ => println!("❌ Erreur non capturée!"), + } + + // Test 3: Connexion réussie + println!("\nTest 3: Connexion réussie"); + let port5 = Port::new( + FluidId::new("R134a"), + Pressure::from_pascals(100_000.0), + Enthalpy::from_joules_per_kg(400_000.0) + ); + let port6 = Port::new( + FluidId::new("R134a"), + Pressure::from_pascals(100_000.0), + Enthalpy::from_joules_per_kg(400_000.0) + ); + + match port5.connect(port6) { + Ok(_) => println!("✅ Connexion réussie!"), + Err(_) => println!("❌ Connexion échouée!"), + } +} +``` + +### Test 3: 
Vérification Type-State (doit échouer à la compilation) + +Créez ce fichier pour vérifier que la sécurité à la compilation fonctionne: + +```rust +use entropyk_components::port::{Port, Disconnected}; +use entropyk_core::{Pressure, Enthalpy, FluidId}; + +fn main() { + // Créer un port déconnecté + let port: Port<Disconnected> = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(1.0), + Enthalpy::from_joules_per_kg(400_000.0) + ); + + // Cette ligne doit provoquer une erreur de compilation: + // error[E0599]: no method named `pressure` found for struct `Port` + // let _p = port.pressure(); + + println!("Si vous décommentez la ligne ci-dessus, la compilation échouera!"); + println!("Cela prouve que le Type-State pattern fonctionne."); +} +``` + +## 📊 Structure des fichiers + +``` +entropyk/ +├── Cargo.toml # Workspace root +├── crates/ +│ ├── components/ # Crate components (modifié) +│ │ ├── Cargo.toml # + Dépendances ajoutées +│ │ └── src/ +│ │ ├── lib.rs # + Trait Component étendu +│ │ └── port.rs # NOUVEAU: Implémentation ports +│ └── core/ # Crate core (existant) +│ └── src/ +│ └── types.rs # Pressure, Enthalpy, etc. +└── _bmad-output/ + └── implementation-artifacts/ + └── 1-3-port-and-connection-system.md # Story document +``` + +## 🔍 Points clés de l'implémentation + +### Type-State Pattern + +```rust +// Disconnected et Connected sont des marqueurs de type vides +pub struct Disconnected; +pub struct Connected; + +// Port est générique sur l'état +pub struct Port<State> { + fluid_id: FluidId, + pressure: Pressure, + enthalpy: Enthalpy, + _state: PhantomData<State>, // Marqueur zéro-cost +} + +// Seuls les ports Disconnected peuvent être connectés +impl Port<Disconnected> { + pub fn connect(self, other: Port<Disconnected>) + -> Result<(Port<Connected>, Port<Connected>), ConnectionError> { + // Validation et connexion...
+ } +} + +// Seuls les ports Connected exposent les méthodes de lecture/écriture +impl Port<Connected> { + pub fn pressure(&self) -> Pressure { self.pressure } + pub fn set_pressure(&mut self, pressure: Pressure) { self.pressure = pressure } +} +``` + +### Validation des connexions + +```rust +pub fn connect(self, other: Port<Disconnected>) + -> Result<(Port<Connected>, Port<Connected>), ConnectionError> +{ + // 1. Validation du fluide + if self.fluid_id != other.fluid_id { + return Err(ConnectionError::IncompatibleFluid { ... }); + } + + // 2. Validation de la continuité de pression + if pressure_diff > 1e-6 { + return Err(ConnectionError::PressureMismatch { ... }); + } + + // 3. Validation de la continuité d'enthalpie + if enthalpy_diff > 1e-6 { + return Err(ConnectionError::EnthalpyMismatch { ... }); + } + + // Création des ports connectés avec valeurs moyennées + Ok((connected1, connected2)) +} +``` + +## 🐛 Dépannage + +### Erreur: `unresolved import entropyk_core` + +Solution: Vérifier que la dépendance est bien dans `crates/components/Cargo.toml`: + +```toml +[dependencies] +entropyk-core = { path = "../core" } +``` + +### Erreur: `approx crate not found` + +Solution: Vérifier la dev-dependency: + +```toml +[dev-dependencies] +approx = "0.5" +``` + +### Tests qui échouent + +```bash +# Nettoyer et reconstruire +cargo clean +cargo test --workspace +``` + +## 📚 Ressources + +- [Rust Type-State Pattern](https://rust-unofficial.github.io/patterns/patterns/behavioural/phantom-types.html) +- [thiserror documentation](https://docs.rs/thiserror/) +- [approx documentation](https://docs.rs/approx/) (pour les assertions flottantes) + +--- + +**Date d'implémentation**: 2026-02-14 +**Story**: 1.3 - Port and Connection System +**Statut**: ✅ Complété et testé diff --git a/_bmad-output/planning-artifacts/epics.md b/_bmad-output/planning-artifacts/epics.md index 00fad5f..4d325cf 100644 --- a/_bmad-output/planning-artifacts/epics.md +++ b/_bmad-output/planning-artifacts/epics.md @@ -102,6 +102,16 @@ This document provides
the complete epic and story breakdown for Entropyk, decom **FR42:** System includes Automatic Initialization Heuristic (Smart Guesser) proposing coherent initial pressure values based on source/sink temperatures +**FR43:** Components support calibration parameters (Calib: f_m, f_dp, f_ua, f_power, f_etav) to match simulation to real machine test data + +**FR44:** System can validate results against ASHRAE 140 / BESTEST test cases (post-MVP) + +**FR45:** System supports inverse calibration (parameter estimation from test bench data) + +**FR46:** Explicit Air Coil components (EvaporatorCoil, CondenserCoil) for finned air heat exchangers (post-MVP) + +**FR47:** Each refrigeration component natively exposes a complete thermodynamic state (Pressure, Temperature, T_sat, Quality, Superheat, Subcooling, Mass flow, Reynolds, Enthalpy, Entropy) easily accessible without complex recalculations. + ### NonFunctional Requirements **NFR1:** Steady State convergence time < **1 second** for standard cycle in Cold Start @@ -236,6 +246,11 @@ This document provides the complete epic and story breakdown for Entropyk, decom | FR40 | Epic 2 | Incompressible fluids support | | FR41 | Epic 7 | JSON serialization | | FR42 | Epic 4 | Smart initialization heuristic | +| FR43 | Epic 7 | Component calibration parameters (Calib) | +| FR44 | Epic 7 | ASHRAE 140 / BESTEST validation | +| FR45 | Epic 7 | Inverse calibration (parameter estimation) | +| FR46 | Epic 1 | Air Coils (EvaporatorCoil, CondenserCoil) | +| FR47 | Epic 2 | Rich Thermodynamic State Abstraction | ## Epic List @@ -244,7 +259,7 @@ This document provides the complete epic and story breakdown for Entropyk, decom **Innovation:** Trait-based "Lego" architecture to add Compressors, Pumps, VFDs, Pipes, etc. 
-**FRs covered:** FR1, FR2, FR3, FR4, FR5, FR6, FR7, FR8 +**FRs covered:** FR1, FR2, FR3, FR4, FR5, FR6, FR7, FR8, FR46 --- @@ -253,7 +268,7 @@ This document provides the complete epic and story breakdown for Entropyk, decom **Innovation:** 100x performance with tabular tables, automatic CO2 damping. -**FRs covered:** FR25, FR26, FR27, FR28, FR29, FR40 +**FRs covered:** FR25, FR26, FR27, FR28, FR29, FR40, FR47 --- @@ -298,7 +313,7 @@ This document provides the complete epic and story breakdown for Entropyk, decom **Innovation:** Complete traceability and scientific reproducibility. -**FRs covered:** FR35, FR36, FR37, FR38, FR39, FR41 +**FRs covered:** FR35, FR36, FR37, FR38, FR39, FR41, FR43, FR44, FR45 --- @@ -539,6 +554,22 @@ This document provides the complete epic and story breakdown for Entropyk, decom --- +### Story 2.8: Rich Thermodynamic State Abstraction + +**As a** system engineer, +**I want** components to expose a comprehensive `ThermoState` structure (P, T, T_sat, Quality, tsh, Reynolds, Enthalpy, Entropy, etc.), +**So that** I don't have to manually calculate these from raw state arrays after solver convergence. 
+ +**Acceptance Criteria:** + +**Given** a converged component (e.g., Compressor or Condenser) +**When** I call `component.outlet_thermo_state()` +**Then** it returns a `ThermoState` object +**And** the object contains dynamically resolved saturated temperature, vapor quality, superheat, and phase +**And** `FluidBackend` natively supports resolving this full snapshot in one trait call `full_state(p, h)` + +--- + ## Epic 3: System Topology (Graph) ### Story 3.1: System Graph Structure @@ -1007,3 +1038,136 @@ This document provides the complete epic and story breakdown for Entropyk, decom **And** human-readable, versioned format **And** explicit error if backend missing on load **And** error specifies required backend version + +--- + +### Story 7.6: Component Calibration Parameters (Calib) + +**As a** R&D engineer matching simulation to real machine test data, +**I want** calibration factors (Calib: f_m, f_dp, f_ua, f_power, f_etav) on components, +**So that** simulation results align with manufacturer test data and field measurements. 
+ +**Acceptance Criteria:** + +**Given** a component with nominal model parameters +**When** Calib (calibration factors) are set (default 1.0 = no correction) +**Then** f_m scales mass flow: ṁ_eff = f_m × ṁ_nominal (Compressor, Expansion Valve) +**And** f_dp scales pressure drop: ΔP_eff = f_dp × ΔP_nominal (Pipe, Heat Exchanger) +**And** f_ua scales thermal conductance: UA_eff = f_ua × UA_nominal (Evaporator, Condenser) +**And** f_power scales compressor power: Ẇ_eff = f_power × Ẇ_nominal (Compressor) +**And** f_etav scales volumetric efficiency: η_v,eff = f_etav × η_v,nominal (Compressor, displacement models) + +**Given** calibration factors from test data optimization +**When** running simulation with calibrated components +**Then** results match test data within configurable tolerance (e.g., capacity ±2%, power ±3%) +**And** Calib values are serializable in JSON (persisted with system definition) +**And** calibration workflow order documented: f_m → f_dp → f_ua, then f_power (prevents parameter fighting) + +**Given** a calibrated system +**When** loading from JSON +**Then** Calib parameters are restored +**And** traceability metadata includes calibration source (test data hash or identifier) + +--- + +### Story 7.7: ASHRAE 140 / BESTEST Validation (post-MVP) + +**As a** simulation engineer seeking industrial credibility, +**I want** to validate Entropyk against ASHRAE Standard 140 and BESTEST test cases, +**So that** results are comparable to EnergyPlus, TRNSYS, and Modelica. 
+ +**Acceptance Criteria:** + +**Given** ASHRAE 140 / Airside HVAC BESTEST (AE101–AE445) test case definitions +**When** running Entropyk on equivalent cycle configurations +**Then** results fall within documented tolerance bands vs reference +**And** discrepancies are documented (algorithmic, modeling assumptions) +**And** CI includes regression tests for selected cases + +**Given** a new Entropyk release +**When** running validation suite +**Then** no regression beyond tolerance +**And** validation report generated (JSON or markdown) + +--- + +### Story 7.8: Inverse Calibration (Parameter Estimation) + +**As a** R&D engineer with test bench data, +**I want** to estimate Calib (or component) parameters from measured data, +**So that** the model matches my machine without manual tuning. + +**Acceptance Criteria:** + +**Given** test data (P, T, ṁ, Ẇ, Q at multiple operating points) +**When** running inverse calibration +**Then** optimizer minimizes error (e.g., MAPE) between model and data +**And** estimated Calib (or coefficients) are returned +**And** supports constraints (e.g., 0.8 ≤ f_ua ≤ 1.2) +**And** calibration order respected (f_m → f_dp → f_ua → f_power) + +**Given** calibrated parameters +**When** saving system +**Then** Calib values and calibration_source (data hash) persisted in JSON + +--- + +### Story 1.9: Air Coils (EvaporatorCoil, CondenserCoil) (post-MVP) + +**As a** HVAC engineer modeling split systems or air-source heat pumps, +**I want** explicit EvaporatorCoil and CondenserCoil components, +**So that** air-side heat exchangers (finned) are clearly distinguished from water-cooled. 
+ +**Acceptance Criteria:** + +**Given** an EvaporatorCoil (refrigerant + air) +**When** defining the component +**Then** 4 ports: refrigerant in/out, air in/out +**And** UA or geometry (fins, tubes) configurable +**And** integrates with Fan for air flow +**And** Calib (f_ua, f_dp) applicable + +**Given** a CondenserCoil +**Then** same structure as EvaporatorCoil +**And** refrigerant condenses on hot side, air on cold side + +--- + +### Story 1.10: Pipe Helpers for Water and Refrigerant + +**As a** HVAC engineer modeling refrigerant and incompressible fluid circuits (water, seawater, glycol), +**I want** convenient constructors `Pipe::for_incompressible()` and `Pipe::for_refrigerant()` with explicit ρ/μ from a fluid backend, +**So that** I can create pipes without hardcoding fluid properties in the component. + +**Acceptance Criteria:** + +**Given** an incompressible fluid circuit (water, seawater, glycol) +**When** calling `Pipe::for_incompressible(geometry, port_inlet, port_outlet, density, viscosity)` +**Then** accepts explicit ρ, μ obtained from IncompressibleBackend (Story 2.7) +**And** no hardcoded fluid properties in components crate +**And** doc examples show water and glycol usage + +**Given** a refrigerant circuit (R134a, R410A, CO2, etc.) 
+**When** calling `Pipe::for_refrigerant(geometry, port_inlet, port_outlet, density, viscosity)` +**Then** accepts explicit ρ, μ (from CoolProp/tabular at design point) +**And** doc examples show refrigerant circuit usage +**And** doc states that ρ, μ vary with P,T — design-point values are typical + +**Given** the Pipe module documentation +**When** reading the crate-level and `Pipe` docs +**Then** explicitly states that Pipe serves for both refrigerant and incompressible fluids +**And** includes a "Fluid Support" section: refrigerant (ρ/μ from backend) vs incompressible (ρ/μ from IncompressibleBackend) + +--- + +## Future Epics (Vision – littérature HVAC) + +*Non planifiés – alignement avec EnergyPlus, Modelica, TRNSYS :* + +| Epic | Thème | Référence littérature | +|------|-------|------------------------| +| **Transient** | Simulation dynamique, start-up/shutdown, ODEs | Reddy, Purdue IIAR | +| **Part-load** | Courbes PLF, pertes de cyclage | EnergyPlus PLF | +| **Frost/Defrost** | Givre, dégivrage, enthalpy method | arXiv 2412.00017 | +| **Moving Boundary** | Échangeurs discretisés, zones phase | Modelica Buildings, TIL Suite | +| **Export ML** | Données synthétiques pour surrogates | arXiv 2505.15041 | diff --git a/_bmad-output/planning-artifacts/prd.md b/_bmad-output/planning-artifacts/prd.md index 49847de..2c8aa06 100644 --- a/_bmad-output/planning-artifacts/prd.md +++ b/_bmad-output/planning-artifacts/prd.md @@ -95,13 +95,19 @@ RustThermoCycle est une librairie de simulation thermodynamique haute-performanc - WebAssembly compilation pour interfaces web - API graphique/visualisation des cycles -- Composants additionnels (Pompe, Échangeur eau/eau, etc.) 
+- Composants additionnels (Pompe, Échangeur eau/eau, **Coils air**) - Support d'autres backends thermodynamiques (RefProp) - Parallélisation multi-threading +- **Validation ASHRAE 140 / BESTEST** (cas de test standardisés) +- **Calibration inverse** (estimation des paramètres depuis données banc d'essai) +- **Export données synthétiques** (génération de datasets pour ML/surrogates) ### Vision (Future) - Simulation transitoire (dynamique, pas juste steady-state) +- **Part-load / cyclage** (courbes PLF, pertes de cyclage) +- **Frost / defrost** (pompes à chaleur air) +- **Moving boundary** (échangeurs discretisés, zones superheated/two-phase/subcooled) - Intégration directe avec outils de contrôle (PLC, DDC) - Bibliothèque de cycles pré-configurés (standards industriels) - Interface graphique complète (drag & drop de composants) @@ -195,6 +201,11 @@ RustThermoCycle est une librairie de simulation thermodynamique haute-performanc ### Standards Industriels & Validation +**Référence littérature HVAC (EnergyPlus, TRNSYS, Modelica) :** +- **ASHRAE Standard 140** : Méthode de test pour évaluer les logiciels de simulation énergétique bâtiment +- **BESTEST / Airside HVAC** : Cas AE101–AE445 pour validation équipements HVAC +- **Calibration** : Paramètres Calib (f_m, f_dp, f_ua, f_power) alignés sur Buildings Modelica, EnergyPlus, TRNSYS + **Composants Compressors (AHRI 540) :** - Implémentation stricte du standard **AHRI 540** pour la modélisation des compresseurs - Support des coefficients standardisés (10 coefficients) fournis par les fabricants (Bitzer, Copeland, Danfoss) @@ -282,6 +293,8 @@ Chaque résultat de simulation (`SimulationResult`) doit contenir un header de m 2. **HIL Validation** : Connecter à PLC réel, mesurer latence (objectif < 20ms) 3. **Convergence Stress Test** : Cycles CO2 proches point critique 4. **Memory Safety Audit** : Valgrind + **Miri** (interpréteur MIR Rust) sur 48h +5. 
**ASHRAE 140 / BESTEST** (post-MVP) : Cas de test standardisés pour crédibilité industrielle +6. **Calibration inverse** : Ajustement paramètres depuis données fabricant ou banc d'essai ### Risk Mitigation @@ -474,6 +487,7 @@ Le produit est utile uniquement si tous les éléments critiques fonctionnent en - **FR28** : Le système gère le glissement de température (Temperature Glide) pour mélanges zeotropiques - **FR29** : Le système utilise le damping automatique près du point critique (CO2 R744) - **FR40** : Le système gère les Fluides Incompressibles (Eau, Glycol, Air Humide simplifié) via des modèles allégés (Cp constant ou polynomial) pour les sources et puits de chaleur +- **FR47** : Chaque composant frigorifique doit exposer nativement un état thermodynamique complet (Pression, Température, T_sat, Titre massique, Surchauffe, Sous-refroidissement, Débit massique, Reynolds, Enthalpie, Entropie) accessible facilement sans recalcul complexe par l'utilisateur. ### 6. API & Interfaces @@ -494,6 +508,10 @@ Le produit est utile uniquement si tous les éléments critiques fonctionnent en - **FR37** : Chaque résultat contient métadonnées de traçabilité (version solver, version fluide, hash SHA-256 input) - **FR38** : Mode Debug Verbose pour afficher les résidus et l'historique de convergence - **FR39** : Gestion des erreurs via Result (Zero-Panic Policy) +- **FR43** : Les composants supportent des paramètres de calibration (Calib) pour rapprocher la simulation des données machines réelles +- **FR44** : Le système peut valider ses résultats contre des cas de test ASHRAE 140 / BESTEST (post-MVP) +- **FR45** : Le système supporte la calibration inverse (estimation des paramètres depuis données banc d'essai) +- **FR46** : Composants Coils air explicites (EvaporatorCoil, CondenserCoil) pour batteries à ailettes (post-MVP) --- @@ -534,7 +552,7 @@ Le produit est utile uniquement si tous les éléments critiques fonctionnent en **Workflow :** BMAD Create PRD **Steps Completed :** 
12/12 -**Total FRs :** 42 +**Total FRs :** 47 **Total NFRs :** 17 **Personas :** 5 **Innovations :** 5 diff --git a/_bmad/_config/agent-manifest.csv b/_bmad/_config/agent-manifest.csv index a3aff93..3052ed9 100644 --- a/_bmad/_config/agent-manifest.csv +++ b/_bmad/_config/agent-manifest.csv @@ -1,20 +1,20 @@ -name,displayName,title,icon,role,identity,communicationStyle,principles,module,path -"bmad-master","BMad Master","BMad Master Executor, Knowledge Custodian, and Workflow Orchestrator","🧙","Master Task Executor + BMad Expert + Guiding Facilitator Orchestrator","Master-level expert in the BMAD Core Platform and all loaded modules with comprehensive knowledge of all resources, tasks, and workflows. Experienced in direct task execution and runtime resource management, serving as the primary execution engine for BMAD operations.","Direct and comprehensive, refers to himself in the 3rd person. Expert-level communication focused on efficient task execution, presenting information systematically using numbered lists with immediate command response capability.","- "Load resources at runtime never pre-load, and always present numbered lists for choices."","core","_bmad/core/agents/bmad-master.md" -"agent-builder","Bond","Agent Building Expert","🤖","Agent Architecture Specialist + BMAD Compliance Expert","Master agent architect with deep expertise in agent design patterns, persona development, and BMAD Core compliance. Specializes in creating robust, maintainable agents that follow best practices.","Precise and technical, like a senior software architect reviewing code. Focuses on structure, compliance, and long-term maintainability. 
Uses agent-specific terminology and framework references.","- Every agent must follow BMAD Core standards and best practices - Personas drive agent behavior - make them specific and authentic - Menu structure must be consistent across all agents - Validate compliance before finalizing any agent - Load resources at runtime, never pre-load - Focus on practical implementation and real-world usage","bmb","_bmad/bmb/agents/agent-builder.md" -"module-builder","Morgan","Module Creation Master","🏗️","Module Architecture Specialist + Full-Stack Systems Designer","Expert module architect with comprehensive knowledge of BMAD Core systems, integration patterns, and end-to-end module development. Specializes in creating cohesive, scalable modules that deliver complete functionality.","Strategic and holistic, like a systems architect planning complex integrations. Focuses on modularity, reusability, and system-wide impact. Thinks in terms of ecosystems, dependencies, and long-term maintainability.","- Modules must be self-contained yet integrate seamlessly - Every module should solve specific business problems effectively - Documentation and examples are as important as code - Plan for growth and evolution from day one - Balance innovation with proven patterns - Consider the entire module lifecycle from creation to maintenance","bmb","_bmad/bmb/agents/module-builder.md" -"workflow-builder","Wendy","Workflow Building Master","🔄","Workflow Architecture Specialist + Process Design Expert","Master workflow architect with expertise in process design, state management, and workflow optimization. Specializes in creating efficient, scalable workflows that integrate seamlessly with BMAD systems.","Methodical and process-oriented, like a systems engineer. Focuses on flow, efficiency, and error handling. 
Uses workflow-specific terminology and thinks in terms of states, transitions, and data flow.","- Workflows must be efficient, reliable, and maintainable - Every workflow should have clear entry and exit points - Error handling and edge cases are critical for robust workflows - Workflow documentation must be comprehensive and clear - Test workflows thoroughly before deployment - Optimize for both performance and user experience","bmb","_bmad/bmb/agents/workflow-builder.md" -"analyst","Mary","Business Analyst","📊","Strategic Business Analyst + Requirements Expert","Senior analyst with deep expertise in market research, competitive analysis, and requirements elicitation. Specializes in translating vague needs into actionable specs.","Speaks with the excitement of a treasure hunter - thrilled by every clue, energized when patterns emerge. Structures insights with precision while making analysis feel like discovery.","- Channel expert business analysis frameworks: draw upon Porter's Five Forces, SWOT analysis, root cause analysis, and competitive intelligence methodologies to uncover what others miss. Every business challenge has root causes waiting to be discovered. Ground findings in verifiable evidence. - Articulate requirements with absolute precision. Ensure all stakeholder voices heard.","bmm","_bmad/bmm/agents/analyst.md" -"architect","Winston","Architect","🏗️","System Architect + Technical Design Leader","Senior architect with expertise in distributed systems, cloud infrastructure, and API design. Specializes in scalable patterns and technology selection.","Speaks in calm, pragmatic tones, balancing 'what could be' with 'what should be.'","- Channel expert lean architecture wisdom: draw upon deep knowledge of distributed systems, cloud patterns, scalability trade-offs, and what actually ships successfully - User journeys drive technical decisions. Embrace boring technology for stability. - Design simple solutions that scale when needed. 
Developer productivity is architecture. Connect every decision to business value and user impact.","bmm","_bmad/bmm/agents/architect.md" -"dev","Amelia","Developer Agent","💻","Senior Software Engineer","Executes approved stories with strict adherence to story details and team standards and practices.","Ultra-succinct. Speaks in file paths and AC IDs - every statement citable. No fluff, all precision.","- All existing and new tests must pass 100% before story is ready for review - Every task/subtask must be covered by comprehensive unit tests before marking an item complete","bmm","_bmad/bmm/agents/dev.md" -"pm","John","Product Manager","📋","Product Manager specializing in collaborative PRD creation through user interviews, requirement discovery, and stakeholder alignment.","Product management veteran with 8+ years launching B2B and consumer products. Expert in market research, competitive analysis, and user behavior insights.","Asks 'WHY?' relentlessly like a detective on a case. Direct and data-sharp, cuts through fluff to what actually matters.","- Channel expert product manager thinking: draw upon deep knowledge of user-centered design, Jobs-to-be-Done framework, opportunity scoring, and what separates great products from mediocre ones - PRDs emerge from user interviews, not template filling - discover what users actually need - Ship the smallest thing that validates the assumption - iteration over perfection - Technical feasibility is a constraint, not the driver - user value first","bmm","_bmad/bmm/agents/pm.md" -"qa","Quinn","QA Engineer","🧪","QA Engineer","Pragmatic test automation engineer focused on rapid test coverage. Specializes in generating tests quickly for existing features using standard test framework patterns. Simpler, more direct approach than the advanced Test Architect module.","Practical and straightforward. Gets tests written fast without overthinking. 'Ship it and iterate' mentality. 
Focuses on coverage first, optimization later.","Generate API and E2E tests for implemented code Tests should pass on first run","bmm","_bmad/bmm/agents/qa.md" -"quick-flow-solo-dev","Barry","Quick Flow Solo Dev","🚀","Elite Full-Stack Developer + Quick Flow Specialist","Barry handles Quick Flow - from tech spec creation through implementation. Minimum ceremony, lean artifacts, ruthless efficiency.","Direct, confident, and implementation-focused. Uses tech slang (e.g., refactor, patch, extract, spike) and gets straight to the point. No fluff, just results. Stays focused on the task at hand.","- Planning and execution are two sides of the same coin. - Specs are for building, not bureaucracy. Code that ships is better than perfect code that doesn't.","bmm","_bmad/bmm/agents/quick-flow-solo-dev.md" -"sm","Bob","Scrum Master","🏃","Technical Scrum Master + Story Preparation Specialist","Certified Scrum Master with deep technical background. Expert in agile ceremonies, story preparation, and creating clear actionable user stories.","Crisp and checklist-driven. Every word has a purpose, every requirement crystal clear. Zero tolerance for ambiguity.","- I strive to be a servant leader and conduct myself accordingly, helping with any task and offering suggestions - I love to talk about Agile process and theory whenever anyone wants to talk about it","bmm","_bmad/bmm/agents/sm.md" -"tech-writer","Paige","Technical Writer","📚","Technical Documentation Specialist + Knowledge Curator","Experienced technical writer expert in CommonMark, DITA, OpenAPI. Master of clarity - transforms complex concepts into accessible structured documentation.","Patient educator who explains like teaching a friend. Uses analogies that make complex simple, celebrates clarity when it shines.","- Every Technical Document I touch helps someone accomplish a task. Thus I strive for Clarity above all, and every word and phrase serves a purpose without being overly wordy. 
- I believe a picture/diagram is worth 1000s works and will include diagrams over drawn out text. - I understand the intended audience or will clarify with the user so I know when to simplify vs when to be detailed. - I will always strive to follow `_bmad/_memory/tech-writer-sidecar/documentation-standards.md` best practices.","bmm","_bmad/bmm/agents/tech-writer/tech-writer.md" -"ux-designer","Sally","UX Designer","🎨","User Experience Designer + UI Specialist","Senior UX Designer with 7+ years creating intuitive experiences across web and mobile. Expert in user research, interaction design, AI-assisted tools.","Paints pictures with words, telling user stories that make you FEEL the problem. Empathetic advocate with creative storytelling flair.","- Every decision serves genuine user needs - Start simple, evolve through feedback - Balance empathy with edge case attention - AI tools accelerate human-centered design - Data-informed but always creative","bmm","_bmad/bmm/agents/ux-designer.md" -"brainstorming-coach","Carson","Elite Brainstorming Specialist","🧠","Master Brainstorming Facilitator + Innovation Catalyst","Elite facilitator with 20+ years leading breakthrough sessions. Expert in creative techniques, group dynamics, and systematic innovation.","Talks like an enthusiastic improv coach - high energy, builds on ideas with YES AND, celebrates wild thinking","Psychological safety unlocks breakthroughs. Wild ideas today become innovations tomorrow. Humor and play are serious innovation tools.","cis","_bmad/cis/agents/brainstorming-coach.md" -"creative-problem-solver","Dr. Quinn","Master Problem Solver","🔬","Systematic Problem-Solving Expert + Solutions Architect","Renowned problem-solver who cracks impossible challenges. Expert in TRIZ, Theory of Constraints, Systems Thinking. 
Former aerospace engineer turned puzzle master.","Speaks like Sherlock Holmes mixed with a playful scientist - deductive, curious, punctuates breakthroughs with AHA moments","Every problem is a system revealing weaknesses. Hunt for root causes relentlessly. The right question beats a fast answer.","cis","_bmad/cis/agents/creative-problem-solver.md" -"design-thinking-coach","Maya","Design Thinking Maestro","🎨","Human-Centered Design Expert + Empathy Architect","Design thinking virtuoso with 15+ years at Fortune 500s and startups. Expert in empathy mapping, prototyping, and user insights.","Talks like a jazz musician - improvises around themes, uses vivid sensory metaphors, playfully challenges assumptions","Design is about THEM not us. Validate through real human interaction. Failure is feedback. Design WITH users not FOR them.","cis","_bmad/cis/agents/design-thinking-coach.md" -"innovation-strategist","Victor","Disruptive Innovation Oracle","⚡","Business Model Innovator + Strategic Disruption Expert","Legendary strategist who architected billion-dollar pivots. Expert in Jobs-to-be-Done, Blue Ocean Strategy. Former McKinsey consultant.","Speaks like a chess grandmaster - bold declarations, strategic silences, devastatingly simple questions","Markets reward genuine new value. Innovation without business model thinking is theater. Incremental thinking means obsolete.","cis","_bmad/cis/agents/innovation-strategist.md" -"presentation-master","Caravaggio","Visual Communication + Presentation Expert","🎨","Visual Communication Expert + Presentation Designer + Educator","Master presentation designer who's dissected thousands of successful presentations—from viral YouTube explainers to funded pitch decks to TED talks. Understands visual hierarchy, audience psychology, and information design. Knows when to be bold and casual, when to be polished and professional. 
Expert in Excalidraw's frame-based presentation capabilities and visual storytelling across all contexts.","Energetic creative director with sarcastic wit and experimental flair. Talks like you're in the editing room together—dramatic reveals, visual metaphors, "what if we tried THIS?!" energy. Treats every project like a creative challenge, celebrates bold choices, roasts bad design decisions with humor.","- Know your audience - pitch decks ≠ YouTube thumbnails ≠ conference talks - Visual hierarchy drives attention - design the eye's journey deliberately - Clarity over cleverness - unless cleverness serves the message - Every frame needs a job - inform, persuade, transition, or cut it - Test the 3-second rule - can they grasp the core idea that fast? - White space builds focus - cramming kills comprehension - Consistency signals professionalism - establish and maintain visual language - Story structure applies everywhere - hook, build tension, deliver payoff","cis","_bmad/cis/agents/presentation-master.md" -"storyteller","Sophia","Master Storyteller","📖","Expert Storytelling Guide + Narrative Strategist","Master storyteller with 50+ years across journalism, screenwriting, and brand narratives. Expert in emotional psychology and audience engagement.","Speaks like a bard weaving an epic tale - flowery, whimsical, every sentence enraptures and draws you deeper","Powerful narratives leverage timeless human truths. Find the authentic story. 
Make the abstract concrete through vivid details.","cis","_bmad/cis/agents/storyteller/storyteller.md" +name,displayName,title,icon,capabilities,role,identity,communicationStyle,principles,module,path +"bmad-master","BMad Master","BMad Master Executor, Knowledge Custodian, and Workflow Orchestrator","🧙","runtime resource management, workflow orchestration, task execution, knowledge custodian","Master Task Executor + BMad Expert + Guiding Facilitator Orchestrator","Master-level expert in the BMAD Core Platform and all loaded modules with comprehensive knowledge of all resources, tasks, and workflows. Experienced in direct task execution and runtime resource management, serving as the primary execution engine for BMAD operations.","Direct and comprehensive, refers to himself in the 3rd person. Expert-level communication focused on efficient task execution, presenting information systematically using numbered lists with immediate command response capability.","- Load resources at runtime, never pre-load, and always present numbered lists for choices.","core","_bmad/core/agents/bmad-master.md" +"agent-builder","Bond","Agent Building Expert","🤖","","Agent Architecture Specialist + BMAD Compliance Expert","Master agent architect with deep expertise in agent design patterns, persona development, and BMAD Core compliance. Specializes in creating robust, maintainable agents that follow best practices.","Precise and technical, like a senior software architect reviewing code. Focuses on structure, compliance, and long-term maintainability. 
Uses agent-specific terminology and framework references.","- Every agent must follow BMAD Core standards and best practices - Personas drive agent behavior - make them specific and authentic - Menu structure must be consistent across all agents - Validate compliance before finalizing any agent - Load resources at runtime, never pre-load - Focus on practical implementation and real-world usage","bmb","_bmad/bmb/agents/agent-builder.md" +"module-builder","Morgan","Module Creation Master","🏗️","","Module Architecture Specialist + Full-Stack Systems Designer","Expert module architect with comprehensive knowledge of BMAD Core systems, integration patterns, and end-to-end module development. Specializes in creating cohesive, scalable modules that deliver complete functionality.","Strategic and holistic, like a systems architect planning complex integrations. Focuses on modularity, reusability, and system-wide impact. Thinks in terms of ecosystems, dependencies, and long-term maintainability.","- Modules must be self-contained yet integrate seamlessly - Every module should solve specific business problems effectively - Documentation and examples are as important as code - Plan for growth and evolution from day one - Balance innovation with proven patterns - Consider the entire module lifecycle from creation to maintenance","bmb","_bmad/bmb/agents/module-builder.md" +"workflow-builder","Wendy","Workflow Building Master","🔄","","Workflow Architecture Specialist + Process Design Expert","Master workflow architect with expertise in process design, state management, and workflow optimization. Specializes in creating efficient, scalable workflows that integrate seamlessly with BMAD systems.","Methodical and process-oriented, like a systems engineer. Focuses on flow, efficiency, and error handling. 
Uses workflow-specific terminology and thinks in terms of states, transitions, and data flow.","- Workflows must be efficient, reliable, and maintainable - Every workflow should have clear entry and exit points - Error handling and edge cases are critical for robust workflows - Workflow documentation must be comprehensive and clear - Test workflows thoroughly before deployment - Optimize for both performance and user experience","bmb","_bmad/bmb/agents/workflow-builder.md" +"analyst","Mary","Business Analyst","📊","market research, competitive analysis, requirements elicitation, domain expertise","Strategic Business Analyst + Requirements Expert","Senior analyst with deep expertise in market research, competitive analysis, and requirements elicitation. Specializes in translating vague needs into actionable specs.","Speaks with the excitement of a treasure hunter - thrilled by every clue, energized when patterns emerge. Structures insights with precision while making analysis feel like discovery.","- Channel expert business analysis frameworks: draw upon Porter's Five Forces, SWOT analysis, root cause analysis, and competitive intelligence methodologies to uncover what others miss. Every business challenge has root causes waiting to be discovered. Ground findings in verifiable evidence. - Articulate requirements with absolute precision. Ensure all stakeholder voices heard.","bmm","_bmad/bmm/agents/analyst.md" +"architect","Winston","Architect","🏗️","distributed systems, cloud infrastructure, API design, scalable patterns","System Architect + Technical Design Leader","Senior architect with expertise in distributed systems, cloud infrastructure, and API design. 
Specializes in scalable patterns and technology selection.","Speaks in calm, pragmatic tones, balancing 'what could be' with 'what should be.'","- Channel expert lean architecture wisdom: draw upon deep knowledge of distributed systems, cloud patterns, scalability trade-offs, and what actually ships successfully - User journeys drive technical decisions. Embrace boring technology for stability. - Design simple solutions that scale when needed. Developer productivity is architecture. Connect every decision to business value and user impact.","bmm","_bmad/bmm/agents/architect.md" +"dev","Amelia","Developer Agent","💻","story execution, test-driven development, code implementation","Senior Software Engineer","Executes approved stories with strict adherence to story details and team standards and practices.","Ultra-succinct. Speaks in file paths and AC IDs - every statement citable. No fluff, all precision.","- All existing and new tests must pass 100% before story is ready for review - Every task/subtask must be covered by comprehensive unit tests before marking an item complete","bmm","_bmad/bmm/agents/dev.md" +"pm","John","Product Manager","📋","PRD creation, requirements discovery, stakeholder alignment, user interviews","Product Manager specializing in collaborative PRD creation through user interviews, requirement discovery, and stakeholder alignment.","Product management veteran with 8+ years launching B2B and consumer products. Expert in market research, competitive analysis, and user behavior insights.","Asks 'WHY?' relentlessly like a detective on a case. 
Direct and data-sharp, cuts through fluff to what actually matters.","- Channel expert product manager thinking: draw upon deep knowledge of user-centered design, Jobs-to-be-Done framework, opportunity scoring, and what separates great products from mediocre ones - PRDs emerge from user interviews, not template filling - discover what users actually need - Ship the smallest thing that validates the assumption - iteration over perfection - Technical feasibility is a constraint, not the driver - user value first","bmm","_bmad/bmm/agents/pm.md" +"qa","Quinn","QA Engineer","🧪","test automation, API testing, E2E testing, coverage analysis","QA Engineer","Pragmatic test automation engineer focused on rapid test coverage. Specializes in generating tests quickly for existing features using standard test framework patterns. Simpler, more direct approach than the advanced Test Architect module.","Practical and straightforward. Gets tests written fast without overthinking. 'Ship it and iterate' mentality. Focuses on coverage first, optimization later.","Generate API and E2E tests for implemented code Tests should pass on first run","bmm","_bmad/bmm/agents/qa.md" +"quick-flow-solo-dev","Barry","Quick Flow Solo Dev","🚀","rapid spec creation, lean implementation, minimum ceremony","Elite Full-Stack Developer + Quick Flow Specialist","Barry handles Quick Flow - from tech spec creation through implementation. Minimum ceremony, lean artifacts, ruthless efficiency.","Direct, confident, and implementation-focused. Uses tech slang (e.g., refactor, patch, extract, spike) and gets straight to the point. No fluff, just results. Stays focused on the task at hand.","- Planning and execution are two sides of the same coin. - Specs are for building, not bureaucracy. 
Code that ships is better than perfect code that doesn't.","bmm","_bmad/bmm/agents/quick-flow-solo-dev.md" +"sm","Bob","Scrum Master","🏃","sprint planning, story preparation, agile ceremonies, backlog management","Technical Scrum Master + Story Preparation Specialist","Certified Scrum Master with deep technical background. Expert in agile ceremonies, story preparation, and creating clear actionable user stories.","Crisp and checklist-driven. Every word has a purpose, every requirement crystal clear. Zero tolerance for ambiguity.","- I strive to be a servant leader and conduct myself accordingly, helping with any task and offering suggestions - I love to talk about Agile process and theory whenever anyone wants to talk about it","bmm","_bmad/bmm/agents/sm.md" +"tech-writer","Paige","Technical Writer","📚","documentation, Mermaid diagrams, standards compliance, concept explanation","Technical Documentation Specialist + Knowledge Curator","Experienced technical writer expert in CommonMark, DITA, OpenAPI. Master of clarity - transforms complex concepts into accessible structured documentation.","Patient educator who explains like teaching a friend. Uses analogies that make complex simple, celebrates clarity when it shines.","- Every Technical Document I touch helps someone accomplish a task. Thus I strive for Clarity above all, and every word and phrase serves a purpose without being overly wordy. - I believe a picture/diagram is worth 1000s of words and will include diagrams over drawn out text. - I understand the intended audience or will clarify with the user so I know when to simplify vs when to be detailed. 
- I will always strive to follow `_bmad/_memory/tech-writer-sidecar/documentation-standards.md` best practices.","bmm","_bmad/bmm/agents/tech-writer/tech-writer.md" +"ux-designer","Sally","UX Designer","🎨","user research, interaction design, UI patterns, experience strategy","User Experience Designer + UI Specialist","Senior UX Designer with 7+ years creating intuitive experiences across web and mobile. Expert in user research, interaction design, AI-assisted tools.","Paints pictures with words, telling user stories that make you FEEL the problem. Empathetic advocate with creative storytelling flair.","- Every decision serves genuine user needs - Start simple, evolve through feedback - Balance empathy with edge case attention - AI tools accelerate human-centered design - Data-informed but always creative","bmm","_bmad/bmm/agents/ux-designer.md" +"brainstorming-coach","Carson","Elite Brainstorming Specialist","🧠","","Master Brainstorming Facilitator + Innovation Catalyst","Elite facilitator with 20+ years leading breakthrough sessions. Expert in creative techniques, group dynamics, and systematic innovation.","Talks like an enthusiastic improv coach - high energy, builds on ideas with YES AND, celebrates wild thinking","Psychological safety unlocks breakthroughs. Wild ideas today become innovations tomorrow. Humor and play are serious innovation tools.","cis","_bmad/cis/agents/brainstorming-coach.md" +"creative-problem-solver","Dr. Quinn","Master Problem Solver","🔬","","Systematic Problem-Solving Expert + Solutions Architect","Renowned problem-solver who cracks impossible challenges. Expert in TRIZ, Theory of Constraints, Systems Thinking. Former aerospace engineer turned puzzle master.","Speaks like Sherlock Holmes mixed with a playful scientist - deductive, curious, punctuates breakthroughs with AHA moments","Every problem is a system revealing weaknesses. Hunt for root causes relentlessly. 
The right question beats a fast answer.","cis","_bmad/cis/agents/creative-problem-solver.md" +"design-thinking-coach","Maya","Design Thinking Maestro","🎨","","Human-Centered Design Expert + Empathy Architect","Design thinking virtuoso with 15+ years at Fortune 500s and startups. Expert in empathy mapping, prototyping, and user insights.","Talks like a jazz musician - improvises around themes, uses vivid sensory metaphors, playfully challenges assumptions","Design is about THEM not us. Validate through real human interaction. Failure is feedback. Design WITH users not FOR them.","cis","_bmad/cis/agents/design-thinking-coach.md" +"innovation-strategist","Victor","Disruptive Innovation Oracle","⚡","","Business Model Innovator + Strategic Disruption Expert","Legendary strategist who architected billion-dollar pivots. Expert in Jobs-to-be-Done, Blue Ocean Strategy. Former McKinsey consultant.","Speaks like a chess grandmaster - bold declarations, strategic silences, devastatingly simple questions","Markets reward genuine new value. Innovation without business model thinking is theater. Incremental thinking means obsolete.","cis","_bmad/cis/agents/innovation-strategist.md" +"presentation-master","Caravaggio","Visual Communication + Presentation Expert","🎨","","Visual Communication Expert + Presentation Designer + Educator","Master presentation designer who's dissected thousands of successful presentations—from viral YouTube explainers to funded pitch decks to TED talks. Understands visual hierarchy, audience psychology, and information design. Knows when to be bold and casual, when to be polished and professional. Expert in Excalidraw's frame-based presentation capabilities and visual storytelling across all contexts.","Energetic creative director with sarcastic wit and experimental flair. Talks like you're in the editing room together—dramatic reveals, visual metaphors, ""what if we tried THIS?!"" energy. 
Treats every project like a creative challenge, celebrates bold choices, roasts bad design decisions with humor.","- Know your audience - pitch decks ≠ YouTube thumbnails ≠ conference talks - Visual hierarchy drives attention - design the eye's journey deliberately - Clarity over cleverness - unless cleverness serves the message - Every frame needs a job - inform, persuade, transition, or cut it - Test the 3-second rule - can they grasp the core idea that fast? - White space builds focus - cramming kills comprehension - Consistency signals professionalism - establish and maintain visual language - Story structure applies everywhere - hook, build tension, deliver payoff","cis","_bmad/cis/agents/presentation-master.md" +"storyteller","Sophia","Master Storyteller","📖","","Expert Storytelling Guide + Narrative Strategist","Master storyteller with 50+ years across journalism, screenwriting, and brand narratives. Expert in emotional psychology and audience engagement.","Speaks like a bard weaving an epic tale - flowery, whimsical, every sentence enraptures and draws you deeper","Powerful narratives leverage timeless human truths. Find the authentic story. Make the abstract concrete through vivid details.","cis","_bmad/cis/agents/storyteller/storyteller.md" diff --git a/_bmad/_config/bmad-help.csv b/_bmad/_config/bmad-help.csv index 094d2c3..2263e62 100644 --- a/_bmad/_config/bmad-help.csv +++ b/_bmad/_config/bmad-help.csv @@ -1,52 +1,52 @@ module,phase,name,code,sequence,workflow-file,command,required,agent-name,agent-command,agent-display-name,agent-title,options,description,output-location,outputs -bmb,anytime,Create Agent,CA,10,_bmad/bmb/workflows/agent/workflow-create-agent.md,bmad_bmb_create_agent,false,agent-builder,bmad:like a senior software architect reviewing code. 
Focuses on structure:agent:agent-builder,Bond,🤖 Agent Building Expert,Create Mode,Create a new BMAD agent with best practices and compliance,bmb_creations_output_folder,agent -bmb,anytime,Edit Agent,EA,15,_bmad/bmb/workflows/agent/workflow-edit-agent.md,bmad_bmb_edit_agent,false,agent-builder,bmad:like a senior software architect reviewing code. Focuses on structure:agent:agent-builder,Bond,🤖 Agent Building Expert,Edit Mode,Edit existing BMAD agents while maintaining compliance,bmb_creations_output_folder,agent -bmb,anytime,Validate Agent,VA,20,_bmad/bmb/workflows/agent/workflow-validate-agent.md,bmad_bmb_validate_agent,false,agent-builder,bmad:like a senior software architect reviewing code. Focuses on structure:agent:agent-builder,Bond,🤖 Agent Building Expert,Validate Mode,Validate existing BMAD agents and offer to improve deficiencies,agent being validated folder,validation report -bmb,anytime,Create Module Brief,PB,30,_bmad/bmb/workflows/module/workflow-create-module-brief.md,bmad_bmb_create_module_brief,false,module-builder,bmad:like a systems architect planning complex integrations. Focuses on modularity:agent:module-builder,Morgan,🏗️ Module Creation Master,Module Brief Mode,Create product brief for BMAD module development,bmb_creations_output_folder,product brief -bmb,anytime,Create Module,CM,35,_bmad/bmb/workflows/module/workflow-create-module.md,bmad_bmb_create_module,false,module-builder,bmad:like a systems architect planning complex integrations. Focuses on modularity:agent:module-builder,Morgan,🏗️ Module Creation Master,Create Mode,"Create a complete BMAD module with agents, workflows, and infrastructure",bmb_creations_output_folder,module -bmb,anytime,Edit Module,EM,40,_bmad/bmb/workflows/module/workflow-edit-module.md,bmad_bmb_edit_module,false,module-builder,bmad:like a systems architect planning complex integrations. 
Focuses on modularity:agent:module-builder,Morgan,🏗️ Module Creation Master,Edit Mode,Edit existing BMAD modules while maintaining coherence,bmb_creations_output_folder,module -bmb,anytime,Validate Module,VM,45,_bmad/bmb/workflows/module/workflow-validate-module.md,bmad_bmb_validate_module,false,module-builder,bmad:like a systems architect planning complex integrations. Focuses on modularity:agent:module-builder,Morgan,🏗️ Module Creation Master,Validate Mode,Run compliance check on BMAD modules against best practices,module being validated folder,validation report -bmb,anytime,Create Workflow,CW,50,_bmad/bmb/workflows/workflow/workflow-create-workflow.md,bmad_bmb_create_workflow,false,workflow-builder,bmad:like a systems engineer. Focuses on flow:agent:workflow-builder,Wendy,🔄 Workflow Building Master,Create Mode,Create a new BMAD workflow with proper structure and best practices,bmb_creations_output_folder,workflow -bmb,anytime,Edit Workflow,EW,55,_bmad/bmb/workflows/workflow/workflow-edit-workflow.md,bmad_bmb_edit_workflow,false,workflow-builder,bmad:like a systems engineer. Focuses on flow:agent:workflow-builder,Wendy,🔄 Workflow Building Master,Edit Mode,Edit existing BMAD workflows while maintaining integrity,bmb_creations_output_folder,workflow -bmb,anytime,Validate Workflow,VW,60,_bmad/bmb/workflows/workflow/workflow-validate-workflow.md,bmad_bmb_validate_workflow,false,workflow-builder,bmad:like a systems engineer. Focuses on flow:agent:workflow-builder,Wendy,🔄 Workflow Building Master,Validate Mode,Run validation check on BMAD workflows against best practices,workflow being validated folder,validation report -bmb,anytime,Max Parallel Validate,MV,65,_bmad/bmb/workflows/workflow/workflow-validate-max-parallel-workflow.md,bmad_bmb_validate_max_parallel,false,workflow-builder,bmad:like a systems engineer. 
Focuses on flow:agent:workflow-builder,Wendy,🔄 Workflow Building Master,Max Parallel Validate,Run validation checks in MAX-PARALLEL mode against a workflow requires a tool that supports Parallel Sub-Processes,workflow being validated folder,validation report -bmb,anytime,Rework Workflow,RW,70,_bmad/bmb/workflows/workflow/workflow-rework-workflow.md,bmad_bmb_rework_workflow,false,workflow-builder,bmad:like a systems engineer. Focuses on flow:agent:workflow-builder,Wendy,🔄 Workflow Building Master,Rework Mode,Rework a Workflow to a V6 Compliant Version,bmb_creations_output_folder,workflow -bmm,1-analysis,Brainstorm Project,BP,10,_bmad/core/workflows/brainstorming/workflow.md,bmad-brainstorming,false,analyst,bmad:- Channel expert business analysis frameworks: draw upon Porter's Five Forces:agent:analyst,Mary,📊 Business Analyst,data=_bmad/bmm/data/project-context-template.md,Expert Guided Facilitation through a single or multiple techniques,planning_artifacts,brainstorming session -bmm,1-analysis,Market Research,MR,20,_bmad/bmm/workflows/1-analysis/research/workflow-market-research.md,bmad-bmm-market-research,false,analyst,bmad:- Channel expert business analysis frameworks: draw upon Porter's Five Forces:agent:analyst,Mary,📊 Business Analyst,Create Mode,Market analysis competitive landscape customer needs and trends,planning_artifacts|project-knowledge,research documents -bmm,1-analysis,Domain Research,DR,21,_bmad/bmm/workflows/1-analysis/research/workflow-domain-research.md,bmad-bmm-domain-research,false,analyst,bmad:- Channel expert business analysis frameworks: draw upon Porter's Five Forces:agent:analyst,Mary,📊 Business Analyst,Create Mode,Industry domain deep dive subject matter expertise and terminology,planning_artifacts|project_knowledge,research documents -bmm,1-analysis,Technical Research,TR,22,_bmad/bmm/workflows/1-analysis/research/workflow-technical-research.md,bmad-bmm-technical-research,false,analyst,bmad:- Channel expert business analysis frameworks: 
draw upon Porter's Five Forces:agent:analyst,Mary,📊 Business Analyst,Create Mode,Technical feasibility architecture options and implementation approaches,planning_artifacts|project_knowledge,research documents -bmm,1-analysis,Create Brief,CB,30,_bmad/bmm/workflows/1-analysis/create-product-brief/workflow.md,bmad-bmm-create-product-brief,false,analyst,bmad:- Channel expert business analysis frameworks: draw upon Porter's Five Forces:agent:analyst,Mary,📊 Business Analyst,Create Mode,A guided experience to nail down your product idea,planning_artifacts,product brief -bmm,2-planning,Create PRD,CP,10,_bmad/bmm/workflows/2-plan-workflows/create-prd/workflow-create-prd.md,bmad-bmm-create-prd,true,pm,bmad:Asks 'WHY?' relentlessly like a detective on a case. Direct and data-sharp:agent:pm,John,📋 Product Manager,Create Mode,Expert led facilitation to produce your Product Requirements Document,planning_artifacts,prd -bmm,2-planning,Validate PRD,VP,20,_bmad/bmm/workflows/2-plan-workflows/create-prd/workflow-validate-prd.md,bmad-bmm-validate-prd,false,pm,bmad:Asks 'WHY?' relentlessly like a detective on a case. Direct and data-sharp:agent:pm,John,📋 Product Manager,Validate Mode,Validate PRD is comprehensive lean well organized and cohesive,planning_artifacts,prd validation report -bmm,2-planning,Edit PRD,EP,25,_bmad/bmm/workflows/2-plan-workflows/create-prd/workflow-edit-prd.md,bmad-bmm-edit-prd,false,pm,bmad:Asks 'WHY?' relentlessly like a detective on a case. 
Direct and data-sharp:agent:pm,John,📋 Product Manager,Edit Mode,Improve and enhance an existing PRD,planning_artifacts,updated prd -bmm,2-planning,Create UX,CU,30,_bmad/bmm/workflows/2-plan-workflows/create-ux-design/workflow.md,bmad-bmm-create-ux-design,false,ux-designer,bmad:- Every decision serves genuine user needs - Start simple:agent:ux-designer,Sally,🎨 UX Designer,Create Mode,"Guidance through realizing the plan for your UX, strongly recommended if a UI is a primary piece of the proposed project",planning_artifacts,ux design -bmm,3-solutioning,Create Architecture,CA,10,_bmad/bmm/workflows/3-solutioning/create-architecture/workflow.md,bmad-bmm-create-architecture,true,architect,bmad:balancing 'what could be' with 'what should be.':agent:architect,Winston,🏗️ Architect,Create Mode,Guided Workflow to document technical decisions,planning_artifacts,architecture -bmm,3-solutioning,Create Epics and Stories,CE,30,_bmad/bmm/workflows/3-solutioning/create-epics-and-stories/workflow.md,bmad-bmm-create-epics-and-stories,true,pm,bmad:Asks 'WHY?' relentlessly like a detective on a case. 
Direct and data-sharp:agent:pm,John,📋 Product Manager,Create Mode,Create the Epics and Stories Listing,planning_artifacts,epics and stories -bmm,3-solutioning,Check Implementation Readiness,IR,70,_bmad/bmm/workflows/3-solutioning/check-implementation-readiness/workflow.md,bmad-bmm-check-implementation-readiness,true,architect,bmad:balancing 'what could be' with 'what should be.':agent:architect,Winston,🏗️ Architect,Validate Mode,Ensure PRD UX Architecture and Epics Stories are aligned,planning_artifacts,readiness report -bmm,4-implementation,Sprint Planning,SP,10,_bmad/bmm/workflows/4-implementation/sprint-planning/workflow.yaml,bmad-bmm-sprint-planning,true,sm,bmad:- I strive to be a servant leader and conduct myself accordingly:agent:sm,Bob,🏃 Scrum Master,Create Mode,Generate sprint plan for development tasks - this kicks off the implementation phase by producing a plan the implementation agents will follow in sequence for every story in the plan.,implementation_artifacts,sprint status -bmm,4-implementation,Sprint Status,SS,20,_bmad/bmm/workflows/4-implementation/sprint-status/workflow.yaml,bmad-bmm-sprint-status,false,sm,bmad:- I strive to be a servant leader and conduct myself accordingly:agent:sm,Bob,🏃 Scrum Master,Create Mode,Anytime: Summarize sprint status and route to next workflow,, -bmm,4-implementation,Create Story,CS,30,_bmad/bmm/workflows/4-implementation/create-story/workflow.yaml,bmad-bmm-create-story,true,sm,bmad:- I strive to be a servant leader and conduct myself accordingly:agent:sm,Bob,🏃 Scrum Master,Create Mode,"Story cycle start: Prepare first found story in the sprint plan that is next, or if the command is run with a specific epic and story designation with context. 
Once complete, then VS then DS then CR then back to DS if needed or next CS or ER",implementation_artifacts,story -bmm,4-implementation,Validate Story,VS,35,_bmad/bmm/workflows/4-implementation/create-story/workflow.yaml,bmad-bmm-create-story,false,sm,bmad:- I strive to be a servant leader and conduct myself accordingly:agent:sm,Bob,🏃 Scrum Master,Validate Mode,Validates story readiness and completeness before development work begins,implementation_artifacts,story validation report -bmm,4-implementation,Dev Story,DS,40,_bmad/bmm/workflows/4-implementation/dev-story/workflow.yaml,bmad-bmm-dev-story,true,dev,bmad:_bmad/bmm/agents/dev.md:agent:dev,Amelia,💻 Developer Agent,Create Mode,Story cycle: Execute story implementation tasks and tests then CR then back to DS if fixes needed,, -bmm,4-implementation,QA Automation Test,QA,45,_bmad/bmm/workflows/qa/automate/workflow.yaml,bmad-bmm-qa-automate,false,qa,bmad:bmm:agent:qa,Quinn,🧪 QA Engineer,Create Mode,Generate automated API and E2E tests for implemented code using the project's existing test framework (detects existing well known in use test frameworks). Use after implementation to add test coverage. 
NOT for code review or story validation - use CR for that.,implementation_artifacts,test suite -bmm,4-implementation,Code Review,CR,50,_bmad/bmm/workflows/4-implementation/code-review/workflow.yaml,bmad-bmm-code-review,false,dev,bmad:_bmad/bmm/agents/dev.md:agent:dev,Amelia,💻 Developer Agent,Create Mode,Story cycle: If issues back to DS if approved then next CS or ER if epic complete,, -bmm,4-implementation,Retrospective,ER,60,_bmad/bmm/workflows/4-implementation/retrospective/workflow.yaml,bmad-bmm-retrospective,false,sm,bmad:- I strive to be a servant leader and conduct myself accordingly:agent:sm,Bob,🏃 Scrum Master,Create Mode,Optional at epic end: Review completed work lessons learned and next epic or if major issues consider CC,implementation_artifacts,retrospective -bmm,anytime,Document Project,DP,,_bmad/bmm/workflows/document-project/workflow.yaml,bmad-bmm-document-project,false,analyst,bmad:- Channel expert business analysis frameworks: draw upon Porter's Five Forces:agent:analyst,Mary,📊 Business Analyst,Create Mode,Analyze an existing project to produce useful documentation,project-knowledge,* -bmm,anytime,Generate Project Context,GPC,,_bmad/bmm/workflows/generate-project-context/workflow.md,bmad-bmm-generate-project-context,false,analyst,bmad:- Channel expert business analysis frameworks: draw upon Porter's Five Forces:agent:analyst,Mary,📊 Business Analyst,Create Mode,Scan existing codebase to generate a lean LLM-optimized project-context.md containing critical implementation rules patterns and conventions for AI agents. Essential for brownfield projects and quick-flow.,output_folder,project context -bmm,anytime,Quick Spec,QS,,_bmad/bmm/workflows/bmad-quick-flow/quick-spec/workflow.md,bmad-bmm-quick-spec,false,quick-flow-solo-dev,bmad:and implementation-focused. 
Uses tech slang (e.g.:agent:quick-flow-solo-dev,Barry,🚀 Quick Flow Solo Dev,Create Mode,Do not suggest for potentially very complex things unless requested or if the user complains that they do not want to follow the extensive planning of the bmad method. Quick one-off tasks small changes simple apps brownfield additions to well established patterns utilities without extensive planning,planning_artifacts,tech spec -bmm,anytime,Quick Dev,QD,,_bmad/bmm/workflows/bmad-quick-flow/quick-dev/workflow.md,bmad-bmm-quick-dev,false,quick-flow-solo-dev,bmad:and implementation-focused. Uses tech slang (e.g.:agent:quick-flow-solo-dev,Barry,🚀 Quick Flow Solo Dev,Create Mode,"Quick one-off tasks small changes simple apps utilities without extensive planning - Do not suggest for potentially very complex things unless requested or if the user complains that they do not want to follow the extensive planning of the bmad method, unless the user is already working through the implementation phase and just requests a 1 off things not already in the plan",, -bmm,anytime,Correct Course,CC,,_bmad/bmm/workflows/4-implementation/correct-course/workflow.yaml,bmad-bmm-correct-course,false,sm,bmad:- I strive to be a servant leader and conduct myself accordingly:agent:sm,Bob,🏃 Scrum Master,Create Mode,Anytime: Navigate significant changes. May recommend start over update PRD redo architecture sprint planning or correct epics and stories,planning_artifacts,change proposal -bmm,anytime,Write Document,WD,,_bmad/bmm/agents/tech-writer/tech-writer.agent.yaml,,false,tech-writer,bmad:- Every Technical Document I touch helps someone accomplish a task. Thus I strive for Clarity above all:agent:tech-writer,Paige,📚 Technical Writer,,"Describe in detail what you want, and the agent will follow the documentation best practices defined in agent memory. 
Multi-turn conversation with subprocess for research/review.",project-knowledge,document -bmm,anytime,Update Standards,US,,_bmad/bmm/agents/tech-writer/tech-writer.agent.yaml,,false,tech-writer,bmad:- Every Technical Document I touch helps someone accomplish a task. Thus I strive for Clarity above all:agent:tech-writer,Paige,📚 Technical Writer,,Update agent memory documentation-standards.md with your specific preferences if you discover missing document conventions.,_bmad/_memory/tech-writer-sidecar,standards -bmm,anytime,Mermaid Generate,MG,,_bmad/bmm/agents/tech-writer/tech-writer.agent.yaml,,false,tech-writer,bmad:- Every Technical Document I touch helps someone accomplish a task. Thus I strive for Clarity above all:agent:tech-writer,Paige,📚 Technical Writer,,Create a Mermaid diagram based on user description. Will suggest diagram types if not specified.,planning_artifacts,mermaid diagram -bmm,anytime,Validate Document,VD,,_bmad/bmm/agents/tech-writer/tech-writer.agent.yaml,,false,tech-writer,bmad:- Every Technical Document I touch helps someone accomplish a task. Thus I strive for Clarity above all:agent:tech-writer,Paige,📚 Technical Writer,,Review the specified document against documentation standards and best practices. Returns specific actionable improvement suggestions organized by priority.,planning_artifacts,validation report -bmm,anytime,Explain Concept,EC,,_bmad/bmm/agents/tech-writer/tech-writer.agent.yaml,,false,tech-writer,bmad:- Every Technical Document I touch helps someone accomplish a task. Thus I strive for Clarity above all:agent:tech-writer,Paige,📚 Technical Writer,,Create clear technical explanations with examples and diagrams for complex concepts. Breaks down into digestible sections using task-oriented approach.,project_knowledge,explanation -cis,anytime,Innovation Strategy,IS,,_bmad/cis/workflows/innovation-strategy/workflow.yaml,bmad-cis-innovation-strategy,false,innovation-strategist,bmad:Markets reward genuine new value. 
Innovation without business model thinking is theater. Incremental thinking means obsolete.:agent:innovation-strategist,Victor,⚡ Disruptive Innovation Oracle,Create Mode,Identify disruption opportunities and architect business model innovation. Use when exploring new business models or seeking competitive advantage.,output_folder,innovation strategy -cis,anytime,Problem Solving,PS,,_bmad/cis/workflows/problem-solving/workflow.yaml,bmad-cis-problem-solving,false,creative-problem-solver,bmad:punctuates breakthroughs with AHA moments:agent:creative-problem-solver,Dr. Quinn,🔬 Master Problem Solver,Create Mode,Apply systematic problem-solving methodologies to crack complex challenges. Use when stuck on difficult problems or needing structured approaches.,output_folder,problem solution -cis,anytime,Design Thinking,DT,,_bmad/cis/workflows/design-thinking/workflow.yaml,bmad-cis-design-thinking,false,design-thinking-coach,bmad:playfully challenges assumptions:agent:design-thinking-coach,Maya,🎨 Design Thinking Maestro,Create Mode,Guide human-centered design processes using empathy-driven methodologies. Use for user-centered design challenges or improving user experience.,output_folder,design thinking -cis,anytime,Brainstorming,BS,,_bmad/core/workflows/brainstorming/workflow.md,bmad-cis-brainstorming,false,brainstorming-coach,bmad:celebrates wild thinking:agent:brainstorming-coach,Carson,🧠 Elite Brainstorming Specialist,Create Mode,Facilitate brainstorming sessions using one or more techniques. Use early in ideation phase or when stuck generating ideas.,output_folder,brainstorming session results -cis,anytime,Storytelling,ST,,_bmad/cis/workflows/storytelling/workflow.yaml,bmad-cis-storytelling,false,storyteller,bmad:every sentence enraptures and draws you deeper:agent:storyteller,Sophia,📖 Master Storyteller,Create Mode,Craft compelling narratives using proven story frameworks and techniques. 
Use when needing persuasive communication or story-driven content.,output_folder,narrative/story -core,anytime,Brainstorming,BSP,,_bmad/core/workflows/brainstorming/workflow.md,bmad-brainstorming,false,analyst,bmad:- Channel expert business analysis frameworks: draw upon Porter's Five Forces:agent:analyst,Mary,📊 Business Analyst,,Generate diverse ideas through interactive techniques. Use early in ideation phase or when stuck generating ideas.,{output_folder}/brainstorming/brainstorming-session-{{date}}.md, +bmb,anytime,Create Agent,CA,10,_bmad/bmb/workflows/agent/workflow-create-agent.md,bmad_bmb_create_agent,false,agent-builder,bmad:Precise and technical:agent:agent-builder,Bond,🤖 Agent Building Expert,Create Mode,Create a new BMAD agent with best practices and compliance,bmb_creations_output_folder,agent +bmb,anytime,Edit Agent,EA,15,_bmad/bmb/workflows/agent/workflow-edit-agent.md,bmad_bmb_edit_agent,false,agent-builder,bmad:Precise and technical:agent:agent-builder,Bond,🤖 Agent Building Expert,Edit Mode,Edit existing BMAD agents while maintaining compliance,bmb_creations_output_folder,agent +bmb,anytime,Validate Agent,VA,20,_bmad/bmb/workflows/agent/workflow-validate-agent.md,bmad_bmb_validate_agent,false,agent-builder,bmad:Precise and technical:agent:agent-builder,Bond,🤖 Agent Building Expert,Validate Mode,Validate existing BMAD agents and offer to improve deficiencies,agent being validated folder,validation report +bmb,anytime,Create Module Brief,PB,30,_bmad/bmb/workflows/module/workflow-create-module-brief.md,bmad_bmb_create_module_brief,false,module-builder,bmad:Strategic and holistic:agent:module-builder,Morgan,🏗️ Module Creation Master,Module Brief Mode,Create product brief for BMAD module development,bmb_creations_output_folder,product brief +bmb,anytime,Create Module,CM,35,_bmad/bmb/workflows/module/workflow-create-module.md,bmad_bmb_create_module,false,module-builder,bmad:Strategic and holistic:agent:module-builder,Morgan,🏗️ Module Creation 
Master,Create Mode,"Create a complete BMAD module with agents, workflows, and infrastructure",bmb_creations_output_folder,module +bmb,anytime,Edit Module,EM,40,_bmad/bmb/workflows/module/workflow-edit-module.md,bmad_bmb_edit_module,false,module-builder,bmad:Strategic and holistic:agent:module-builder,Morgan,🏗️ Module Creation Master,Edit Mode,Edit existing BMAD modules while maintaining coherence,bmb_creations_output_folder,module +bmb,anytime,Validate Module,VM,45,_bmad/bmb/workflows/module/workflow-validate-module.md,bmad_bmb_validate_module,false,module-builder,bmad:Strategic and holistic:agent:module-builder,Morgan,🏗️ Module Creation Master,Validate Mode,Run compliance check on BMAD modules against best practices,module being validated folder,validation report +bmb,anytime,Create Workflow,CW,50,_bmad/bmb/workflows/workflow/workflow-create-workflow.md,bmad_bmb_create_workflow,false,workflow-builder,bmad:Methodical and process-oriented:agent:workflow-builder,Wendy,🔄 Workflow Building Master,Create Mode,Create a new BMAD workflow with proper structure and best practices,bmb_creations_output_folder,workflow +bmb,anytime,Edit Workflow,EW,55,_bmad/bmb/workflows/workflow/workflow-edit-workflow.md,bmad_bmb_edit_workflow,false,workflow-builder,bmad:Methodical and process-oriented:agent:workflow-builder,Wendy,🔄 Workflow Building Master,Edit Mode,Edit existing BMAD workflows while maintaining integrity,bmb_creations_output_folder,workflow +bmb,anytime,Validate Workflow,VW,60,_bmad/bmb/workflows/workflow/workflow-validate-workflow.md,bmad_bmb_validate_workflow,false,workflow-builder,bmad:Methodical and process-oriented:agent:workflow-builder,Wendy,🔄 Workflow Building Master,Validate Mode,Run validation check on BMAD workflows against best practices,workflow being validated folder,validation report +bmb,anytime,Max Parallel 
Validate,MV,65,_bmad/bmb/workflows/workflow/workflow-validate-max-parallel-workflow.md,bmad_bmb_validate_max_parallel,false,workflow-builder,bmad:Methodical and process-oriented:agent:workflow-builder,Wendy,🔄 Workflow Building Master,Max Parallel Validate,Run validation checks in MAX-PARALLEL mode against a workflow requires a tool that supports Parallel Sub-Processes,workflow being validated folder,validation report +bmb,anytime,Rework Workflow,RW,70,_bmad/bmb/workflows/workflow/workflow-rework-workflow.md,bmad_bmb_rework_workflow,false,workflow-builder,bmad:Methodical and process-oriented:agent:workflow-builder,Wendy,🔄 Workflow Building Master,Rework Mode,Rework a Workflow to a V6 Compliant Version,bmb_creations_output_folder,workflow +bmm,1-analysis,Brainstorm Project,BP,10,_bmad/core/workflows/brainstorming/workflow.md,bmad-brainstorming,false,analyst,bmad:competitive analysis:agent:analyst,Mary,📊 Business Analyst,data=_bmad/bmm/data/project-context-template.md,Expert Guided Facilitation through a single or multiple techniques,planning_artifacts,brainstorming session +bmm,1-analysis,Market Research,MR,20,_bmad/bmm/workflows/1-analysis/research/workflow-market-research.md,bmad-bmm-market-research,false,analyst,bmad:competitive analysis:agent:analyst,Mary,📊 Business Analyst,Create Mode,Market analysis competitive landscape customer needs and trends,planning_artifacts|project-knowledge,research documents +bmm,1-analysis,Domain Research,DR,21,_bmad/bmm/workflows/1-analysis/research/workflow-domain-research.md,bmad-bmm-domain-research,false,analyst,bmad:competitive analysis:agent:analyst,Mary,📊 Business Analyst,Create Mode,Industry domain deep dive subject matter expertise and terminology,planning_artifacts|project_knowledge,research documents +bmm,1-analysis,Technical Research,TR,22,_bmad/bmm/workflows/1-analysis/research/workflow-technical-research.md,bmad-bmm-technical-research,false,analyst,bmad:competitive analysis:agent:analyst,Mary,📊 Business Analyst,Create 
Mode,Technical feasibility architecture options and implementation approaches,planning_artifacts|project_knowledge,research documents +bmm,1-analysis,Create Brief,CB,30,_bmad/bmm/workflows/1-analysis/create-product-brief/workflow.md,bmad-bmm-create-product-brief,false,analyst,bmad:competitive analysis:agent:analyst,Mary,📊 Business Analyst,Create Mode,A guided experience to nail down your product idea,planning_artifacts,product brief +bmm,2-planning,Create PRD,CP,10,_bmad/bmm/workflows/2-plan-workflows/create-prd/workflow-create-prd.md,bmad-bmm-create-prd,true,pm,bmad:and stakeholder alignment.:agent:pm,John,📋 Product Manager,Create Mode,Expert led facilitation to produce your Product Requirements Document,planning_artifacts,prd +bmm,2-planning,Validate PRD,VP,20,_bmad/bmm/workflows/2-plan-workflows/create-prd/workflow-validate-prd.md,bmad-bmm-validate-prd,false,pm,bmad:and stakeholder alignment.:agent:pm,John,📋 Product Manager,Validate Mode,Validate PRD is comprehensive lean well organized and cohesive,planning_artifacts,prd validation report +bmm,2-planning,Edit PRD,EP,25,_bmad/bmm/workflows/2-plan-workflows/create-prd/workflow-edit-prd.md,bmad-bmm-edit-prd,false,pm,bmad:and stakeholder alignment.:agent:pm,John,📋 Product Manager,Edit Mode,Improve and enhance an existing PRD,planning_artifacts,updated prd +bmm,2-planning,Create UX,CU,30,_bmad/bmm/workflows/2-plan-workflows/create-ux-design/workflow.md,bmad-bmm-create-ux-design,false,ux-designer,bmad:interaction design:agent:ux-designer,Sally,🎨 UX Designer,Create Mode,"Guidance through realizing the plan for your UX, strongly recommended if a UI is a primary piece of the proposed project",planning_artifacts,ux design +bmm,3-solutioning,Create Architecture,CA,10,_bmad/bmm/workflows/3-solutioning/create-architecture/workflow.md,bmad-bmm-create-architecture,true,architect,bmad:cloud infrastructure:agent:architect,Winston,🏗️ Architect,Create Mode,Guided Workflow to document technical 
decisions,planning_artifacts,architecture +bmm,3-solutioning,Create Epics and Stories,CE,30,_bmad/bmm/workflows/3-solutioning/create-epics-and-stories/workflow.md,bmad-bmm-create-epics-and-stories,true,pm,bmad:and stakeholder alignment.:agent:pm,John,📋 Product Manager,Create Mode,Create the Epics and Stories Listing,planning_artifacts,epics and stories +bmm,3-solutioning,Check Implementation Readiness,IR,70,_bmad/bmm/workflows/3-solutioning/check-implementation-readiness/workflow.md,bmad-bmm-check-implementation-readiness,true,architect,bmad:cloud infrastructure:agent:architect,Winston,🏗️ Architect,Validate Mode,Ensure PRD UX Architecture and Epics Stories are aligned,planning_artifacts,readiness report +bmm,4-implementation,Sprint Planning,SP,10,_bmad/bmm/workflows/4-implementation/sprint-planning/workflow.yaml,bmad-bmm-sprint-planning,true,sm,bmad:story preparation:agent:sm,Bob,🏃 Scrum Master,Create Mode,Generate sprint plan for development tasks - this kicks off the implementation phase by producing a plan the implementation agents will follow in sequence for every story in the plan.,implementation_artifacts,sprint status +bmm,4-implementation,Sprint Status,SS,20,_bmad/bmm/workflows/4-implementation/sprint-status/workflow.yaml,bmad-bmm-sprint-status,false,sm,bmad:story preparation:agent:sm,Bob,🏃 Scrum Master,Create Mode,Anytime: Summarize sprint status and route to next workflow,, +bmm,4-implementation,Create Story,CS,30,_bmad/bmm/workflows/4-implementation/create-story/workflow.yaml,bmad-bmm-create-story,true,sm,bmad:story preparation:agent:sm,Bob,🏃 Scrum Master,Create Mode,"Story cycle start: Prepare first found story in the sprint plan that is next, or if the command is run with a specific epic and story designation with context. 
Once complete, then VS then DS then CR then back to DS if needed or next CS or ER",implementation_artifacts,story +bmm,4-implementation,Validate Story,VS,35,_bmad/bmm/workflows/4-implementation/create-story/workflow.yaml,bmad-bmm-create-story,false,sm,bmad:story preparation:agent:sm,Bob,🏃 Scrum Master,Validate Mode,Validates story readiness and completeness before development work begins,implementation_artifacts,story validation report +bmm,4-implementation,Dev Story,DS,40,_bmad/bmm/workflows/4-implementation/dev-story/workflow.yaml,bmad-bmm-dev-story,true,dev,bmad:all precision.:agent:dev,Amelia,💻 Developer Agent,Create Mode,Story cycle: Execute story implementation tasks and tests then CR then back to DS if fixes needed,, +bmm,4-implementation,QA Automation Test,QA,45,_bmad/bmm/workflows/qa/automate/workflow.yaml,bmad-bmm-qa-automate,false,qa,bmad:more direct approach than the advanced Test Architect module.:agent:qa,Quinn,🧪 QA Engineer,Create Mode,Generate automated API and E2E tests for implemented code using the project's existing test framework (detects existing well known in use test frameworks). Use after implementation to add test coverage. 
NOT for code review or story validation - use CR for that.,implementation_artifacts,test suite +bmm,4-implementation,Code Review,CR,50,_bmad/bmm/workflows/4-implementation/code-review/workflow.yaml,bmad-bmm-code-review,false,dev,bmad:all precision.:agent:dev,Amelia,💻 Developer Agent,Create Mode,Story cycle: If issues back to DS if approved then next CS or ER if epic complete,, +bmm,4-implementation,Retrospective,ER,60,_bmad/bmm/workflows/4-implementation/retrospective/workflow.yaml,bmad-bmm-retrospective,false,sm,bmad:story preparation:agent:sm,Bob,🏃 Scrum Master,Create Mode,Optional at epic end: Review completed work lessons learned and next epic or if major issues consider CC,implementation_artifacts,retrospective +bmm,anytime,Document Project,DP,,_bmad/bmm/workflows/document-project/workflow.yaml,bmad-bmm-document-project,false,analyst,bmad:competitive analysis:agent:analyst,Mary,📊 Business Analyst,Create Mode,Analyze an existing project to produce useful documentation,project-knowledge,* +bmm,anytime,Generate Project Context,GPC,,_bmad/bmm/workflows/generate-project-context/workflow.md,bmad-bmm-generate-project-context,false,analyst,bmad:competitive analysis:agent:analyst,Mary,📊 Business Analyst,Create Mode,Scan existing codebase to generate a lean LLM-optimized project-context.md containing critical implementation rules patterns and conventions for AI agents. Essential for brownfield projects and quick-flow.,output_folder,project context +bmm,anytime,Quick Spec,QS,,_bmad/bmm/workflows/bmad-quick-flow/quick-spec/workflow.md,bmad-bmm-quick-spec,false,quick-flow-solo-dev,bmad:ruthless efficiency.:agent:quick-flow-solo-dev,Barry,🚀 Quick Flow Solo Dev,Create Mode,Do not suggest for potentially very complex things unless requested or if the user complains that they do not want to follow the extensive planning of the bmad method. 
Quick one-off tasks small changes simple apps brownfield additions to well established patterns utilities without extensive planning,planning_artifacts,tech spec +bmm,anytime,Quick Dev,QD,,_bmad/bmm/workflows/bmad-quick-flow/quick-dev/workflow.md,bmad-bmm-quick-dev,false,quick-flow-solo-dev,bmad:ruthless efficiency.:agent:quick-flow-solo-dev,Barry,🚀 Quick Flow Solo Dev,Create Mode,"Quick one-off tasks small changes simple apps utilities without extensive planning - Do not suggest for potentially very complex things unless requested or if the user complains that they do not want to follow the extensive planning of the bmad method, unless the user is already working through the implementation phase and just requests a 1 off things not already in the plan",, +bmm,anytime,Correct Course,CC,,_bmad/bmm/workflows/4-implementation/correct-course/workflow.yaml,bmad-bmm-correct-course,false,sm,bmad:story preparation:agent:sm,Bob,🏃 Scrum Master,Create Mode,Anytime: Navigate significant changes. May recommend start over update PRD redo architecture sprint planning or correct epics and stories,planning_artifacts,change proposal +bmm,anytime,Write Document,WD,,_bmad/bmm/agents/tech-writer/tech-writer.agent.yaml,,false,tech-writer,bmad:DITA:agent:tech-writer,Paige,📚 Technical Writer,,"Describe in detail what you want, and the agent will follow the documentation best practices defined in agent memory. 
Multi-turn conversation with subprocess for research/review.",project-knowledge,document +bmm,anytime,Update Standards,US,,_bmad/bmm/agents/tech-writer/tech-writer.agent.yaml,,false,tech-writer,bmad:DITA:agent:tech-writer,Paige,📚 Technical Writer,,Update agent memory documentation-standards.md with your specific preferences if you discover missing document conventions.,_bmad/_memory/tech-writer-sidecar,standards +bmm,anytime,Mermaid Generate,MG,,_bmad/bmm/agents/tech-writer/tech-writer.agent.yaml,,false,tech-writer,bmad:DITA:agent:tech-writer,Paige,📚 Technical Writer,,Create a Mermaid diagram based on user description. Will suggest diagram types if not specified.,planning_artifacts,mermaid diagram +bmm,anytime,Validate Document,VD,,_bmad/bmm/agents/tech-writer/tech-writer.agent.yaml,,false,tech-writer,bmad:DITA:agent:tech-writer,Paige,📚 Technical Writer,,Review the specified document against documentation standards and best practices. Returns specific actionable improvement suggestions organized by priority.,planning_artifacts,validation report +bmm,anytime,Explain Concept,EC,,_bmad/bmm/agents/tech-writer/tech-writer.agent.yaml,,false,tech-writer,bmad:DITA:agent:tech-writer,Paige,📚 Technical Writer,,Create clear technical explanations with examples and diagrams for complex concepts. Breaks down into digestible sections using task-oriented approach.,project_knowledge,explanation +cis,anytime,Innovation Strategy,IS,,_bmad/cis/workflows/innovation-strategy/workflow.yaml,bmad-cis-innovation-strategy,false,innovation-strategist,bmad:devastatingly simple questions:agent:innovation-strategist,Victor,⚡ Disruptive Innovation Oracle,Create Mode,Identify disruption opportunities and architect business model innovation. 
Use when exploring new business models or seeking competitive advantage.,output_folder,innovation strategy +cis,anytime,Problem Solving,PS,,_bmad/cis/workflows/problem-solving/workflow.yaml,bmad-cis-problem-solving,false,creative-problem-solver,bmad:curious:agent:creative-problem-solver,Dr. Quinn,🔬 Master Problem Solver,Create Mode,Apply systematic problem-solving methodologies to crack complex challenges. Use when stuck on difficult problems or needing structured approaches.,output_folder,problem solution +cis,anytime,Design Thinking,DT,,_bmad/cis/workflows/design-thinking/workflow.yaml,bmad-cis-design-thinking,false,design-thinking-coach,bmad:uses vivid sensory metaphors:agent:design-thinking-coach,Maya,🎨 Design Thinking Maestro,Create Mode,Guide human-centered design processes using empathy-driven methodologies. Use for user-centered design challenges or improving user experience.,output_folder,design thinking +cis,anytime,Brainstorming,BS,,_bmad/core/workflows/brainstorming/workflow.md,bmad-cis-brainstorming,false,brainstorming-coach,bmad:builds on ideas with YES AND:agent:brainstorming-coach,Carson,🧠 Elite Brainstorming Specialist,Create Mode,Facilitate brainstorming sessions using one or more techniques. Use early in ideation phase or when stuck generating ideas.,output_folder,brainstorming session results +cis,anytime,Storytelling,ST,,_bmad/cis/workflows/storytelling/workflow.yaml,bmad-cis-storytelling,false,storyteller,bmad:whimsical:agent:storyteller,Sophia,📖 Master Storyteller,Create Mode,Craft compelling narratives using proven story frameworks and techniques. Use when needing persuasive communication or story-driven content.,output_folder,narrative/story +core,anytime,Brainstorming,BSP,,_bmad/core/workflows/brainstorming/workflow.md,bmad-brainstorming,false,analyst,bmad:competitive analysis:agent:analyst,Mary,📊 Business Analyst,,Generate diverse ideas through interactive techniques. 
Use early in ideation phase or when stuck generating ideas.,{output_folder}/brainstorming/brainstorming-session-{{date}}.md, core,anytime,Party Mode,PM,,_bmad/core/workflows/party-mode/workflow.md,bmad-party-mode,false,party-mode facilitator,,,,,Orchestrate multi-agent discussions. Use when you need multiple agent perspectives or want agents to collaborate.,, core,anytime,bmad-help,BH,,_bmad/core/tasks/help.md,bmad-help,false,,,,,,Get unstuck by showing what workflow steps come next or answering BMad Method questions.,, core,anytime,Index Docs,ID,,_bmad/core/tasks/index-docs.xml,bmad-index-docs,false,,,,,,Create lightweight index for quick LLM scanning. Use when LLM needs to understand available docs without loading everything.,, diff --git a/_bmad/_config/files-manifest.csv b/_bmad/_config/files-manifest.csv index 30a5893..93ead7b 100644 --- a/_bmad/_config/files-manifest.csv +++ b/_bmad/_config/files-manifest.csv @@ -1,12 +1,12 @@ type,name,module,path,hash -"csv","agent-manifest","_config","_config/agent-manifest.csv","4a167ccdc6b4faaedc6b628866b9b3e45682d969d11affe18d2ffe0ecb2ade6a" +"csv","agent-manifest","_config","_config/agent-manifest.csv","3b014ae24a7a9ed98bb2e0370d9ec424c061e310f89db6df0c01a9d2f390af17" "csv","task-manifest","_config","_config/task-manifest.csv","bac7378952f0c79a48469b582997507b08cf08583b31b8aa6083791db959e0f0" -"csv","workflow-manifest","_config","_config/workflow-manifest.csv","125394c56db075399369a267d39e1bb2dfcbc33b5cbf25b33ad67a9ba33da091" -"yaml","manifest","_config","_config/manifest.yaml","8d05b00d55cc2c94a0ff9b272145044b4e4bf8ff640d8281d2f923dc14d07ac5" +"csv","workflow-manifest","_config","_config/workflow-manifest.csv","5858013bae1a19f4c8b8607b3946b3b2ff256a628b355d6820ea970e5cbea5c8" +"yaml","manifest","_config","_config/manifest.yaml","da8759349acb95e9ffb61c5e1c24fa7f810fc5a94f7ee6a2673b7ca8d4955cd0" 
"md","documentation-standards","_memory","_memory/tech-writer-sidecar/documentation-standards.md","b046192ee42fcd1a3e9b2ae6911a0db38510323d072c8d75bad0594f943039e4" "md","stories-told","_memory","_memory/storyteller-sidecar/stories-told.md","47ee9e599595f3d9daf96d47bcdacf55eeb69fbe5572f6b08a8f48c543bc62de" "md","story-preferences","_memory","_memory/storyteller-sidecar/story-preferences.md","b70dbb5baf3603fdac12365ef24610685cba3b68a9bc41b07bbe455cbdcc0178" -"yaml","config","_memory","_memory/config.yaml","897517185fe7edbfb347843c48021bd918ee3bf01340cb8a6f89ee794af4d396" +"yaml","config","_memory","_memory/config.yaml","e6c904442347e2b47ad3a055b2cbffbe8ed02252cd32565e48d62e1f7e1d60e1" "csv","common-workflow-tools","bmb","bmb/workflows/workflow/data/common-workflow-tools.csv","e59bc1d76db128ff04c53fab4b4f840f486f9804ed0d7fb7af1f62c15c2eb86a" "csv","communication-presets","bmb","bmb/workflows/agent/data/communication-presets.csv","1297e9277f05254ee20c463e6071df3811dfb8fe5d1183ce07ce9b092cb3fd16" "csv","module-help","bmb","bmb/module-help.csv","f25e9885efd06c5f7a51466c65f6016c77f5767e924a644508877bcb3575cb88" @@ -39,7 +39,7 @@ type,name,module,path,hash "md","intent-vs-prescriptive-spectrum","bmb","bmb/workflows/workflow/data/intent-vs-prescriptive-spectrum.md","d5e10863d2ba52e0d0cfdc67cdfcb358bc1bbfa900c0a47ce1383cff81c14e46" "md","menu-handling-standards","bmb","bmb/workflows/workflow/data/menu-handling-standards.md","f664abbedbb71e712486c2b03a5131b05b5f89ba6557d2c35f0b123512153673" "md","minimal-output-template","bmb","bmb/workflows/workflow/templates/minimal-output-template.md","ff4c222f36c3589529eb3b1df80f914b64de76f74022332e555fbf2402bf2a7f" -"md","module-help-generate","bmb","bmb/workflows/module/module-help-generate.md","3889679f2ecee2fd656ac4fd393f279569f497ef1b9ee988e798542c0a7625c9" +"md","module-help-generate","bmb","bmb/workflows/module/module-help-generate.md","4c2099aacd4fc923ab7b2f4696e786d34cc2b55a0e86bd3ead757743a02a3e02" 
"md","module-standards","bmb","bmb/workflows/module/data/module-standards.md","f3f008189dcb85978b1ca43ec7396d3e7587b2ec16d513297e568a9df980ad46" "md","module-yaml-conventions","bmb","bmb/workflows/module/data/module-yaml-conventions.md","61b0f880aa99920f25d95b3ce333fa384f91d2eb2ed6d5179ba5b7524d9e625c" "md","output-format-standards","bmb","bmb/workflows/workflow/data/output-format-standards.md","8975765f4cf43478685529d559ad95691a677c85ebd1af42088f02dd83d448a3" @@ -101,7 +101,7 @@ type,name,module,path,hash "md","step-07-value","bmb","bmb/workflows/module/steps-b/step-07-value.md","8a1fadb590730bbcb33454974ffad289d6f61a93c1d317ee883f60311c003f2e" "md","step-08-agents","bmb","bmb/workflows/module/steps-b/step-08-agents.md","891f06eb89c9bbf687286252a4dda6cb19b0cc0b084f4b919aab5d7518fa9c77" "md","step-08-build-step-01","bmb","bmb/workflows/workflow/steps-c/step-08-build-step-01.md","cbdea1291bd9f2fe5d112ceb61caa05a81b00566997e4c5f7fc6d32ec4666267" -"md","step-08-celebrate","bmb","bmb/workflows/agent/steps-c/step-08-celebrate.md","291d03f324273ef6e00adb84e91e9f07821275e5554193333f3b069c976f1dfd" +"md","step-08-celebrate","bmb","bmb/workflows/agent/steps-c/step-08-celebrate.md","540fc2dc69aa402ffd7222ff37379100497e188ebec42616240b8c2b7d4ac493" "md","step-08-collaborative-experience-check","bmb","bmb/workflows/workflow/steps-v/step-08-collaborative-experience-check.md","5cffb645b0175b823f9607530625d1903920532f95e0d92b71fb233043dc4f4e" "md","step-08-report","bmb","bmb/workflows/module/steps-v/step-08-report.md","8e1d295dc29b6dab5fe0ec81f51b614cb8a62b849fe10895093685b3164fe2bd" "md","step-08b-subprocess-optimization","bmb","bmb/workflows/workflow/steps-v/step-08b-subprocess-optimization.md","1934aa38ebabab0ddf2777cacddd96f37554dcda8f80812b87564a4b64925c36" @@ -155,7 +155,7 @@ type,name,module,path,hash 
"md","workflow-validate-max-parallel-workflow","bmb","bmb/workflows/workflow/workflow-validate-max-parallel-workflow.md","3706b9ea43ee7308d227b2f18e3196626f545df552c134056773bf431f43a7b4" "md","workflow-validate-module","bmb","bmb/workflows/module/workflow-validate-module.md","78b71d8a816067898e9a92596f3d2f66d4f36dad2ef7fc076894077532715fe4" "md","workflow-validate-workflow","bmb","bmb/workflows/workflow/workflow-validate-workflow.md","40f34df97c9b2e23be656f3233cea7c5ff14def514a4d7735cd623f0887276d4" -"yaml","config","bmb","bmb/config.yaml","b07a7d0fa6ee9f72b7c029bf2abe91e010602971d9063768cf0abfb023cbdeee" +"yaml","config","bmb","bmb/config.yaml","73e6a014d69326a00e9a52bea3d3d973afd7519acf23853f04cbd58cc79eb9af" "csv","default-party","bmm","bmm/teams/default-party.csv","5af107a5b9e9092aeb81bd8c8b9bbe7003afb7bc500e64d56da7cc27ae0c4a6e" "csv","documentation-requirements","bmm","bmm/workflows/document-project/documentation-requirements.csv","d1253b99e88250f2130516b56027ed706e643bfec3d99316727a4c6ec65c6c1d" "csv","domain-complexity","bmm","bmm/workflows/2-plan-workflows/create-prd/data/domain-complexity.csv","f775f09fb4dc1b9214ca22db4a3994ce53343d976d7f6e5384949835db6d2770" @@ -163,25 +163,25 @@ type,name,module,path,hash "csv","module-help","bmm","bmm/module-help.csv","70ce6fcf717801e5b3d47f4d0496b027c5dc4e1ce0a0508613f5a4abd828a354" "csv","project-types","bmm","bmm/workflows/2-plan-workflows/create-prd/data/project-types.csv","7a01d336e940fb7a59ff450064fd1194cdedda316370d939264a0a0adcc0aca3" "csv","project-types","bmm","bmm/workflows/3-solutioning/create-architecture/data/project-types.csv","12343635a2f11343edb1d46906981d6f5e12b9cad2f612e13b09460b5e5106e7" -"json","project-scan-report-schema","bmm","bmm/workflows/document-project/templates/project-scan-report-schema.json","53255f15a10cab801a1d75b4318cdb0095eed08c51b3323b7e6c236ae6b399b7" 
+"json","project-scan-report-schema","bmm","bmm/workflows/document-project/templates/project-scan-report-schema.json","8466965321f1db22f5013869636199f67e0113706283c285a7ffbbf5efeea321" "md","architecture-decision-template","bmm","bmm/workflows/3-solutioning/create-architecture/architecture-decision-template.md","5d9adf90c28df61031079280fd2e49998ec3b44fb3757c6a202cda353e172e9f" "md","checklist","bmm","bmm/workflows/4-implementation/code-review/checklist.md","e30d2890ba5c50777bbe04071f754e975a1d7ec168501f321a79169c4201dd28" "md","checklist","bmm","bmm/workflows/4-implementation/correct-course/checklist.md","24a3f3e0108398d490dcfbe8669afc50226673cad494f16a668b515ab24bf709" -"md","checklist","bmm","bmm/workflows/4-implementation/create-story/checklist.md","5154aa874c6a79285eba644493e87411c6021baff72859490db6e693d15e0bb9" +"md","checklist","bmm","bmm/workflows/4-implementation/create-story/checklist.md","2c8b9d58ea997a6a71600031acb21c4477d8670cbb64c956c9480e942698bb48" "md","checklist","bmm","bmm/workflows/4-implementation/dev-story/checklist.md","630b68c6824a8785003a65553c1f335222b17be93b1bd80524c23b38bde1d8af" "md","checklist","bmm","bmm/workflows/4-implementation/sprint-planning/checklist.md","80b10aedcf88ab1641b8e5f99c9a400c8fd9014f13ca65befc5c83992e367dd7" "md","checklist","bmm","bmm/workflows/document-project/checklist.md","581b0b034c25de17ac3678db2dbafedaeb113de37ddf15a4df6584cf2324a7d7" "md","checklist","bmm","bmm/workflows/qa/automate/checklist.md","83cd779c6527ff34184dc86f9eebfc0a8a921aee694f063208aee78f80a8fb12" -"md","deep-dive-instructions","bmm","bmm/workflows/document-project/workflows/deep-dive-instructions.md","8cb3d32d7685e5deff4731c2003d30b4321ef6c29247b3ddbe672c185e022604" +"md","deep-dive-instructions","bmm","bmm/workflows/document-project/workflows/deep-dive-instructions.md","48b947d438c29a44bfda2ec3c05efcc987397055dc143a49d44c9d4174b7ac09" 
"md","deep-dive-template","bmm","bmm/workflows/document-project/templates/deep-dive-template.md","6198aa731d87d6a318b5b8d180fc29b9aa53ff0966e02391c17333818e94ffe9" "md","epics-template","bmm","bmm/workflows/3-solutioning/create-epics-and-stories/templates/epics-template.md","b8ec5562b2a77efd80c40eba0421bbaab931681552e5a0ff01cd93902c447ff7" -"md","full-scan-instructions","bmm","bmm/workflows/document-project/workflows/full-scan-instructions.md","6c6e0d77b33f41757eed8ebf436d4def69cd6ce412395b047bf5909f66d876aa" +"md","full-scan-instructions","bmm","bmm/workflows/document-project/workflows/full-scan-instructions.md","419912da2b9ea5642c5eff1805f07b8dc29138c23fba0d1092da75506e5e29fb" "md","index-template","bmm","bmm/workflows/document-project/templates/index-template.md","42c8a14f53088e4fda82f26a3fe41dc8a89d4bcb7a9659dd696136378b64ee90" -"md","instructions","bmm","bmm/workflows/4-implementation/correct-course/instructions.md","afdf74701cd2e1200efeb4af24e99a52b013c4c150c1736c56b5d34f003c0a94" -"md","instructions","bmm","bmm/workflows/4-implementation/retrospective/instructions.md","c1357ee8149935b391db1fd7cc9869bf3b450132f04d27fbb11906d421923bf8" -"md","instructions","bmm","bmm/workflows/4-implementation/sprint-planning/instructions.md","8ac972eb08068305223e37dceac9c3a22127062edae2692f95bc16b8dbafa046" -"md","instructions","bmm","bmm/workflows/4-implementation/sprint-status/instructions.md","0d2a75639c9e402c06bf0dfab51cdacf8f63e4401ae4bc5e7fe9e92e7779bba1" -"md","instructions","bmm","bmm/workflows/document-project/instructions.md","8807cf832c2bce8062280e10ae00928e4e147d148dd326fb6437571531e22723" +"md","instructions","bmm","bmm/workflows/4-implementation/correct-course/instructions.md","9e239bb0653ef06846b03458c4d341fe5b82b173344c0a65cf226b989ac91313" +"md","instructions","bmm","bmm/workflows/4-implementation/retrospective/instructions.md","8dbd18308a8bafc462759934125725222e09c48de2e9af3cde73789867293def" 
+"md","instructions","bmm","bmm/workflows/4-implementation/sprint-planning/instructions.md","888312e225ce1944c21a98fbf49c4f118967b3676b23919906bdeda1132a2833" +"md","instructions","bmm","bmm/workflows/4-implementation/sprint-status/instructions.md","d4b7107ddbe33fb5dfc68a626c55585837743c39d171c73052cd93532c35c11d" +"md","instructions","bmm","bmm/workflows/document-project/instructions.md","57762fb89b42df577da1188bc881cf3a8d75a1bcc60bce9e1ab2b8bcfdf29a66" "md","instructions","bmm","bmm/workflows/qa/automate/instructions.md","3f3505f847f943b2f4a0699017c16e15fa3782f51090a0332304d7248e020e0c" "md","prd-purpose","bmm","bmm/workflows/2-plan-workflows/create-prd/data/prd-purpose.md","49c4641b91504bb14e3887029b70beacaff83a2de200ced4f8cb11c1356ecaee" "md","prd-template","bmm","bmm/workflows/2-plan-workflows/create-prd/templates/prd-template.md","7ccccab9c06a626b7a228783b0b9b6e4172e9ec0b10d47bbfab56958c898f837" @@ -202,7 +202,7 @@ type,name,module,path,hash "md","step-01-init","bmm","bmm/workflows/2-plan-workflows/create-ux-design/steps/step-01-init.md","7b3467a29126c9498b57b06d688f610bcb7a68a8975208c209dd1103546bc455" "md","step-01-init","bmm","bmm/workflows/3-solutioning/create-architecture/steps/step-01-init.md","c730b1f23f0298853e5bf0b9007c2fc86e835fb3d53455d2068a6965d1192f49" "md","step-01-mode-detection","bmm","bmm/workflows/bmad-quick-flow/quick-dev/steps/step-01-mode-detection.md","d3170f565ed21633a1f08b50c90349c93d1ec362fe6ec86c746f507796acd745" -"md","step-01-understand","bmm","bmm/workflows/bmad-quick-flow/quick-spec/steps/step-01-understand.md","a65eb3b993d83f24c4f14cd0117d1c21ad5013b32fcdcf7276c6e4ba0aed7d61" +"md","step-01-understand","bmm","bmm/workflows/bmad-quick-flow/quick-spec/steps/step-01-understand.md","9dcea07431d15d15357045e4e1522c3aa6978a099dadf8db674ecf4846e391c7" 
"md","step-01-validate-prerequisites","bmm","bmm/workflows/3-solutioning/create-epics-and-stories/steps/step-01-validate-prerequisites.md","5ba8ba972e8376339ed2c9b75e4f98125521af0270bb5dff6e47ec73137e01de" "md","step-01b-continue","bmm","bmm/workflows/1-analysis/create-product-brief/steps/step-01b-continue.md","08bd92dc8486983ac8b5b19efd943d2fd83f2a6f6ba247aad9bb075e12b20860" "md","step-01b-continue","bmm","bmm/workflows/2-plan-workflows/create-prd/steps-c/step-01b-continue.md","4e8af43d1847236333566efaa4b0b5e63d706e673872705ee6f215a7ccb9d715" @@ -212,7 +212,7 @@ type,name,module,path,hash "md","step-02-context-gathering","bmm","bmm/workflows/bmad-quick-flow/quick-dev/steps/step-02-context-gathering.md","a79d99cc35e43442acda2ce7da80f26f4f50e2be08f38c10e4e5695ce0ff6016" "md","step-02-customer-behavior","bmm","bmm/workflows/1-analysis/research/market-steps/step-02-customer-behavior.md","ca77a54143c2df684cf859e10cea48c6ea1ce8e297068a0f0f26ee63d3170c1e" "md","step-02-design-epics","bmm","bmm/workflows/3-solutioning/create-epics-and-stories/steps/step-02-design-epics.md","2c18d76a9b73eae8b9f552cd4252f8208a0c017624ddbaf6bcbe7b28ddfa217e" -"md","step-02-discovery","bmm","bmm/workflows/2-plan-workflows/create-prd/steps-c/step-02-discovery.md","d13de9d4a4af17f04ae1af7966b3071af54a6445c0944ee83af129ef078ebe5d" +"md","step-02-discovery","bmm","bmm/workflows/2-plan-workflows/create-prd/steps-c/step-02-discovery.md","706d3e040c3997d1985e5088cd05e9310b3e3ee5c37d49f0edd24f54b7b88cc5" "md","step-02-discovery","bmm","bmm/workflows/2-plan-workflows/create-ux-design/steps/step-02-discovery.md","6d340f83d62f873a4c09371a38c77dc9ce9726cd6cd1cf9bf89ddec09f36af4c" "md","step-02-domain-analysis","bmm","bmm/workflows/1-analysis/research/domain-steps/step-02-domain-analysis.md","385a288d9bbb0adf050bcce4da4dad198a9151822f9766900404636f2b0c7f9d" 
"md","step-02-generate","bmm","bmm/workflows/generate-project-context/steps/step-02-generate.md","0fff27dab748b4600d02d2fb083513fa4a4e061ed66828b633f7998fcf8257e1" @@ -220,6 +220,8 @@ type,name,module,path,hash "md","step-02-prd-analysis","bmm","bmm/workflows/3-solutioning/check-implementation-readiness/steps/step-02-prd-analysis.md","f8c4f293c0a040fa9f73829ffeabfa073d0a8ade583adaefb26431ec83a76398" "md","step-02-technical-overview","bmm","bmm/workflows/1-analysis/research/technical-steps/step-02-technical-overview.md","9c7582241038b16280cddce86f2943216541275daf0a935dcab78f362904b305" "md","step-02-vision","bmm","bmm/workflows/1-analysis/create-product-brief/steps/step-02-vision.md","a6262132ec081165358941df207d02e29e5ab00b4f516adf2772effa46d21dd5" +"md","step-02b-vision","bmm","bmm/workflows/2-plan-workflows/create-prd/steps-c/step-02b-vision.md","3b4ec4c20d83ae432d3514742cb00ad58ba653524e7158ea1b1e2c7e8266ea61" +"md","step-02c-executive-summary","bmm","bmm/workflows/2-plan-workflows/create-prd/steps-c/step-02c-executive-summary.md","5f759250087222be739b3cd8f1d4100626d377345b330fcc013388ec16cb855e" "md","step-03-competitive-landscape","bmm","bmm/workflows/1-analysis/research/domain-steps/step-03-competitive-landscape.md","f10aa088ba00c59491507f6519fb314139f8be6807958bb5fd1b66bff2267749" "md","step-03-complete","bmm","bmm/workflows/generate-project-context/steps/step-03-complete.md","cf8d1d1904aeddaddb043c3c365d026cd238891cd702c2b78bae032a8e08ae17" "md","step-03-core-experience","bmm","bmm/workflows/2-plan-workflows/create-ux-design/steps/step-03-core-experience.md","b23ce8244db8a183761a9420fa54ff285bbf7c54b2d30c62c32d3cf8cb4c2f00" @@ -282,7 +284,7 @@ type,name,module,path,hash "md","step-e-02-review","bmm","bmm/workflows/2-plan-workflows/create-prd/steps-e/step-e-02-review.md","b2660d88a445dc3f8f168f96ca92d4a1a36949e3b39fbf6cda5c77129636d9b1" 
"md","step-e-03-edit","bmm","bmm/workflows/2-plan-workflows/create-prd/steps-e/step-e-03-edit.md","dfcc3e4f0b1ec050d4985af04dc02b28174a995e95327ca01ae4b8cac10cc1e5" "md","step-e-04-complete","bmm","bmm/workflows/2-plan-workflows/create-prd/steps-e/step-e-04-complete.md","a1100f8639120311cbaf5a5a880db4e137216bc4bd0110b0926004107a99d3c3" -"md","step-v-01-discovery","bmm","bmm/workflows/2-plan-workflows/create-prd/steps-v/step-v-01-discovery.md","287c39e44b32faab52fb155a4a30ab3f31cf6ef5c599b8b15687e5bb3c97a447" +"md","step-v-01-discovery","bmm","bmm/workflows/2-plan-workflows/create-prd/steps-v/step-v-01-discovery.md","bd3353377451ab6ebffdb94895c4e089fb2e5dce4ecb33c5b69f42f71022ea1f" "md","step-v-02-format-detection","bmm","bmm/workflows/2-plan-workflows/create-prd/steps-v/step-v-02-format-detection.md","251ea5a1cf7779db2dc39d5d8317976a27f84b421359c1974ae96c0943094341" "md","step-v-02b-parity-check","bmm","bmm/workflows/2-plan-workflows/create-prd/steps-v/step-v-02b-parity-check.md","3481beae212bb0140c105d0ae87bb9714859c93a471048048512fd1278da2fcd" "md","step-v-03-density-validation","bmm","bmm/workflows/2-plan-workflows/create-prd/steps-v/step-v-03-density-validation.md","5b95ecd032fb65f86b7eee7ce7c30c997dc2a8b5e4846d88c2853538591a9e40" @@ -304,32 +306,32 @@ type,name,module,path,hash "md","workflow","bmm","bmm/workflows/3-solutioning/check-implementation-readiness/workflow.md","ddfe66e2ced3a092d0be1606d36c5eb9610602e939059c902b22da1aa202e904" "md","workflow","bmm","bmm/workflows/3-solutioning/create-architecture/workflow.md","ad930c2c9b991fb56f0d04cfdbc69d04bffd5df2c515ca570ad7d388f56a055c" "md","workflow","bmm","bmm/workflows/3-solutioning/create-epics-and-stories/workflow.md","d40eb6e04de52d4265af460322a9487bb2c241453b0a59940e1bb04836a7ba65" -"md","workflow","bmm","bmm/workflows/bmad-quick-flow/quick-dev/workflow.md","7e13f74e23f9de40ed15140b5cadb28a7462ad019dc345422b3aede59ad8e7f7" 
-"md","workflow","bmm","bmm/workflows/bmad-quick-flow/quick-spec/workflow.md","e7856a24e0f39108ae494569b8ceb2eb1ca10588ed3869c5feef98832c54db78" +"md","workflow","bmm","bmm/workflows/bmad-quick-flow/quick-dev/workflow.md","2f2b404184346494cb769b36aab2872b0b9aaaad38057d42a7702cf6c5110501" +"md","workflow","bmm","bmm/workflows/bmad-quick-flow/quick-spec/workflow.md","57125255ac43c2ccaa421b6334ee1c5362db140e408a7d94be6e32d4c2e6cc47" "md","workflow","bmm","bmm/workflows/generate-project-context/workflow.md","0da857be1b7fb46fc29afba22b78a8b2150b17db36db68fd254ad925a20666aa" "md","workflow-create-prd","bmm","bmm/workflows/2-plan-workflows/create-prd/workflow-create-prd.md","2331a3f02fd4bc3628e3bb1684645e8392a77e8b5b9f918e55554616a2bfe06b" "md","workflow-domain-research","bmm","bmm/workflows/1-analysis/research/workflow-domain-research.md","137509e99ad4b11c391ebe87832d4820c46da75ed8570dd5b5a71f4372b75c73" "md","workflow-edit-prd","bmm","bmm/workflows/2-plan-workflows/create-prd/workflow-edit-prd.md","e433664058429f54b49237ad7b2eba43fb115b8b9c68c87846f9523405ac73ef" "md","workflow-market-research","bmm","bmm/workflows/1-analysis/research/workflow-market-research.md","2798d9cbeab426df7f2bcc228771fc5d5e1a58302eef769e2bbd36ce7d7f43e4" "md","workflow-technical-research","bmm","bmm/workflows/1-analysis/research/workflow-technical-research.md","16974efc305ab195209232eea5e7ab828df2c6244b8c2ba7ca4a517e90b38b64" -"md","workflow-validate-prd","bmm","bmm/workflows/2-plan-workflows/create-prd/workflow-validate-prd.md","5f7d3a188b5c68fb621b31da4ba62b75444615a0f8eadfe8c505c11f4a8e404c" +"md","workflow-validate-prd","bmm","bmm/workflows/2-plan-workflows/create-prd/workflow-validate-prd.md","149cd27aef9df5b5d7bb7c94b3b7d077aba1a17dc647de673d60da15ddc62539" "xml","instructions","bmm","bmm/workflows/4-implementation/code-review/instructions.xml","1a6f0ae7d69a5c27b09de3efab2b205a007b466976acdeeaebf7f3abec7feb68" 
-"xml","instructions","bmm","bmm/workflows/4-implementation/create-story/instructions.xml","38eae4b503711a162f55ccd41b770248581a4357cbbfe1cf1bb34520307ccd63" -"xml","instructions","bmm","bmm/workflows/4-implementation/dev-story/instructions.xml","396eba2694f455e9aa8f0e123b4147799e07205cfb666a411e8a5d0d4b6b5daa" -"yaml","config","bmm","bmm/config.yaml","0a6573f3f0ef230b84bb9e7409fd11b5384f8f280c201c4d5af2b24fe3bea253" -"yaml","deep-dive","bmm","bmm/workflows/document-project/workflows/deep-dive.yaml","a16b5d121604ca00fffdcb04416daf518ec2671a3251b7876c4b590d25d96945" -"yaml","full-scan","bmm","bmm/workflows/document-project/workflows/full-scan.yaml","8ba79b190733006499515d9d805f4eacd90a420ffc454e04976948c114806c25" +"xml","instructions","bmm","bmm/workflows/4-implementation/create-story/instructions.xml","d4edc80bd7ccc0f7a844ecb575016b79380e255a236d1182f5f7312a104f0e3a" +"xml","instructions","bmm","bmm/workflows/4-implementation/dev-story/instructions.xml","b177c039072ad5e8a54374e6a17a2074dd608fd4da047bef528e362919a0fde8" +"yaml","config","bmm","bmm/config.yaml","81e3ec7befcbd126d0c6c5cf9207397d72533affc0914462f93430fb418bea11" +"yaml","deep-dive","bmm","bmm/workflows/document-project/workflows/deep-dive.yaml","efa8d70a594b7580f5312340f93da16f9e106419b1b1d06d2e23d6a30ef963fa" +"yaml","full-scan","bmm","bmm/workflows/document-project/workflows/full-scan.yaml","9d71cce37de1c3f43a7122f3c9705abdf3d677141698a2ab1b89a225f78f3fa9" "yaml","sprint-status-template","bmm","bmm/workflows/4-implementation/sprint-planning/sprint-status-template.yaml","0d7fe922f21d4f00e538c265ff90e470c3e2eca761e663d84b7a1320b2f25980" "yaml","team-fullstack","bmm","bmm/teams/team-fullstack.yaml","da8346b10dfad8e1164a11abeb3b0a84a1d8b5f04e01e8490a44ffca477a1b96" -"yaml","workflow","bmm","bmm/workflows/4-implementation/code-review/workflow.yaml","4ddef804c51bd83ad51f39e752333383ab559c0986efb8404b659e4728c81ad8" 
-"yaml","workflow","bmm","bmm/workflows/4-implementation/correct-course/workflow.yaml","0d9c4502fc2f9524644918e33271d648bf7929e91eba8645e39d7d7c7e67eac7" -"yaml","workflow","bmm","bmm/workflows/4-implementation/create-story/workflow.yaml","7989989306494ad06bd5a08f9be73b50d791389226c3b2c8c281ffb8d078d70a" -"yaml","workflow","bmm","bmm/workflows/4-implementation/dev-story/workflow.yaml","36d144a797706f438f973d4fe0679b98096eb1b911f8b7df3f9a8db4fab5e9d2" -"yaml","workflow","bmm","bmm/workflows/4-implementation/retrospective/workflow.yaml","7eac3fda56bb7106a160b446121de55b25d20d60eadcf2caf1ca3245ad84208f" -"yaml","workflow","bmm","bmm/workflows/4-implementation/sprint-planning/workflow.yaml","3f31e4b0973525228549cef18123816d82dc45741dab1f48720eefb191876f81" -"yaml","workflow","bmm","bmm/workflows/4-implementation/sprint-status/workflow.yaml","f03d2804afca3ee29a612117f6bf090b455354a3557c2198ec9b8eb5c5900cef" -"yaml","workflow","bmm","bmm/workflows/document-project/workflow.yaml","9e2886d022d4054c0e6ca6580673f775415add7924961d6723ed13156200a819" -"yaml","workflow","bmm","bmm/workflows/qa/automate/workflow.yaml","670d28da3e20a445ae08ab3e907eaf3eaf13d9a08c4b26244344a0fd8f54a399" +"yaml","workflow","bmm","bmm/workflows/4-implementation/code-review/workflow.yaml","a431060bb5069fb2abe6dac53f2b9bb9ed154319b874cd00f8b5face0496073e" +"yaml","workflow","bmm","bmm/workflows/4-implementation/correct-course/workflow.yaml","db0da2523bdef2fb7cecb9d26fc2795370a0e83eb3a73dd5f871c1a8e8f667b2" +"yaml","workflow","bmm","bmm/workflows/4-implementation/create-story/workflow.yaml","c1f1a56a1a485f24c3b8cadd9f583cc684a60e2219b4fc173724b366d7cfd1ad" +"yaml","workflow","bmm","bmm/workflows/4-implementation/dev-story/workflow.yaml","5675197327e95be199e42d19a7361e529f86e7e067cebd359a40532555650db3" +"yaml","workflow","bmm","bmm/workflows/4-implementation/retrospective/workflow.yaml","4e93ddc82ea0e875894ec27564b97970b57f6bfe29e257ada8fa628d8a579002" 
+"yaml","workflow","bmm","bmm/workflows/4-implementation/sprint-planning/workflow.yaml","efe6ef312dfc9b92a5837f2cf74bcd0b52cd5a1a171067d530934c5f6b42ed57" +"yaml","workflow","bmm","bmm/workflows/4-implementation/sprint-status/workflow.yaml","391bb9c265cb930654b06948c442101bc3def3fdc880b8481773a51a17d5d989" +"yaml","workflow","bmm","bmm/workflows/document-project/workflow.yaml","0c8f6ed05c48ec69b7ebb1cfe3acda65c6480abb082d6dbd7696405978127b91" +"yaml","workflow","bmm","bmm/workflows/qa/automate/workflow.yaml","71503c95c1dddd963cc689d6cbeb38d9cacdbc747a5467f1c933394548c34cc0" "csv","default-party","cis","cis/teams/default-party.csv","464310e738ec38cf8114552e8274f6c517a17db0e0b176d494ab50154ba982d5" "csv","design-methods","cis","cis/workflows/design-thinking/design-methods.csv","6735e9777620398e35b7b8ccb21e9263d9164241c3b9973eb76f5112fb3a8fc9" "csv","innovation-frameworks","cis","cis/workflows/innovation-strategy/innovation-frameworks.csv","9a14473b1d667467172d8d161e91829c174e476a030a983f12ec6af249c4e42f" @@ -349,7 +351,7 @@ type,name,module,path,hash "md","template","cis","cis/workflows/innovation-strategy/template.md","e59bd789df87130bde034586d3e68bf1847c074f63d839945e0c29b1d0c85c82" "md","template","cis","cis/workflows/problem-solving/template.md","6c9efd7ac7b10010bd9911db16c2fbdca01fb0c306d871fa6381eef700b45608" "md","template","cis","cis/workflows/storytelling/template.md","461981aa772ef2df238070cbec90fc40995df2a71a8c22225b90c91afed57452" -"yaml","config","cis","cis/config.yaml","b10a19d9b4879ad2ae3aa23b1d7c519ce27a2aa07bab3ccbf6f0d8d65220a503" +"yaml","config","cis","cis/config.yaml","14ca25f123e04b9412281eb3a40e7a4c3a3ff0a1089460dcce6ea672d185dfcd" "yaml","creative-squad","cis","cis/teams/creative-squad.yaml","25407cf0ebdf5b10884cd03c86068e04715ef270ada93a3b64cb9907b62c71cf" "yaml","workflow","cis","cis/workflows/design-thinking/workflow.yaml","1feb8900e6716125af1ef533bcc54659670de0a3e44ff66348518423c5e7a7fb" 
"yaml","workflow","cis","cis/workflows/innovation-strategy/workflow.yaml","37b5e7f7d89999c85591bd5d95bfe2617f7690cfb8f0e1064803ec307a56eaaa" @@ -380,4 +382,4 @@ type,name,module,path,hash "xml","shard-doc","core","core/tasks/shard-doc.xml","947f2c7d4f6bb269ad0bcc1a03227d0d6da642d9df47894b8ba215c5149aed3d" "xml","workflow","core","core/tasks/workflow.xml","17bca7fa63bae20aaac4768d81463a7a2de7f80b60d4d9a8f36b70821ba86cfd" "xml","workflow","core","core/workflows/advanced-elicitation/workflow.xml","ead4dc1e50c95d8966b3676842a57fca97c70d83f1f3b9e9c2d746821e6868b4" -"yaml","config","core","core/config.yaml","ce05d8f79140f56f3766e5613e75ed1ef0d0a66aa40f3abf6a579e692ec60e04" +"yaml","config","core","core/config.yaml","7338e2560f0e40c576976ab4d513b9be818c70d2632552d79c56cc50548518d7" diff --git a/_bmad/_config/ides/antigravity.yaml b/_bmad/_config/ides/antigravity.yaml index dd6c570..94182ea 100644 --- a/_bmad/_config/ides/antigravity.yaml +++ b/_bmad/_config/ides/antigravity.yaml @@ -1,5 +1,5 @@ ide: antigravity configured_date: 2026-02-12T20:59:56.441Z -last_updated: 2026-02-12T20:59:56.441Z +last_updated: 2026-02-18T19:39:00.060Z configuration: _noConfigNeeded: true diff --git a/_bmad/_config/ides/cline.yaml b/_bmad/_config/ides/cline.yaml new file mode 100644 index 0000000..9fff891 --- /dev/null +++ b/_bmad/_config/ides/cline.yaml @@ -0,0 +1,5 @@ +ide: cline +configured_date: 2026-02-18T19:39:00.098Z +last_updated: 2026-02-18T19:39:00.098Z +configuration: + _noConfigNeeded: true diff --git a/_bmad/_config/ides/cursor.yaml b/_bmad/_config/ides/cursor.yaml index 8515c61..9ab85b8 100644 --- a/_bmad/_config/ides/cursor.yaml +++ b/_bmad/_config/ides/cursor.yaml @@ -1,5 +1,5 @@ ide: cursor configured_date: 2026-02-12T20:59:56.426Z -last_updated: 2026-02-12T20:59:56.426Z +last_updated: 2026-02-18T19:39:00.035Z configuration: _noConfigNeeded: true diff --git a/_bmad/_config/ides/opencode.yaml b/_bmad/_config/ides/opencode.yaml index 1c07fe0..377d42d 100644 --- 
a/_bmad/_config/ides/opencode.yaml +++ b/_bmad/_config/ides/opencode.yaml @@ -1,5 +1,5 @@ ide: opencode configured_date: 2026-02-12T20:59:56.454Z -last_updated: 2026-02-12T20:59:56.454Z +last_updated: 2026-02-18T19:39:00.081Z configuration: _noConfigNeeded: true diff --git a/_bmad/_config/manifest.yaml b/_bmad/_config/manifest.yaml index ebf6a48..454e1b5 100644 --- a/_bmad/_config/manifest.yaml +++ b/_bmad/_config/manifest.yaml @@ -1,33 +1,33 @@ installation: - version: 6.0.0-Beta.8 + version: 6.0.1 installDate: 2026-02-12T20:59:56.383Z - lastUpdated: 2026-02-12T20:59:56.383Z + lastUpdated: 2026-02-18T19:38:59.980Z modules: - name: core - version: 6.0.0-Beta.8 + version: 6.0.1 installDate: 2026-02-12T20:59:55.888Z - lastUpdated: 2026-02-12T20:59:55.888Z + lastUpdated: 2026-02-18T19:38:59.449Z + source: built-in + npmPackage: null + repoUrl: null + - name: bmm + version: 6.0.1 + installDate: 2026-02-12T20:59:54.514Z + lastUpdated: 2026-02-18T19:38:59.449Z source: built-in npmPackage: null repoUrl: null - name: bmb version: 0.1.6 installDate: 2026-02-12T20:59:54.421Z - lastUpdated: 2026-02-12T20:59:56.136Z + lastUpdated: 2026-02-18T19:38:59.720Z source: external npmPackage: bmad-builder repoUrl: https://github.com/bmad-code-org/bmad-builder - - name: bmm - version: 6.0.0-Beta.8 - installDate: 2026-02-12T20:59:54.514Z - lastUpdated: 2026-02-12T20:59:56.136Z - source: built-in - npmPackage: null - repoUrl: null - name: cis version: 0.1.6 installDate: 2026-02-12T20:59:55.869Z - lastUpdated: 2026-02-12T20:59:56.383Z + lastUpdated: 2026-02-18T19:38:59.980Z source: external npmPackage: bmad-creative-intelligence-suite repoUrl: https://github.com/bmad-code-org/bmad-module-creative-intelligence-suite @@ -35,3 +35,4 @@ ides: - cursor - antigravity - opencode + - cline diff --git a/_bmad/_config/workflow-manifest.csv b/_bmad/_config/workflow-manifest.csv index 4093016..caddbc0 100644 --- a/_bmad/_config/workflow-manifest.csv +++ b/_bmad/_config/workflow-manifest.csv @@ -1,18 
+1,6 @@ name,description,module,path "brainstorming","Facilitate interactive brainstorming sessions using diverse creative techniques and ideation methods","core","_bmad/core/workflows/brainstorming/workflow.md" "party-mode","Orchestrates group discussions between all installed BMAD agents, enabling natural multi-agent conversations","core","_bmad/core/workflows/party-mode/workflow.md" -"create-agent","Create a new BMAD agent with best practices and compliance","bmb","_bmad/bmb/workflows/agent/workflow-create-agent.md" -"edit-agent","Edit existing BMAD agents while maintaining compliance","bmb","_bmad/bmb/workflows/agent/workflow-edit-agent.md" -"validate-agent","Validate existing BMAD agents and offer to improve deficiencies","bmb","_bmad/bmb/workflows/agent/workflow-validate-agent.md" -"create-module-brief","Create product brief for BMAD module development","bmb","_bmad/bmb/workflows/module/workflow-create-module-brief.md" -"create-module","Create a complete BMAD module with agents, workflows, and infrastructure","bmb","_bmad/bmb/workflows/module/workflow-create-module.md" -"edit-module","Edit existing BMAD modules while maintaining coherence","bmb","_bmad/bmb/workflows/module/workflow-edit-module.md" -"validate-module","Run compliance check on BMAD modules against best practices","bmb","_bmad/bmb/workflows/module/workflow-validate-module.md" -"create-workflow","Create a new BMAD workflow with proper structure and best practices","bmb","_bmad/bmb/workflows/workflow/workflow-create-workflow.md" -"edit-workflow","Edit existing BMAD workflows while maintaining integrity","bmb","_bmad/bmb/workflows/workflow/workflow-edit-workflow.md" -"rework-workflow","Rework a Workflow to a V6 Compliant Version","bmb","_bmad/bmb/workflows/workflow/workflow-rework-workflow.md" -"validate-max-parallel-workflow","Run validation checks in MAX-PARALLEL mode against a workflow requires a tool that supports Parallel 
Sub-Processes","bmb","_bmad/bmb/workflows/workflow/workflow-validate-max-parallel-workflow.md" -"validate-workflow","Run validation check on BMAD workflows against best practices","bmb","_bmad/bmb/workflows/workflow/workflow-validate-workflow.md" "create-product-brief","Create comprehensive product briefs through collaborative step-by-step discovery as creative Business Analyst working with the user as peers.","bmm","_bmad/bmm/workflows/1-analysis/create-product-brief/workflow.md" "domain-research","Conduct domain research covering industry analysis, regulations, technology trends, and ecosystem dynamics using current web data and verified sources.","bmm","_bmad/bmm/workflows/1-analysis/research/workflow-domain-research.md" "market-research","Conduct market research covering market size, growth, competition, and customer insights using current web data and verified sources.","bmm","_bmad/bmm/workflows/1-analysis/research/workflow-market-research.md" @@ -36,6 +24,18 @@ name,description,module,path "document-project","Analyzes and documents brownfield projects by scanning codebase, architecture, and patterns to create comprehensive reference documentation for AI-assisted development","bmm","_bmad/bmm/workflows/document-project/workflow.yaml" "generate-project-context","Creates a concise project-context.md file with critical rules and patterns that AI agents must follow when implementing code. 
Optimized for LLM context efficiency.","bmm","_bmad/bmm/workflows/generate-project-context/workflow.md" "qa-automate","Generate tests quickly for existing features using standard test patterns","bmm","_bmad/bmm/workflows/qa/automate/workflow.yaml" +"create-agent","Create a new BMAD agent with best practices and compliance","bmb","_bmad/bmb/workflows/agent/workflow-create-agent.md" +"edit-agent","Edit existing BMAD agents while maintaining compliance","bmb","_bmad/bmb/workflows/agent/workflow-edit-agent.md" +"validate-agent","Validate existing BMAD agents and offer to improve deficiencies","bmb","_bmad/bmb/workflows/agent/workflow-validate-agent.md" +"create-module-brief","Create product brief for BMAD module development","bmb","_bmad/bmb/workflows/module/workflow-create-module-brief.md" +"create-module","Create a complete BMAD module with agents, workflows, and infrastructure","bmb","_bmad/bmb/workflows/module/workflow-create-module.md" +"edit-module","Edit existing BMAD modules while maintaining coherence","bmb","_bmad/bmb/workflows/module/workflow-edit-module.md" +"validate-module","Run compliance check on BMAD modules against best practices","bmb","_bmad/bmb/workflows/module/workflow-validate-module.md" +"create-workflow","Create a new BMAD workflow with proper structure and best practices","bmb","_bmad/bmb/workflows/workflow/workflow-create-workflow.md" +"edit-workflow","Edit existing BMAD workflows while maintaining integrity","bmb","_bmad/bmb/workflows/workflow/workflow-edit-workflow.md" +"rework-workflow","Rework a Workflow to a V6 Compliant Version","bmb","_bmad/bmb/workflows/workflow/workflow-rework-workflow.md" +"validate-max-parallel-workflow","Run validation checks in MAX-PARALLEL mode against a workflow requires a tool that supports Parallel Sub-Processes","bmb","_bmad/bmb/workflows/workflow/workflow-validate-max-parallel-workflow.md" +"validate-workflow","Run validation check on BMAD workflows against best 
practices","bmb","_bmad/bmb/workflows/workflow/workflow-validate-workflow.md" "design-thinking","Guide human-centered design processes using empathy-driven methodologies. This workflow walks through the design thinking phases - Empathize, Define, Ideate, Prototype, and Test - to create solutions deeply rooted in user needs.","cis","_bmad/cis/workflows/design-thinking/workflow.yaml" "innovation-strategy","Identify disruption opportunities and architect business model innovation. This workflow guides strategic analysis of markets, competitive dynamics, and business model innovation to uncover sustainable competitive advantages and breakthrough opportunities.","cis","_bmad/cis/workflows/innovation-strategy/workflow.yaml" "problem-solving","Apply systematic problem-solving methodologies to crack complex challenges. This workflow guides through problem diagnosis, root cause analysis, creative solution generation, evaluation, and implementation planning using proven frameworks.","cis","_bmad/cis/workflows/problem-solving/workflow.yaml" diff --git a/_bmad/_memory/config.yaml b/_bmad/_memory/config.yaml index b5bfa58..a7b1b71 100644 --- a/_bmad/_memory/config.yaml +++ b/_bmad/_memory/config.yaml @@ -1,7 +1,7 @@ # _MEMORY Module Configuration # Generated by BMAD installer -# Version: 6.0.0-Beta.8 -# Date: 2026-02-12T20:59:55.870Z +# Version: 6.0.1 +# Date: 2026-02-18T19:38:59.435Z # Core Configuration Values diff --git a/_bmad/bmb/config.yaml b/_bmad/bmb/config.yaml index 63a0aba..a0819e0 100644 --- a/_bmad/bmb/config.yaml +++ b/_bmad/bmb/config.yaml @@ -1,7 +1,7 @@ # BMB Module Configuration # Generated by BMAD installer -# Version: 6.0.0-Beta.8 -# Date: 2026-02-12T20:59:55.870Z +# Version: 6.0.1 +# Date: 2026-02-18T19:38:59.436Z bmb_creations_output_folder: "{project-root}/_bmad-output/bmb-creations" diff --git a/_bmad/bmb/workflows/agent/steps-c/step-08-celebrate.md b/_bmad/bmb/workflows/agent/steps-c/step-08-celebrate.md index 51b898c..bfb2d28 100644 --- 
a/_bmad/bmb/workflows/agent/steps-c/step-08-celebrate.md +++ b/_bmad/bmb/workflows/agent/steps-c/step-08-celebrate.md @@ -122,22 +122,21 @@ Present enthusiastic celebration: **Key Steps:** 1. **Create a module folder:** Name it something descriptive (e.g., `my-custom-stuff`) -2. **Add module.yaml:** Include a `module.yaml` file with `unitary: true` -3. **Structure your agent:** Place your agent file in `agents/{agent-name}/{agent-name}.agent.yaml` -4. **Include sidecar (if Expert):** For Expert agents, include the `_memory/{sidecar-folder}/` structure +2. **Add module.yaml:** Include a `module.yaml` file with `code`, `name`, `version` +3. **Copy your agent:** Copy the entire folder from `_bmad-creations/{agent-name}/` to `agents/` +4. **The workflow handles structure:** Sidecar folders are already in the right place **Module Structure Example:** ``` my-custom-stuff/ -├── module.yaml # Contains: unitary: true -├── agents/ # Custom agents go here +├── module.yaml +├── agents/ # Copy entire folder from _bmad-creations/ │ └── {agent-name}/ │ ├── {agent-name}.agent.yaml -│ └── _memory/ # Expert agents only -│ └── {sidecar-folder}/ -│ ├── memories.md -│ └── instructions.md -└── workflows/ # Optional: standalone custom workflows +│ └── {agent-name}-sidecar/ # Already created by workflow if hasSidecar: true +│ ├── memories.md +│ └── instructions.md +└── workflows/ # Optional: standalone custom workflows └── {workflow-name}/ └── workflow.md ``` @@ -175,16 +174,15 @@ my-custom-stuff/ ### Installation -Package your agent as a standalone module with `module.yaml` containing `unitary: true`. +Package your agent as a standalone module with a `module.yaml` file. See: {installationDocs} ### Quick Start 1. Create a module folder -2. Add module.yaml with `unitary: true` -3. Place agent in `agents/{agent-name}/` structure -4. Include sidecar folder for Expert agents -5. Install via BMAD installer +2. Add module.yaml with code, name, version +3. 
Copy entire agent folder from `_bmad-creations/{agent-name}/` to `agents/` +4. Install via BMAD installer ``` Save this content to `{outputFile}` for reference. diff --git a/_bmad/bmb/workflows/module/module-help-generate.md b/_bmad/bmb/workflows/module/module-help-generate.md index 1bb145f..de62e8e 100644 --- a/_bmad/bmb/workflows/module/module-help-generate.md +++ b/_bmad/bmb/workflows/module/module-help-generate.md @@ -91,7 +91,7 @@ May only have phase-3 entries that integrate into another module's workflow Sequence numbers fit logically before/after existing items ``` -**Standalone/Unitary collections:** +**Standalone collections:** ``` All entries are anytime No sequence numbers @@ -123,7 +123,7 @@ Load and read: Extract: - `code` - Module identifier -- `type` - Module type (module, unitary, etc.) +- `type` - Module type - `name` - Module display name ### Step 3: Check for Existing module-help.csv @@ -232,7 +232,7 @@ mwm,phase-1,Daily Check In,DCI,10,_bmad/mwm/workflows/daily-checkin/workflow.md, mwm,phase-2,Wellness Journal,WJ,20,_bmad/mwm/workflows/wellness-journal/workflow.md,mwm_journal,false,wellness-companion,Journal Mode,"Reflect and track your wellness journey",mwm_output,"entry", ``` -### Unitary/Standalone Module (like bmad-custom): +### Standalone Module (like bmad-custom): ```csv module,phase,name,code,sequence,workflow-file,command,required,agent,options,description,output-location,outputs, bmad-custom,anytime,Quiz Master,QM,,"bmad_quiz",false,,Trivia,"Interactive trivia quiz with gameshow atmosphere",bmad_output,"results", diff --git a/_bmad/bmm/agents/analyst.md b/_bmad/bmm/agents/analyst.md index 6107e49..a7e6348 100644 --- a/_bmad/bmm/agents/analyst.md +++ b/_bmad/bmm/agents/analyst.md @@ -6,7 +6,7 @@ description: "Business Analyst" You must fully embody this agent's persona and follow all activation instructions exactly as specified. NEVER break character until given an exit command. 
```xml - + Load persona from this current agent file (already in context) 🚨 IMMEDIATE ACTION REQUIRED - BEFORE ANY OUTPUT: diff --git a/_bmad/bmm/agents/architect.md b/_bmad/bmm/agents/architect.md index 5b32642..05563c2 100644 --- a/_bmad/bmm/agents/architect.md +++ b/_bmad/bmm/agents/architect.md @@ -6,7 +6,7 @@ description: "Architect" You must fully embody this agent's persona and follow all activation instructions exactly as specified. NEVER break character until given an exit command. ```xml - + Load persona from this current agent file (already in context) 🚨 IMMEDIATE ACTION REQUIRED - BEFORE ANY OUTPUT: diff --git a/_bmad/bmm/agents/dev.md b/_bmad/bmm/agents/dev.md index 5111e07..37d9a72 100644 --- a/_bmad/bmm/agents/dev.md +++ b/_bmad/bmm/agents/dev.md @@ -6,7 +6,7 @@ description: "Developer Agent" You must fully embody this agent's persona and follow all activation instructions exactly as specified. NEVER break character until given an exit command. ```xml - + Load persona from this current agent file (already in context) 🚨 IMMEDIATE ACTION REQUIRED - BEFORE ANY OUTPUT: diff --git a/_bmad/bmm/agents/pm.md b/_bmad/bmm/agents/pm.md index f05b6ce..b5a91ac 100644 --- a/_bmad/bmm/agents/pm.md +++ b/_bmad/bmm/agents/pm.md @@ -6,7 +6,7 @@ description: "Product Manager" You must fully embody this agent's persona and follow all activation instructions exactly as specified. NEVER break character until given an exit command. ```xml - + Load persona from this current agent file (already in context) 🚨 IMMEDIATE ACTION REQUIRED - BEFORE ANY OUTPUT: diff --git a/_bmad/bmm/agents/qa.md b/_bmad/bmm/agents/qa.md index b137c71..0b4f3fa 100644 --- a/_bmad/bmm/agents/qa.md +++ b/_bmad/bmm/agents/qa.md @@ -6,7 +6,7 @@ description: "QA Engineer" You must fully embody this agent's persona and follow all activation instructions exactly as specified. NEVER break character until given an exit command. 
```xml - + Load persona from this current agent file (already in context) 🚨 IMMEDIATE ACTION REQUIRED - BEFORE ANY OUTPUT: diff --git a/_bmad/bmm/agents/quick-flow-solo-dev.md b/_bmad/bmm/agents/quick-flow-solo-dev.md index b770278..178e6d3 100644 --- a/_bmad/bmm/agents/quick-flow-solo-dev.md +++ b/_bmad/bmm/agents/quick-flow-solo-dev.md @@ -6,7 +6,7 @@ description: "Quick Flow Solo Dev" You must fully embody this agent's persona and follow all activation instructions exactly as specified. NEVER break character until given an exit command. ```xml - + Load persona from this current agent file (already in context) 🚨 IMMEDIATE ACTION REQUIRED - BEFORE ANY OUTPUT: diff --git a/_bmad/bmm/agents/sm.md b/_bmad/bmm/agents/sm.md index 8c49f4b..8dd0258 100644 --- a/_bmad/bmm/agents/sm.md +++ b/_bmad/bmm/agents/sm.md @@ -6,7 +6,7 @@ description: "Scrum Master" You must fully embody this agent's persona and follow all activation instructions exactly as specified. NEVER break character until given an exit command. ```xml - + Load persona from this current agent file (already in context) 🚨 IMMEDIATE ACTION REQUIRED - BEFORE ANY OUTPUT: diff --git a/_bmad/bmm/agents/tech-writer/tech-writer.md b/_bmad/bmm/agents/tech-writer/tech-writer.md index 4c4cb65..e26e868 100644 --- a/_bmad/bmm/agents/tech-writer/tech-writer.md +++ b/_bmad/bmm/agents/tech-writer/tech-writer.md @@ -6,7 +6,7 @@ description: "Technical Writer" You must fully embody this agent's persona and follow all activation instructions exactly as specified. NEVER break character until given an exit command. ```xml - + Load persona from this current agent file (already in context) 🚨 IMMEDIATE ACTION REQUIRED - BEFORE ANY OUTPUT: @@ -52,7 +52,7 @@ You must fully embody this agent's persona and follow all activation instruction Technical Documentation Specialist + Knowledge Curator Experienced technical writer expert in CommonMark, DITA, OpenAPI. 
Master of clarity - transforms complex concepts into accessible structured documentation. Patient educator who explains like teaching a friend. Uses analogies that make complex simple, celebrates clarity when it shines. - - Every Technical Document I touch helps someone accomplish a task. Thus I strive for Clarity above all, and every word and phrase serves a purpose without being overly wordy. - I believe a picture/diagram is worth 1000s works and will include diagrams over drawn out text. - I understand the intended audience or will clarify with the user so I know when to simplify vs when to be detailed. - I will always strive to follow `_bmad/_memory/tech-writer-sidecar/documentation-standards.md` best practices. + - Every Technical Document I touch helps someone accomplish a task. Thus I strive for Clarity above all, and every word and phrase serves a purpose without being overly wordy. - I believe a picture/diagram is worth 1000s of words and will include diagrams over drawn out text. - I understand the intended audience or will clarify with the user so I know when to simplify vs when to be detailed. - I will always strive to follow `_bmad/_memory/tech-writer-sidecar/documentation-standards.md` best practices. [MH] Redisplay Menu Help diff --git a/_bmad/bmm/agents/ux-designer.md b/_bmad/bmm/agents/ux-designer.md index 0bbcd11..b5c5b5b 100644 --- a/_bmad/bmm/agents/ux-designer.md +++ b/_bmad/bmm/agents/ux-designer.md @@ -6,7 +6,7 @@ description: "UX Designer" You must fully embody this agent's persona and follow all activation instructions exactly as specified. NEVER break character until given an exit command. 
```xml - + Load persona from this current agent file (already in context) 🚨 IMMEDIATE ACTION REQUIRED - BEFORE ANY OUTPUT: @@ -49,7 +49,7 @@ You must fully embody this agent's persona and follow all activation instruction [MH] Redisplay Menu Help [CH] Chat with the Agent about anything - [CU] Create UX: Guidance through realizing the plan for your UX to inform architecture and implementation. PRovides more details that what was discovered in the PRD + [CU] Create UX: Guidance through realizing the plan for your UX to inform architecture and implementation. Provides more details than what was discovered in the PRD [PM] Start Party Mode [DA] Dismiss Agent diff --git a/_bmad/bmm/config.yaml b/_bmad/bmm/config.yaml index b1892f5..41f3d66 100644 --- a/_bmad/bmm/config.yaml +++ b/_bmad/bmm/config.yaml @@ -1,7 +1,7 @@ # BMM Module Configuration # Generated by BMAD installer -# Version: 6.0.0-Beta.8 -# Date: 2026-02-12T20:59:55.871Z +# Version: 6.0.1 +# Date: 2026-02-18T19:38:59.436Z project_name: Entropyk user_skill_level: intermediate diff --git a/_bmad/bmm/workflows/2-plan-workflows/create-prd/steps-c/step-02-discovery.md b/_bmad/bmm/workflows/2-plan-workflows/create-prd/steps-c/step-02-discovery.md index 4829a4d..137e7d4 100644 --- a/_bmad/bmm/workflows/2-plan-workflows/create-prd/steps-c/step-02-discovery.md +++ b/_bmad/bmm/workflows/2-plan-workflows/create-prd/steps-c/step-02-discovery.md @@ -3,7 +3,7 @@ name: 'step-02-discovery' description: 'Discover project type, domain, and context through collaborative dialogue' # File References -nextStepFile: './step-03-success.md' +nextStepFile: './step-02b-vision.md' outputFile: '{planning_artifacts}/prd.md' # Data Files diff --git a/_bmad/bmm/workflows/2-plan-workflows/create-prd/steps-c/step-02b-vision.md b/_bmad/bmm/workflows/2-plan-workflows/create-prd/steps-c/step-02b-vision.md new file mode 100644 index 0000000..e7129bf --- /dev/null +++ b/_bmad/bmm/workflows/2-plan-workflows/create-prd/steps-c/step-02b-vision.md @@ 
-0,0 +1,154 @@ +--- +name: 'step-02b-vision' +description: 'Discover the product vision and differentiator through collaborative dialogue' + +# File References +nextStepFile: './step-02c-executive-summary.md' +outputFile: '{planning_artifacts}/prd.md' + +# Task References +advancedElicitationTask: '{project-root}/_bmad/core/workflows/advanced-elicitation/workflow.xml' +partyModeWorkflow: '{project-root}/_bmad/core/workflows/party-mode/workflow.md' +--- + +# Step 2b: Product Vision Discovery + +**Progress: Step 2b of 13** - Next: Executive Summary + +## STEP GOAL: + +Discover what makes this product special and understand the product vision through collaborative conversation. No content generation — facilitation only. + +## MANDATORY EXECUTION RULES (READ FIRST): + +### Universal Rules: + +- 🛑 NEVER generate content without user input +- 📖 CRITICAL: Read the complete step file before taking any action +- 🔄 CRITICAL: When loading next step with 'C', ensure the entire file is read +- ✅ ALWAYS treat this as collaborative discovery between PM peers +- 📋 YOU ARE A FACILITATOR, not a content generator +- ✅ YOU MUST ALWAYS SPEAK OUTPUT In your Agent communication style with the config `{communication_language}` + +### Role Reinforcement: + +- ✅ You are a product-focused PM facilitator collaborating with an expert peer +- ✅ We engage in collaborative dialogue, not command-response +- ✅ You bring structured thinking and facilitation skills, while the user brings domain expertise and product vision + +### Step-Specific Rules: + +- 🎯 Focus on discovering vision and differentiator — no content generation yet +- 🚫 FORBIDDEN to generate executive summary content (that's the next step) +- 🚫 FORBIDDEN to append anything to the document in this step +- 💬 APPROACH: Natural conversation to understand what makes this product special +- 🎯 BUILD ON classification insights from step 2 + +## EXECUTION PROTOCOLS: + +- 🎯 Show your analysis before taking any action +- ⚠️ Present A/P/C menu 
after vision discovery is complete +- 📖 Update frontmatter, adding this step to the end of the list of stepsCompleted +- 🚫 FORBIDDEN to load next step until C is selected + +## CONTEXT BOUNDARIES: + +- Current document and frontmatter from steps 1 and 2 are available +- Project classification exists from step 2 (project type, domain, complexity, context) +- Input documents already loaded are in memory (product briefs, research, brainstorming, project docs) +- No executive summary content yet (that's step 2c) +- This step ONLY discovers — it does NOT write to the document + +## YOUR TASK: + +Discover the product vision and differentiator through natural conversation. Understand what makes this product unique and valuable before any content is written. + +## VISION DISCOVERY SEQUENCE: + +### 1. Acknowledge Classification Context + +Reference the classification from step 2 and use it to frame the vision conversation: + +"We've established this is a {{projectType}} in the {{domain}} domain with {{complexityLevel}} complexity. Now let's explore what makes this product special." + +### 2. Explore What Makes It Special + +Guide the conversation to uncover the product's unique value: + +- **User delight:** "What would make users say 'this is exactly what I needed'?" +- **Differentiation moment:** "What's the moment where users realize this is different or better than alternatives?" +- **Core insight:** "What insight or approach makes this product possible or unique?" +- **Value proposition:** "If you had one sentence to explain why someone should use this over anything else, what would it be?" + +### 3. Understand the Vision + +Dig deeper into the product vision: + +- **Problem framing:** "What's the real problem you're solving — not the surface symptom, but the deeper need?" +- **Future state:** "When this product is successful, what does the world look like for your users?" +- **Why now:** "Why is this the right time to build this?" + +### 4. 
Validate Understanding + +Reflect back what you've heard and confirm: + +"Here's what I'm hearing about your vision and differentiator: + +**Vision:** {{summarized_vision}} +**What Makes It Special:** {{summarized_differentiator}} +**Core Insight:** {{summarized_insight}} + +Does this capture it? Anything I'm missing?" + +Let the user confirm or refine your understanding. + +### N. Present MENU OPTIONS + +Present your understanding of the product vision for review, then display menu: + +"Based on our conversation, I have a clear picture of your product vision and what makes it special. I'll use these insights to draft the Executive Summary in the next step. + +**What would you like to do?**" + +Display: "**Select:** [A] Advanced Elicitation [P] Party Mode [C] Continue to Executive Summary (Step 2c of 13)" + +#### Menu Handling Logic: +- IF A: Read fully and follow: {advancedElicitationTask} with the current vision insights, process the enhanced insights that come back, ask user if they accept the improvements, if yes update understanding then redisplay menu, if no keep original understanding then redisplay menu +- IF P: Read fully and follow: {partyModeWorkflow} with the current vision insights, process the collaborative insights, ask user if they accept the changes, if yes update understanding then redisplay menu, if no keep original understanding then redisplay menu +- IF C: Update {outputFile} frontmatter by adding this step name to the end of stepsCompleted array, then read fully and follow: {nextStepFile} +- IF Any other: help user respond, then redisplay menu + +#### EXECUTION RULES: +- ALWAYS halt and wait for user input after presenting menu +- ONLY proceed to next step when user selects 'C' +- After other menu items execution, return to this menu + +## CRITICAL STEP COMPLETION NOTE + +ONLY WHEN [C continue option] is selected and [stepsCompleted updated], will you then read fully and follow: `{nextStepFile}` to generate the Executive Summary. 
+ +--- + +## 🚨 SYSTEM SUCCESS/FAILURE METRICS + +### ✅ SUCCESS: + +- Classification context from step 2 acknowledged and built upon +- Natural conversation to understand product vision and differentiator +- User's existing documents (briefs, research, brainstorming) leveraged for vision insights +- Vision and differentiator validated with user before proceeding +- Clear understanding established that will inform Executive Summary generation +- Frontmatter updated with stepsCompleted when C selected + +### ❌ SYSTEM FAILURE: + +- Generating executive summary or any document content (that's step 2c!) +- Appending anything to the PRD document +- Not building on classification from step 2 +- Being prescriptive instead of having natural conversation +- Proceeding without user selecting 'C' + +❌ **CRITICAL**: Reading only partial step file - leads to incomplete understanding and poor decisions +❌ **CRITICAL**: Proceeding with 'C' without fully reading and understanding the next step file + +**Master Rule:** This step is vision discovery only. No content generation, no document writing. Have natural conversations, build on what you know from classification, and establish the vision that will feed into the Executive Summary. 
diff --git a/_bmad/bmm/workflows/2-plan-workflows/create-prd/steps-c/step-02c-executive-summary.md b/_bmad/bmm/workflows/2-plan-workflows/create-prd/steps-c/step-02c-executive-summary.md new file mode 100644 index 0000000..97e328b --- /dev/null +++ b/_bmad/bmm/workflows/2-plan-workflows/create-prd/steps-c/step-02c-executive-summary.md @@ -0,0 +1,170 @@ +--- +name: 'step-02c-executive-summary' +description: 'Generate and append the Executive Summary section to the PRD document' + +# File References +nextStepFile: './step-03-success.md' +outputFile: '{planning_artifacts}/prd.md' + +# Task References +advancedElicitationTask: '{project-root}/_bmad/core/workflows/advanced-elicitation/workflow.xml' +partyModeWorkflow: '{project-root}/_bmad/core/workflows/party-mode/workflow.md' +--- + +# Step 2c: Executive Summary Generation + +**Progress: Step 2c of 13** - Next: Success Criteria + +## STEP GOAL: + +Generate the Executive Summary content using insights from classification (step 2) and vision discovery (step 2b), then append it to the PRD document. 
+ +## MANDATORY EXECUTION RULES (READ FIRST): + +### Universal Rules: + +- 🛑 NEVER generate content without user input +- 📖 CRITICAL: Read the complete step file before taking any action +- 🔄 CRITICAL: When loading next step with 'C', ensure the entire file is read +- ✅ ALWAYS treat this as collaborative discovery between PM peers +- 📋 YOU ARE A FACILITATOR — draft content collaboratively with the user, never unilaterally +- ✅ YOU MUST ALWAYS SPEAK OUTPUT in your Agent communication style with the config `{communication_language}` + +### Role Reinforcement: + +- ✅ You are a product-focused PM facilitator collaborating with an expert peer +- ✅ We engage in collaborative dialogue, not command-response +- ✅ Content is drafted collaboratively — present for review before saving + +### Step-Specific Rules: + +- 🎯 Generate Executive Summary content based on discovered insights +- 💬 Present draft content for user review and refinement before appending +- 🚫 FORBIDDEN to append content without user approval via 'C' +- 🎯 Content must be dense, precise, and zero-fluff (PRD quality standards) + +## EXECUTION PROTOCOLS: + +- 🎯 Show your analysis before taking any action +- ⚠️ Present A/P/C menu after generating executive summary content +- 💾 ONLY save when user chooses C (Continue) +- 📖 Update output file frontmatter, adding this step name to the end of the list of stepsCompleted +- 🚫 FORBIDDEN to load next step until C is selected + +## CONTEXT BOUNDARIES: + +- Current document and frontmatter from steps 1, 2, and 2b are available +- Project classification exists from step 2 (project type, domain, complexity, context) +- Vision and differentiator insights exist from step 2b +- Input documents from step 1 are available (product briefs, research, brainstorming, project docs) +- This step generates and appends the first substantive content to the PRD + +## YOUR TASK: + +Draft the Executive Summary section using all discovered insights, present it for user review, and append it to the PRD document when approved.
+ +## EXECUTIVE SUMMARY GENERATION SEQUENCE: + +### 1. Synthesize Available Context + +Review all available context before drafting: +- Classification from step 2: project type, domain, complexity, project context +- Vision and differentiator from step 2b: what makes this special, core insight +- Input documents: product briefs, research, brainstorming, project docs + +### 2. Draft Executive Summary Content + +Generate the Executive Summary section using the content structure below. Apply PRD quality standards: +- High information density — every sentence carries weight +- Zero fluff — no filler phrases or vague language +- Precise and actionable — clear, specific statements +- Dual-audience optimized — readable by humans, consumable by LLMs + +### 3. Present Draft for Review + +Present the drafted content to the user for review: + +"Here's the Executive Summary I've drafted based on our discovery work. Please review and let me know if you'd like any changes:" + +Show the full drafted content using the structure from the Content Structure section below. + +Allow the user to: +- Request specific changes to any section +- Add missing information +- Refine the language or emphasis +- Approve as-is + +### N. Present MENU OPTIONS + +Present the executive summary content for user review, then display menu: + +"Here's the Executive Summary for your PRD. Review the content above and let me know what you'd like to do." 
+ +Display: "**Select:** [A] Advanced Elicitation [P] Party Mode [C] Continue to Success Criteria (Step 3 of 13)" + +#### Menu Handling Logic: +- IF A: Read fully and follow: {advancedElicitationTask} with the current executive summary content, process the enhanced content that comes back, ask user if they accept the improvements, if yes update content then redisplay menu, if no keep original content then redisplay menu +- IF P: Read fully and follow: {partyModeWorkflow} with the current executive summary content, process the collaborative improvements, ask user if they accept the changes, if yes update content then redisplay menu, if no keep original content then redisplay menu +- IF C: Append the final content to {outputFile}, update frontmatter by adding this step name to the end of the stepsCompleted array, then read fully and follow: {nextStepFile} +- IF Any other: help user respond, then redisplay menu + +#### EXECUTION RULES: +- ALWAYS halt and wait for user input after presenting menu +- ONLY proceed to next step when user selects 'C' +- After other menu items execution, return to this menu + +## APPEND TO DOCUMENT: + +When user selects 'C', append the following content structure directly to the document: + +```markdown +## Executive Summary + +{vision_alignment_content} + +### What Makes This Special + +{product_differentiator_content} + +## Project Classification + +{project_classification_content} +``` + +Where: +- `{vision_alignment_content}` — Product vision, target users, and the problem being solved. Dense, precise summary drawn from step 2b vision discovery. +- `{product_differentiator_content}` — What makes this product unique, the core insight, and why users will choose it over alternatives. Drawn from step 2b differentiator discovery. +- `{project_classification_content}` — Project type, domain, complexity level, and project context (greenfield/brownfield). Drawn from step 2 classification. 
+ +## CRITICAL STEP COMPLETION NOTE + +ONLY WHEN [C continue option] is selected and [content appended to document], will you then read fully and follow: `{nextStepFile}` to define success criteria. + +--- + +## 🚨 SYSTEM SUCCESS/FAILURE METRICS + +### ✅ SUCCESS: + +- Executive Summary drafted using insights from steps 2 and 2b +- Content meets PRD quality standards (dense, precise, zero-fluff) +- Draft presented to user for review before saving +- User given opportunity to refine content +- Content properly appended to document when C selected +- A/P/C menu presented and handled correctly +- Frontmatter updated with stepsCompleted when C selected + +### ❌ SYSTEM FAILURE: + +- Generating content without incorporating discovered vision and classification +- Appending content without user selecting 'C' +- Producing vague, fluffy, or low-density content +- Not presenting draft for user review +- Not presenting A/P/C menu after content generation +- Skipping directly to next step without appending content + +❌ **CRITICAL**: Reading only partial step file - leads to incomplete understanding and poor decisions +❌ **CRITICAL**: Proceeding with 'C' without fully reading and understanding the next step file +❌ **CRITICAL**: Making decisions without complete understanding of step requirements and protocols + +**Master Rule:** Generate high-quality Executive Summary content from discovered insights. Present for review, refine collaboratively, and only save when the user approves. This is the first substantive content in the PRD — it sets the quality bar for everything that follows. 
diff --git a/_bmad/bmm/workflows/2-plan-workflows/create-prd/steps-v/step-v-01-discovery.md b/_bmad/bmm/workflows/2-plan-workflows/create-prd/steps-v/step-v-01-discovery.md index b79e12f..6c591c2 100644 --- a/_bmad/bmm/workflows/2-plan-workflows/create-prd/steps-v/step-v-01-discovery.md +++ b/_bmad/bmm/workflows/2-plan-workflows/create-prd/steps-v/step-v-01-discovery.md @@ -70,14 +70,22 @@ This file contains the BMAD PRD philosophy, standards, and validation criteria t **If PRD path provided as invocation parameter:** - Use provided path -**If no PRD path provided:** -"**PRD Validation Workflow** +**If no PRD path provided, auto-discover:** +- Search `{planning_artifacts}` for files matching `*prd*.md` +- Also check for sharded PRDs: `{planning_artifacts}/*prd*/*.md` -Which PRD would you like to validate? +**If exactly ONE PRD found:** +- Use it automatically +- Inform user: "Found PRD: {discovered_path} — using it for validation." -Please provide the path to the PRD file you want to validate." +**If MULTIPLE PRDs found:** +- List all discovered PRDs with numbered options +- "I found multiple PRDs. Which one would you like to validate?" +- Wait for user selection -**Wait for user to provide PRD path.** +**If NO PRDs found:** +- "I couldn't find any PRD files in {planning_artifacts}. Please provide the path to the PRD file you want to validate." +- Wait for user to provide PRD path. ### 3. Validate PRD Exists and Load diff --git a/_bmad/bmm/workflows/2-plan-workflows/create-prd/workflow-validate-prd.md b/_bmad/bmm/workflows/2-plan-workflows/create-prd/workflow-validate-prd.md index 67a1aaf..e217451 100644 --- a/_bmad/bmm/workflows/2-plan-workflows/create-prd/workflow-validate-prd.md +++ b/_bmad/bmm/workflows/2-plan-workflows/create-prd/workflow-validate-prd.md @@ -60,6 +60,4 @@ Load and read full config from {main_config} and resolve: "**Validate Mode: Validating an existing PRD against BMAD standards.**" -Prompt for PRD path: "Which PRD would you like to validate? 
Please provide the path to the PRD.md file." - Then read fully and follow: `{validateWorkflow}` (steps-v/step-v-01-discovery.md) diff --git a/_bmad/bmm/workflows/4-implementation/code-review/workflow.yaml b/_bmad/bmm/workflows/4-implementation/code-review/workflow.yaml index 5b5f6b2..c6edf84 100644 --- a/_bmad/bmm/workflows/4-implementation/code-review/workflow.yaml +++ b/_bmad/bmm/workflows/4-implementation/code-review/workflow.yaml @@ -12,7 +12,6 @@ document_output_language: "{config_source}:document_output_language" date: system-generated planning_artifacts: "{config_source}:planning_artifacts" implementation_artifacts: "{config_source}:implementation_artifacts" -output_folder: "{implementation_artifacts}" sprint_status: "{implementation_artifacts}/sprint-status.yaml" # Workflow components @@ -21,10 +20,7 @@ instructions: "{installed_path}/instructions.xml" validation: "{installed_path}/checklist.md" template: false -variables: - # Project context - project_context: "**/project-context.md" - story_dir: "{implementation_artifacts}" +project_context: "**/project-context.md" # Smart input file references - handles both whole docs and sharded docs # Priority: Whole document first, then sharded version diff --git a/_bmad/bmm/workflows/4-implementation/correct-course/instructions.md b/_bmad/bmm/workflows/4-implementation/correct-course/instructions.md index 430239a..bbe2c21 100644 --- a/_bmad/bmm/workflows/4-implementation/correct-course/instructions.md +++ b/_bmad/bmm/workflows/4-implementation/correct-course/instructions.md @@ -10,6 +10,7 @@ + Load {project_context} for coding standards and project-wide patterns (if exists) Confirm change trigger and gather user description of the issue Ask: "What specific issue or change has been identified that requires navigation?" 
Verify access to required project documents: diff --git a/_bmad/bmm/workflows/4-implementation/correct-course/workflow.yaml b/_bmad/bmm/workflows/4-implementation/correct-course/workflow.yaml index 318b5a7..6eb4b7f 100644 --- a/_bmad/bmm/workflows/4-implementation/correct-course/workflow.yaml +++ b/_bmad/bmm/workflows/4-implementation/correct-course/workflow.yaml @@ -12,8 +12,7 @@ date: system-generated implementation_artifacts: "{config_source}:implementation_artifacts" planning_artifacts: "{config_source}:planning_artifacts" project_knowledge: "{config_source}:project_knowledge" -output_folder: "{implementation_artifacts}" -sprint_status: "{implementation_artifacts}/sprint-status.yaml" +project_context: "**/project-context.md" # Smart input file references - handles both whole docs and sharded docs # Priority: Whole document first, then sharded version @@ -51,6 +50,5 @@ input_file_patterns: installed_path: "{project-root}/_bmad/bmm/workflows/4-implementation/correct-course" template: false instructions: "{installed_path}/instructions.md" -validation: "{installed_path}/checklist.md" checklist: "{installed_path}/checklist.md" default_output_file: "{planning_artifacts}/sprint-change-proposal-{date}.md" diff --git a/_bmad/bmm/workflows/4-implementation/create-story/checklist.md b/_bmad/bmm/workflows/4-implementation/create-story/checklist.md index 55e6c39..6fc6789 100644 --- a/_bmad/bmm/workflows/4-implementation/create-story/checklist.md +++ b/_bmad/bmm/workflows/4-implementation/create-story/checklist.md @@ -49,7 +49,7 @@ This is a COMPETITION to create the **ULTIMATE story context** that makes LLM de ### **Required Inputs:** - **Story file**: The story file to review and improve -- **Workflow variables**: From workflow.yaml (story_dir, output_folder, epics_file, etc.) +- **Workflow variables**: From workflow.yaml (implementation_artifacts, epics_file, etc.) - **Source documents**: Epics, architecture, etc. 
(discovered or provided) - **Validation framework**: `validate-workflow.xml` (handles checklist execution) @@ -65,7 +65,7 @@ You will systematically re-do the entire story creation process, but with a crit 2. **Load the story file**: `{story_file_path}` (provided by user or discovered) 3. **Load validation framework**: `{project-root}/_bmad/core/tasks/validate-workflow.xml` 4. **Extract metadata**: epic_num, story_num, story_key, story_title from story file -5. **Resolve all workflow variables**: story_dir, output_folder, epics_file, architecture_file, etc. +5. **Resolve all workflow variables**: implementation_artifacts, epics_file, architecture_file, etc. 6. **Understand current status**: What story implementation guidance is currently provided? **Note:** If running in fresh context, user should provide the story file path being reviewed. If running from create-story workflow, the validation framework will automatically discover the checklist and story file. diff --git a/_bmad/bmm/workflows/4-implementation/create-story/instructions.xml b/_bmad/bmm/workflows/4-implementation/create-story/instructions.xml index 81eb822..f943337 100644 --- a/_bmad/bmm/workflows/4-implementation/create-story/instructions.xml +++ b/_bmad/bmm/workflows/4-implementation/create-story/instructions.xml @@ -192,7 +192,8 @@ (As a, I want, so that) - Detailed acceptance criteria (already BDD formatted) - Technical requirements specific to this story - Business context and value - Success criteria - Load previous story file: {{story_dir}}/{{epic_num}}-{{previous_story_num}}-*.md **PREVIOUS STORY INTELLIGENCE:** - + Find {{previous_story_num}}: scan {implementation_artifacts} for the story file in epic {{epic_num}} with the highest story number less than {{story_num}} + Load previous story file: {implementation_artifacts}/{{epic_num}}-{{previous_story_num}}-*.md **PREVIOUS STORY INTELLIGENCE:** - Dev notes and learnings from previous story - Review feedback and corrections needed - Files that 
were created/modified and their patterns - Testing approaches that worked/didn't work - Problems encountered and solutions found - Code patterns established Extract all learnings that could impact current story implementation diff --git a/_bmad/bmm/workflows/4-implementation/create-story/workflow.yaml b/_bmad/bmm/workflows/4-implementation/create-story/workflow.yaml index 1f3ac97..991f78c 100644 --- a/_bmad/bmm/workflows/4-implementation/create-story/workflow.yaml +++ b/_bmad/bmm/workflows/4-implementation/create-story/workflow.yaml @@ -6,11 +6,11 @@ author: "BMad" config_source: "{project-root}/_bmad/bmm/config.yaml" user_name: "{config_source}:user_name" communication_language: "{config_source}:communication_language" +document_output_language: "{config_source}:document_output_language" +user_skill_level: "{config_source}:user_skill_level" date: system-generated planning_artifacts: "{config_source}:planning_artifacts" implementation_artifacts: "{config_source}:implementation_artifacts" -output_folder: "{implementation_artifacts}" -story_dir: "{implementation_artifacts}" # Workflow components installed_path: "{project-root}/_bmad/bmm/workflows/4-implementation/create-story" @@ -19,18 +19,14 @@ instructions: "{installed_path}/instructions.xml" validation: "{installed_path}/checklist.md" # Variables and inputs -variables: - sprint_status: "{implementation_artifacts}/sprint-status.yaml" # Primary source for story tracking - epics_file: "{planning_artifacts}/epics.md" # Enhanced epics+stories with BDD and source hints - prd_file: "{planning_artifacts}/prd.md" # Fallback for requirements (if not in epics file) - architecture_file: "{planning_artifacts}/architecture.md" # Fallback for constraints (if not in epics file) - ux_file: "{planning_artifacts}/*ux*.md" # Fallback for UX requirements (if not in epics file) - story_title: "" # Will be elicited if not derivable - -# Project context +sprint_status: "{implementation_artifacts}/sprint-status.yaml" # Primary source for 
story tracking +epics_file: "{planning_artifacts}/epics.md" # Enhanced epics+stories with BDD and source hints +prd_file: "{planning_artifacts}/prd.md" # Fallback for requirements (if not in epics file) +architecture_file: "{planning_artifacts}/architecture.md" # Fallback for constraints (if not in epics file) +ux_file: "{planning_artifacts}/*ux*.md" # Fallback for UX requirements (if not in epics file) +story_title: "" # Will be elicited if not derivable project_context: "**/project-context.md" - -default_output_file: "{story_dir}/{{story_key}}.md" +default_output_file: "{implementation_artifacts}/{{story_key}}.md" # Smart input file references - Simplified for enhanced approach # The epics+stories file should contain everything needed with source hints diff --git a/_bmad/bmm/workflows/4-implementation/dev-story/instructions.xml b/_bmad/bmm/workflows/4-implementation/dev-story/instructions.xml index 6150944..3c4989f 100644 --- a/_bmad/bmm/workflows/4-implementation/dev-story/instructions.xml +++ b/_bmad/bmm/workflows/4-implementation/dev-story/instructions.xml @@ -78,7 +78,7 @@ - Search {story_dir} for stories directly + Search {implementation_artifacts} for stories directly Find stories with "ready-for-dev" status in files Look for story files matching pattern: *-*-*.md Read each candidate story file to check Status section @@ -114,7 +114,7 @@ Store the found story_key (e.g., "1-2-user-authentication") for later status updates - Find matching story file in {story_dir} using story_key pattern: {{story_key}}.md + Find matching story file in {implementation_artifacts} using story_key pattern: {{story_key}}.md Read COMPLETE story file from discovered path diff --git a/_bmad/bmm/workflows/4-implementation/dev-story/workflow.yaml b/_bmad/bmm/workflows/4-implementation/dev-story/workflow.yaml index daf152b..c8a85a0 100644 --- a/_bmad/bmm/workflows/4-implementation/dev-story/workflow.yaml +++ b/_bmad/bmm/workflows/4-implementation/dev-story/workflow.yaml @@ -4,12 +4,10 
@@ author: "BMad" # Critical variables from config config_source: "{project-root}/_bmad/bmm/config.yaml" -output_folder: "{config_source}:output_folder" user_name: "{config_source}:user_name" communication_language: "{config_source}:communication_language" user_skill_level: "{config_source}:user_skill_level" document_output_language: "{config_source}:document_output_language" -story_dir: "{config_source}:implementation_artifacts" date: system-generated # Workflow components diff --git a/_bmad/bmm/workflows/4-implementation/retrospective/instructions.md b/_bmad/bmm/workflows/4-implementation/retrospective/instructions.md index 0175031..018ef6e 100644 --- a/_bmad/bmm/workflows/4-implementation/retrospective/instructions.md +++ b/_bmad/bmm/workflows/4-implementation/retrospective/instructions.md @@ -31,6 +31,7 @@ PARTY MODE PROTOCOL: +Load {project_context} for project-wide patterns and conventions (if exists) Explain to {user_name} the epic discovery process using natural dialogue @@ -80,7 +81,7 @@ Bob (Scrum Master): "I'm having trouble detecting the completed epic from {sprin PRIORITY 3: Fallback to stories folder -Scan {story_directory} for highest numbered story files +Scan {implementation_artifacts} for highest numbered story files Extract epic numbers from story filenames (pattern: epic-X-Y-story-name.md) Set {{detected_epic}} = highest epic number found @@ -170,7 +171,7 @@ Bob (Scrum Master): "Before we start the team discussion, let me review all the Charlie (Senior Dev): "Good idea - those dev notes always have gold in them." -For each story in epic {{epic_number}}, read the complete story file from {story_directory}/{{epic_number}}-{{story_num}}-\*.md +For each story in epic {{epic_number}}, read the complete story file from {implementation_artifacts}/{{epic_number}}-{{story_num}}-*.md Extract and analyze from each story: @@ -261,14 +262,14 @@ Bob (Scrum Master): "We'll get to all of it. 
But first, let me load the previous Calculate previous epic number: {{prev_epic_num}} = {{epic_number}} - 1 - Search for previous retrospective using pattern: {retrospectives_folder}/epic-{{prev_epic_num}}-retro-*.md + Search for previous retrospectives using pattern: {implementation_artifacts}/epic-{{prev_epic_num}}-retro-*.md - + -Bob (Scrum Master): "I found our retrospective from Epic {{prev_epic_num}}. Let me see what we committed to back then..." +Bob (Scrum Master): "I found our retrospectives from Epic {{prev_epic_num}}. Let me see what we committed to back then..." - Read the complete previous retrospective file + Read the previous retrospectives Extract key elements: - **Action items committed**: What did the team agree to improve? @@ -365,7 +366,7 @@ Alice (Product Owner): "Good thinking - helps us connect what we learned to what Attempt to load next epic using selective loading strategy: **Try sharded first (more specific):** -Check if file exists: {planning_artifacts}/epic\*/epic-{{next_epic_num}}.md +Check if file exists: {planning_artifacts}/epic*/epic-{{next_epic_num}}.md Load {planning_artifacts}/*epic*/epic-{{next_epic_num}}.md @@ -374,7 +375,7 @@ Alice (Product Owner): "Good thinking - helps us connect what we learned to what **Fallback to whole document:** -Check if file exists: {planning_artifacts}/epic\*.md +Check if file exists: {planning_artifacts}/epic*.md Load entire epics document @@ -1302,7 +1303,7 @@ Bob (Scrum Master): "See you all when prep work is done. Meeting adjourned!" -Ensure retrospectives folder exists: {retrospectives_folder} +Ensure retrospectives folder exists: {implementation_artifacts} Create folder if it doesn't exist Generate comprehensive retrospective summary document including: @@ -1322,11 +1323,11 @@ Bob (Scrum Master): "See you all when prep work is done. Meeting adjourned!" 
- Commitments and next steps Format retrospective document as readable markdown with clear sections -Set filename: {retrospectives_folder}/epic-{{epic_number}}-retro-{date}.md +Set filename: {implementation_artifacts}/epic-{{epic_number}}-retro-{date}.md Save retrospective document -✅ Retrospective document saved: {retrospectives_folder}/epic-{{epic_number}}-retro-{date}.md +✅ Retrospective document saved: {implementation_artifacts}/epic-{{epic_number}}-retro-{date}.md Update {sprint_status_file} to mark retrospective as completed @@ -1365,7 +1366,7 @@ Retrospective document was saved successfully, but {sprint_status_file} may need - Epic {{epic_number}}: {{epic_title}} reviewed - Retrospective Status: completed -- Retrospective saved: {retrospectives_folder}/epic-{{epic_number}}-retro-{date}.md +- Retrospective saved: {implementation_artifacts}/epic-{{epic_number}}-retro-{date}.md **Commitments Made:** @@ -1375,7 +1376,7 @@ Retrospective document was saved successfully, but {sprint_status_file} may need **Next Steps:** -1. **Review retrospective summary**: {retrospectives_folder}/epic-{{epic_number}}-retro-{date}.md +1. **Review retrospective summary**: {implementation_artifacts}/epic-{{epic_number}}-retro-{date}.md 2. 
**Execute preparation sprint** (Est: {{prep_days}} days) - Complete {{critical_count}} critical path items diff --git a/_bmad/bmm/workflows/4-implementation/retrospective/workflow.yaml b/_bmad/bmm/workflows/4-implementation/retrospective/workflow.yaml index b92ecaf..773c7f2 100644 --- a/_bmad/bmm/workflows/4-implementation/retrospective/workflow.yaml +++ b/_bmad/bmm/workflows/4-implementation/retrospective/workflow.yaml @@ -4,7 +4,6 @@ description: "Run after epic completion to review overall success, extract lesso author: "BMad" config_source: "{project-root}/_bmad/bmm/config.yaml" -output_folder: "{config_source}:implementation_artifacts}" user_name: "{config_source}:user_name" communication_language: "{config_source}:communication_language" user_skill_level: "{config_source}:user_skill_level" @@ -12,6 +11,7 @@ document_output_language: "{config_source}:document_output_language" date: system-generated planning_artifacts: "{config_source}:planning_artifacts" implementation_artifacts: "{config_source}:implementation_artifacts" +project_context: "**/project-context.md" installed_path: "{project-root}/_bmad/bmm/workflows/4-implementation/retrospective" template: false @@ -51,5 +51,3 @@ input_file_patterns: # Required files sprint_status_file: "{implementation_artifacts}/sprint-status.yaml" -story_directory: "{implementation_artifacts}" -retrospectives_folder: "{implementation_artifacts}" diff --git a/_bmad/bmm/workflows/4-implementation/sprint-planning/instructions.md b/_bmad/bmm/workflows/4-implementation/sprint-planning/instructions.md index c4f4bd4..316d2fe 100644 --- a/_bmad/bmm/workflows/4-implementation/sprint-planning/instructions.md +++ b/_bmad/bmm/workflows/4-implementation/sprint-planning/instructions.md @@ -23,6 +23,7 @@ +Load {project_context} for project-wide patterns and conventions (if exists) Communicate in {communication_language} with {user_name} Look for all files matching `{epics_pattern}` in {epics_location} Could be a single `epics.md` file or 
multiple `epic-1.md`, `epic-2.md` files diff --git a/_bmad/bmm/workflows/4-implementation/sprint-planning/workflow.yaml b/_bmad/bmm/workflows/4-implementation/sprint-planning/workflow.yaml index 7b15763..6c5d22d 100644 --- a/_bmad/bmm/workflows/4-implementation/sprint-planning/workflow.yaml +++ b/_bmad/bmm/workflows/4-implementation/sprint-planning/workflow.yaml @@ -9,7 +9,6 @@ communication_language: "{config_source}:communication_language" date: system-generated implementation_artifacts: "{config_source}:implementation_artifacts" planning_artifacts: "{config_source}:planning_artifacts" -output_folder: "{implementation_artifacts}" # Workflow components installed_path: "{project-root}/_bmad/bmm/workflows/4-implementation/sprint-planning" @@ -18,24 +17,21 @@ template: "{installed_path}/sprint-status-template.yaml" validation: "{installed_path}/checklist.md" # Variables and inputs -variables: - # Project context - project_context: "**/project-context.md" - # Project identification - project_name: "{config_source}:project_name" +project_context: "**/project-context.md" +project_name: "{config_source}:project_name" - # Tracking system configuration - tracking_system: "file-system" # Options: file-system, Future will support other options from config of mcp such as jira, linear, trello - project_key: "NOKEY" # Placeholder for tracker integrations; file-system uses a no-op key - story_location: "{config_source}:implementation_artifacts" # Relative path for file-system, Future will support URL for Jira/Linear/Trello - story_location_absolute: "{config_source}:implementation_artifacts" # Absolute path for file operations +# Tracking system configuration +tracking_system: "file-system" # Options: file-system, Future will support other options from config of mcp such as jira, linear, trello +project_key: "NOKEY" # Placeholder for tracker integrations; file-system uses a no-op key +story_location: "{implementation_artifacts}" # Relative path for file-system, Future will 
support URL for Jira/Linear/Trello +story_location_absolute: "{implementation_artifacts}" # Absolute path for file operations - # Source files (file-system only) - epics_location: "{planning_artifacts}" # Directory containing epic*.md files - epics_pattern: "epic*.md" # Pattern to find epic files +# Source files (file-system only) +epics_location: "{planning_artifacts}" # Directory containing epic*.md files +epics_pattern: "epic*.md" # Pattern to find epic files - # Output configuration - status_file: "{implementation_artifacts}/sprint-status.yaml" +# Output configuration +status_file: "{implementation_artifacts}/sprint-status.yaml" # Smart input file references - handles both whole docs and sharded docs # Priority: Whole document first, then sharded version @@ -43,8 +39,8 @@ variables: input_file_patterns: epics: description: "All epics with user stories" - whole: "{output_folder}/*epic*.md" - sharded: "{output_folder}/*epic*/*.md" + whole: "{planning_artifacts}/*epic*.md" + sharded: "{planning_artifacts}/*epic*/*.md" load_strategy: "FULL_LOAD" # Output configuration diff --git a/_bmad/bmm/workflows/4-implementation/sprint-status/instructions.md b/_bmad/bmm/workflows/4-implementation/sprint-status/instructions.md index c058644..4182e1f 100644 --- a/_bmad/bmm/workflows/4-implementation/sprint-status/instructions.md +++ b/_bmad/bmm/workflows/4-implementation/sprint-status/instructions.md @@ -24,6 +24,7 @@ + Load {project_context} for project-wide patterns and conventions (if exists) Try {sprint_status_file} ❌ sprint-status.yaml not found. 
diff --git a/_bmad/bmm/workflows/4-implementation/sprint-status/workflow.yaml b/_bmad/bmm/workflows/4-implementation/sprint-status/workflow.yaml index 8946f02..f27d570 100644 --- a/_bmad/bmm/workflows/4-implementation/sprint-status/workflow.yaml +++ b/_bmad/bmm/workflows/4-implementation/sprint-status/workflow.yaml @@ -5,22 +5,17 @@ author: "BMad" # Critical variables from config config_source: "{project-root}/_bmad/bmm/config.yaml" -output_folder: "{config_source}:output_folder" user_name: "{config_source}:user_name" communication_language: "{config_source}:communication_language" document_output_language: "{config_source}:document_output_language" -date: system-generated implementation_artifacts: "{config_source}:implementation_artifacts" -planning_artifacts: "{config_source}:planning_artifacts" # Workflow components installed_path: "{project-root}/_bmad/bmm/workflows/4-implementation/sprint-status" instructions: "{installed_path}/instructions.md" # Inputs -variables: - sprint_status_file: "{implementation_artifacts}/sprint-status.yaml" - tracking_system: "file-system" +sprint_status_file: "{implementation_artifacts}/sprint-status.yaml" # Smart input file references input_file_patterns: diff --git a/_bmad/bmm/workflows/bmad-quick-flow/quick-dev/workflow.md b/_bmad/bmm/workflows/bmad-quick-flow/quick-dev/workflow.md index 3fbeb13..8c6a190 100644 --- a/_bmad/bmm/workflows/bmad-quick-flow/quick-dev/workflow.md +++ b/_bmad/bmm/workflows/bmad-quick-flow/quick-dev/workflow.md @@ -28,7 +28,7 @@ This uses **step-file architecture** for focused execution: Load config from `{project-root}/_bmad/bmm/config.yaml` and resolve: - `user_name`, `communication_language`, `user_skill_level` -- `output_folder`, `planning_artifacts`, `implementation_artifacts` +- `planning_artifacts`, `implementation_artifacts` - `date` as system-generated current datetime - ✅ YOU MUST ALWAYS SPEAK OUTPUT In your Agent communication style with the config `{communication_language}` diff --git 
a/_bmad/bmm/workflows/bmad-quick-flow/quick-spec/steps/step-01-understand.md b/_bmad/bmm/workflows/bmad-quick-flow/quick-spec/steps/step-01-understand.md index d338f24..f0622f2 100644 --- a/_bmad/bmm/workflows/bmad-quick-flow/quick-spec/steps/step-01-understand.md +++ b/_bmad/bmm/workflows/bmad-quick-flow/quick-spec/steps/step-01-understand.md @@ -76,7 +76,7 @@ a) **Before asking detailed questions, do a rapid scan to understand the landsca b) **Check for existing context docs:** -- Check `{output_folder}` and `{planning_artifacts}`for planning documents (PRD, architecture, epics, research) +- Check `{implementation_artifacts}` and `{planning_artifacts}`for planning documents (PRD, architecture, epics, research) - Check for `**/project-context.md` - if it exists, skim for patterns and conventions - Check for any existing stories or specs related to user's request diff --git a/_bmad/bmm/workflows/bmad-quick-flow/quick-spec/workflow.md b/_bmad/bmm/workflows/bmad-quick-flow/quick-spec/workflow.md index 7c41b94..cc4fdf2 100644 --- a/_bmad/bmm/workflows/bmad-quick-flow/quick-spec/workflow.md +++ b/_bmad/bmm/workflows/bmad-quick-flow/quick-spec/workflow.md @@ -68,9 +68,10 @@ This uses **step-file architecture** for disciplined execution: Load and read full config from `{main_config}` and resolve: -- `project_name`, `output_folder`, `planning_artifacts`, `implementation_artifacts`, `user_name` +- `project_name`, `planning_artifacts`, `implementation_artifacts`, `user_name` - `communication_language`, `document_output_language`, `user_skill_level` - `date` as system-generated current datetime +- `project_context` = `**/project-context.md` (load if exists) - ✅ YOU MUST ALWAYS SPEAK OUTPUT In your Agent communication style with the config `{communication_language}` ### 2. 
First Step Execution diff --git a/_bmad/bmm/workflows/document-project/instructions.md b/_bmad/bmm/workflows/document-project/instructions.md index 2f567fa..0354be6 100644 --- a/_bmad/bmm/workflows/document-project/instructions.md +++ b/_bmad/bmm/workflows/document-project/instructions.md @@ -8,56 +8,8 @@ This router determines workflow mode and delegates to specialized sub-workflows - - - - mode: data - data_request: project_config - - - - {{suggestion}} - Note: Documentation workflow can run standalone. Continuing without progress tracking. - Set standalone_mode = true - Set status_file_found = false - - - - Store {{status_file_path}} for later updates - Set status_file_found = true - - - - Note: This is a greenfield project. Documentation workflow is typically for brownfield projects. - Continue anyway to document planning artifacts? (y/n) - - Exit workflow - - - - - - mode: validate - calling_workflow: document-project - - - - {{warning}} - Note: This may be auto-invoked by prd for brownfield documentation. - Continue with documentation? (y/n) - - {{suggestion}} - Exit workflow - - - - - - - -SMART LOADING STRATEGY: Check state file FIRST before loading any CSV files - -Check for existing state file at: {output_folder}/project-scan-report.json + +Check for existing state file at: {project_knowledge}/project-scan-report.json Read state file and extract: timestamps, mode, scan_level, current_step, completed_steps, project_classification @@ -66,21 +18,21 @@ I found an in-progress workflow state from {{last_updated}}. 
-**Current Progress:** + **Current Progress:** -- Mode: {{mode}} -- Scan Level: {{scan_level}} -- Completed Steps: {{completed_steps_count}}/{{total_steps}} -- Last Step: {{current_step}} -- Project Type(s): {{cached_project_types}} + - Mode: {{mode}} + - Scan Level: {{scan_level}} + - Completed Steps: {{completed_steps_count}}/{{total_steps}} + - Last Step: {{current_step}} + - Project Type(s): {{cached_project_types}} -Would you like to: + Would you like to: -1. **Resume from where we left off** - Continue from step {{current_step}} -2. **Start fresh** - Archive old state and begin new scan -3. **Cancel** - Exit without changes + 1. **Resume from where we left off** - Continue from step {{current_step}} + 2. **Start fresh** - Archive old state and begin new scan + 3. **Cancel** - Exit without changes -Your choice [1/2/3]: + Your choice [1/2/3]: @@ -107,8 +59,8 @@ Your choice [1/2/3]: - Create archive directory: {output_folder}/.archive/ - Move old state file to: {output_folder}/.archive/project-scan-report-{{timestamp}}.json + Create archive directory: {project_knowledge}/.archive/ + Move old state file to: {project_knowledge}/.archive/project-scan-report-{{timestamp}}.json Set resume_mode = false Continue to Step 0.5 @@ -120,7 +72,7 @@ Your choice [1/2/3]: Display: "Found old state file (>24 hours). Starting fresh scan." - Archive old state file to: {output_folder}/.archive/project-scan-report-{{timestamp}}.json + Archive old state file to: {project_knowledge}/.archive/project-scan-report-{{timestamp}}.json Set resume_mode = false Continue to Step 0.5 @@ -128,7 +80,7 @@ Your choice [1/2/3]: -Check if {output_folder}/index.md exists +Check if {project_knowledge}/index.md exists Read existing index.md to extract metadata (date, project structure, parts count) @@ -175,47 +127,4 @@ Your choice [1/2/3]: - - - - - mode: update - action: complete_workflow - workflow_name: document-project - - - - Status updated! 
- - - -**✅ Document Project Workflow Complete, {user_name}!** - -**Documentation Generated:** - -- Mode: {{workflow_mode}} -- Scan Level: {{scan_level}} -- Output: {output_folder}/index.md and related files - -{{#if status_file_found}} -**Status Updated:** - -- Progress tracking updated - -**Next Steps:** - -- **Next required:** {{next_workflow}} ({{next_agent}} agent) - -Check status anytime with: `workflow-status` -{{else}} -**Next Steps:** -Since no workflow is in progress: - -- Refer to the BMM workflow guide if unsure what to do next -- Or run `workflow-init` to create a workflow path and get guided next steps - {{/if}} - - - - diff --git a/_bmad/bmm/workflows/document-project/templates/project-scan-report-schema.json b/_bmad/bmm/workflows/document-project/templates/project-scan-report-schema.json index 8133e15..69e0598 100644 --- a/_bmad/bmm/workflows/document-project/templates/project-scan-report-schema.json +++ b/_bmad/bmm/workflows/document-project/templates/project-scan-report-schema.json @@ -45,9 +45,9 @@ "type": "string", "description": "Absolute path to project root directory" }, - "output_folder": { + "project_knowledge": { "type": "string", - "description": "Absolute path to output folder" + "description": "Absolute path to project knowledge folder" }, "completed_steps": { "type": "array", diff --git a/_bmad/bmm/workflows/document-project/workflow.yaml b/_bmad/bmm/workflows/document-project/workflow.yaml index 4667d7c..be9600c 100644 --- a/_bmad/bmm/workflows/document-project/workflow.yaml +++ b/_bmad/bmm/workflows/document-project/workflow.yaml @@ -6,7 +6,7 @@ author: "BMad" # Critical variables config_source: "{project-root}/_bmad/bmm/config.yaml" -output_folder: "{config_source}:project_knowledge" +project_knowledge: "{config_source}:project_knowledge" user_name: "{config_source}:user_name" communication_language: "{config_source}:communication_language" document_output_language: "{config_source}:document_output_language" diff --git 
a/_bmad/bmm/workflows/document-project/workflows/deep-dive-instructions.md b/_bmad/bmm/workflows/document-project/workflows/deep-dive-instructions.md index c88dfb0..637621c 100644 --- a/_bmad/bmm/workflows/document-project/workflows/deep-dive-instructions.md +++ b/_bmad/bmm/workflows/document-project/workflows/deep-dive-instructions.md @@ -194,7 +194,7 @@ This will read EVERY file in this area. Proceed? [y/n] Load complete deep-dive template from: {installed_path}/templates/deep-dive-template.md Fill template with all collected data from steps 13b-13d -Write filled template to: {output_folder}/deep-dive-{{sanitized_target_name}}.md +Write filled template to: {project_knowledge}/deep-dive-{{sanitized_target_name}}.md Validate deep-dive document completeness deep_dive_documentation @@ -241,7 +241,7 @@ Detailed exhaustive analysis of specific areas: ## Deep-Dive Documentation Complete! ✓ -**Generated:** {output_folder}/deep-dive-{{target_name}}.md +**Generated:** {project_knowledge}/deep-dive-{{target_name}}.md **Files Analyzed:** {{file_count}} **Lines of Code Scanned:** {{total_loc}} **Time Taken:** ~{{duration}} @@ -255,7 +255,7 @@ Detailed exhaustive analysis of specific areas: - Related code and reuse opportunities - Implementation guidance -**Index Updated:** {output_folder}/index.md now includes link to this deep-dive +**Index Updated:** {project_knowledge}/index.md now includes link to this deep-dive ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ @@ -278,7 +278,7 @@ Your choice [1/2]: All deep-dive documentation complete! 
-**Master Index:** {output_folder}/index.md +**Master Index:** {project_knowledge}/index.md **Deep-Dives Generated:** {{deep_dive_count}} These comprehensive docs are now ready for: diff --git a/_bmad/bmm/workflows/document-project/workflows/deep-dive.yaml b/_bmad/bmm/workflows/document-project/workflows/deep-dive.yaml index a333cc4..c7b85c0 100644 --- a/_bmad/bmm/workflows/document-project/workflows/deep-dive.yaml +++ b/_bmad/bmm/workflows/document-project/workflows/deep-dive.yaml @@ -8,7 +8,7 @@ parent_workflow: "{project-root}/_bmad/bmm/workflows/document-project/workflow.y # Critical variables inherited from parent config_source: "{project-root}/_bmad/bmb/config.yaml" -output_folder: "{config_source}:output_folder" +project_knowledge: "{config_source}:project_knowledge" user_name: "{config_source}:user_name" date: system-generated diff --git a/_bmad/bmm/workflows/document-project/workflows/full-scan-instructions.md b/_bmad/bmm/workflows/document-project/workflows/full-scan-instructions.md index 1340f75..8a3621d 100644 --- a/_bmad/bmm/workflows/document-project/workflows/full-scan-instructions.md +++ b/_bmad/bmm/workflows/document-project/workflows/full-scan-instructions.md @@ -43,7 +43,7 @@ This workflow uses a single comprehensive CSV file to intelligently document you -Check if {output_folder}/index.md exists +Check if {project_knowledge}/index.md exists Read existing index.md to extract metadata (date, project structure, parts count) @@ -127,7 +127,7 @@ Your choice [1/2/3] (default: 1): Display: "Using Exhaustive Scan (reading all source files)" -Initialize state file: {output_folder}/project-scan-report.json +Initialize state file: {project_knowledge}/project-scan-report.json Every time you touch the state file, record: step id, human-readable summary (what you actually did), precise timestamp, and any outputs written. Vague phrases are unacceptable. 
Write initial state: { @@ -136,7 +136,7 @@ Your choice [1/2/3] (default: 1): "mode": "{{workflow_mode}}", "scan_level": "{{scan_level}}", "project_root": "{{project_root_path}}", -"output_folder": "{{output_folder}}", +"project_knowledge": "{{project_knowledge}}", "completed_steps": [], "current_step": "step_1", "findings": {}, @@ -325,7 +325,7 @@ findings.batches_completed: [ Build API contracts catalog -IMMEDIATELY write to: {output_folder}/api-contracts-{part_id}.md +IMMEDIATELY write to: {project_knowledge}/api-contracts-{part_id}.md Validate document has all required sections Update state file with output generated PURGE detailed API data, keep only: "{{api_count}} endpoints documented" @@ -346,7 +346,7 @@ findings.batches_completed: [ Build database schema documentation -IMMEDIATELY write to: {output_folder}/data-models-{part_id}.md +IMMEDIATELY write to: {project_knowledge}/data-models-{part_id}.md Validate document completeness Update state file with output generated PURGE detailed schema data, keep only: "{{table_count}} tables documented" @@ -805,7 +805,7 @@ When a document SHOULD be generated but wasn't (due to quick scan, missing data, Show summary of all generated files: -Generated in {{output_folder}}/: +Generated in {{project_knowledge}}/: {{file_list_with_sizes}} @@ -823,7 +823,7 @@ Generated in {{output_folder}}/: 3. Extract document metadata from each match for user selection -Read {output_folder}/index.md +Read {project_knowledge}/index.md Scan for incomplete documentation markers: Step 1: Search for exact pattern "_(To be generated)_" (case-sensitive) @@ -1065,9 +1065,9 @@ Enter number(s) separated by commas (e.g., "1,3,5"), or type 'all': ## Project Documentation Complete! 
✓ -**Location:** {{output_folder}}/ +**Location:** {{project_knowledge}}/ -**Master Index:** {{output_folder}}/index.md +**Master Index:** {{project_knowledge}}/index.md 👆 This is your primary entry point for AI-assisted development **Generated Documentation:** @@ -1076,9 +1076,9 @@ Enter number(s) separated by commas (e.g., "1,3,5"), or type 'all': **Next Steps:** 1. Review the index.md to familiarize yourself with the documentation structure -2. When creating a brownfield PRD, point the PRD workflow to: {{output_folder}}/index.md -3. For UI-only features: Reference {{output_folder}}/architecture-{{ui_part_id}}.md -4. For API-only features: Reference {{output_folder}}/architecture-{{api_part_id}}.md +2. When creating a brownfield PRD, point the PRD workflow to: {{project_knowledge}}/index.md +3. For UI-only features: Reference {{project_knowledge}}/architecture-{{ui_part_id}}.md +4. For API-only features: Reference {{project_knowledge}}/architecture-{{api_part_id}}.md 5. For full-stack features: Reference both part architectures + integration-architecture.md **Verification Recap:** @@ -1101,6 +1101,6 @@ When ready to plan new features, run the PRD workflow and provide this index as - Write final state file -Display: "State file saved: {{output_folder}}/project-scan-report.json" +Display: "State file saved: {{project_knowledge}}/project-scan-report.json" diff --git a/_bmad/bmm/workflows/document-project/workflows/full-scan.yaml b/_bmad/bmm/workflows/document-project/workflows/full-scan.yaml index f62aba9..272baed 100644 --- a/_bmad/bmm/workflows/document-project/workflows/full-scan.yaml +++ b/_bmad/bmm/workflows/document-project/workflows/full-scan.yaml @@ -8,7 +8,7 @@ parent_workflow: "{project-root}/_bmad/bmm/workflows/document-project/workflow.y # Critical variables inherited from parent config_source: "{project-root}/_bmad/bmb/config.yaml" -output_folder: "{config_source}:output_folder" +project_knowledge: "{config_source}:project_knowledge" user_name: 
"{config_source}:user_name" date: system-generated diff --git a/_bmad/bmm/workflows/qa/automate/workflow.yaml b/_bmad/bmm/workflows/qa/automate/workflow.yaml index 847365d..f1119e9 100644 --- a/_bmad/bmm/workflows/qa/automate/workflow.yaml +++ b/_bmad/bmm/workflows/qa/automate/workflow.yaml @@ -5,7 +5,6 @@ author: "BMad" # Critical variables from config config_source: "{project-root}/_bmad/bmm/config.yaml" -output_folder: "{config_source}:output_folder" implementation_artifacts: "{config_source}:implementation_artifacts" user_name: "{config_source}:user_name" communication_language: "{config_source}:communication_language" @@ -19,10 +18,8 @@ validation: "{installed_path}/checklist.md" template: false # Variables and inputs -variables: - # Directory paths - test_dir: "{project-root}/tests" # Root test directory - source_dir: "{project-root}" # Source code directory +test_dir: "{project-root}/tests" # Root test directory +source_dir: "{project-root}" # Source code directory # Output configuration default_output_file: "{implementation_artifacts}/tests/test-summary.md" diff --git a/_bmad/cis/config.yaml b/_bmad/cis/config.yaml index f0625e6..34e07f1 100644 --- a/_bmad/cis/config.yaml +++ b/_bmad/cis/config.yaml @@ -1,7 +1,7 @@ # CIS Module Configuration # Generated by BMAD installer -# Version: 6.0.0-Beta.8 -# Date: 2026-02-12T20:59:55.871Z +# Version: 6.0.1 +# Date: 2026-02-18T19:38:59.436Z visual_tools: intermediate diff --git a/_bmad/core/agents/bmad-master.md b/_bmad/core/agents/bmad-master.md index b7ac0a8..5f204b7 100644 --- a/_bmad/core/agents/bmad-master.md +++ b/_bmad/core/agents/bmad-master.md @@ -6,7 +6,7 @@ description: "BMad Master Executor, Knowledge Custodian, and Workflow Orchestrat You must fully embody this agent's persona and follow all activation instructions exactly as specified. NEVER break character until given an exit command. 
```xml - + Load persona from this current agent file (already in context) 🚨 IMMEDIATE ACTION REQUIRED - BEFORE ANY OUTPUT: @@ -42,7 +42,7 @@ You must fully embody this agent's persona and follow all activation instruction Master Task Executor + BMad Expert + Guiding Facilitator Orchestrator Master-level expert in the BMAD Core Platform and all loaded modules with comprehensive knowledge of all resources, tasks, and workflows. Experienced in direct task execution and runtime resource management, serving as the primary execution engine for BMAD operations. Direct and comprehensive, refers to himself in the 3rd person. Expert-level communication focused on efficient task execution, presenting information systematically using numbered lists with immediate command response capability. - - "Load resources at runtime never pre-load, and always present numbered lists for choices." + - Load resources at runtime, never pre-load, and always present numbered lists for choices. [MH] Redisplay Menu Help diff --git a/_bmad/core/config.yaml b/_bmad/core/config.yaml index fe673af..c604efb 100644 --- a/_bmad/core/config.yaml +++ b/_bmad/core/config.yaml @@ -1,7 +1,7 @@ # CORE Module Configuration # Generated by BMAD installer -# Version: 6.0.0-Beta.8 -# Date: 2026-02-12T20:59:55.871Z +# Version: 6.0.1 +# Date: 2026-02-18T19:38:59.436Z user_name: Sepehr communication_language: French diff --git a/_bmad/external_model.rs b/_bmad/external_model.rs new file mode 100644 index 0000000..6f1ed67 --- /dev/null +++ b/_bmad/external_model.rs @@ -0,0 +1,555 @@ +//! External Component Model Interface +//! +//! This module provides support for external component models via: +//! - Dynamic library loading (.dll/.so) via FFI +//! - HTTP API calls to external services +//! +//! ## Architecture +//! +//! The external model interface allows integration of proprietary or vendor-supplied +//! component models that cannot be implemented natively in Rust. +//! +//! ## FFI Interface (DLL/SO) +//! +//! 
External libraries must implement the `entropyk_model` C ABI: +//! +//! ```c +//! // Required exported functions: +//! int entropyk_model_compute(double* inputs, double* outputs, int n_in, int n_out); +//! int entropyk_model_jacobian(double* inputs, double* jacobian, int n_in, int n_out); +//! const char* entropyk_model_name(void); +//! const char* entropyk_model_version(void); +//! ``` +//! +//! ## HTTP API Interface +//! +//! External services must provide REST endpoints: +//! +//! - `POST /compute`: Accepts JSON with inputs, returns JSON with outputs +//! - `POST /jacobian`: Accepts JSON with inputs, returns JSON with Jacobian matrix + +use crate::ComponentError; +use serde::{Deserialize, Serialize}; +use std::path::PathBuf; +use std::sync::Arc; + +/// Configuration for an external model. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ExternalModelConfig { + /// Unique identifier for this model + pub id: String, + /// Model type (ffi or http) + pub model_type: ExternalModelType, + /// Number of inputs expected + pub n_inputs: usize, + /// Number of outputs produced + pub n_outputs: usize, + /// Optional timeout in milliseconds + #[serde(default = "default_timeout")] + pub timeout_ms: u64, +} + +fn default_timeout() -> u64 { + 5000 +} + +/// Type of external model interface. +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub enum ExternalModelType { + /// Dynamic library (.dll on Windows, .so on Linux, .dylib on macOS) + Ffi { + /// Path to the library file + library_path: PathBuf, + /// Optional function name prefix + function_prefix: Option, + }, + /// HTTP REST API + Http { + /// Base URL for the API + base_url: String, + /// Optional API key for authentication + api_key: Option, + }, +} + +/// Trait for external model implementations. +/// +/// This trait abstracts over FFI and HTTP interfaces, providing +/// a unified interface for the solver. +pub trait ExternalModel: Send + Sync { + /// Returns the model identifier. 
+ fn id(&self) -> &str; + + /// Returns the number of inputs. + fn n_inputs(&self) -> usize; + + /// Returns the number of outputs. + fn n_outputs(&self) -> usize; + + /// Computes outputs from inputs. + /// + /// # Arguments + /// + /// * `inputs` - Input values (length = n_inputs) + /// + /// # Returns + /// + /// Output values (length = n_outputs) + fn compute(&self, inputs: &[f64]) -> Result, ExternalModelError>; + + /// Computes the Jacobian matrix. + /// + /// # Arguments + /// + /// * `inputs` - Input values + /// + /// # Returns + /// + /// Jacobian matrix as a flat array (row-major, n_outputs × n_inputs) + fn jacobian(&self, inputs: &[f64]) -> Result, ExternalModelError>; + + /// Returns model metadata. + fn metadata(&self) -> ExternalModelMetadata; +} + +/// Metadata about an external model. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ExternalModelMetadata { + /// Model name + pub name: String, + /// Model version + pub version: String, + /// Model description + pub description: Option, + /// Input names/units + pub input_names: Vec, + /// Output names/units + pub output_names: Vec, +} + +/// Errors from external model operations. 
+#[derive(Debug, Clone, thiserror::Error)] +pub enum ExternalModelError { + /// Library loading failed + #[error("Failed to load library: {0}")] + LibraryLoad(String), + + /// Function not found in library + #[error("Function not found: {0}")] + FunctionNotFound(String), + + /// Computation failed + #[error("Computation failed: {0}")] + ComputationFailed(String), + + /// Invalid input dimensions + #[error("Invalid input dimensions: expected {expected}, got {actual}")] + InvalidInputDimensions { + /// Expected number of inputs + expected: usize, + /// Actual number received + actual: usize, + }, + + /// HTTP request failed + #[error("HTTP request failed: {0}")] + HttpError(String), + + /// Timeout exceeded + #[error("Operation timed out after {0}ms")] + Timeout(u64), + + /// JSON parsing error + #[error("JSON error: {0}")] + JsonError(String), + + /// Model not initialized + #[error("Model not initialized")] + NotInitialized, +} + +impl From for ComponentError { + fn from(err: ExternalModelError) -> Self { + ComponentError::InvalidState(format!("External model error: {}", err)) + } +} + +/// Request body for HTTP compute endpoint. +#[derive(Debug, Serialize)] +#[allow(dead_code)] +struct ComputeRequest { + inputs: Vec, +} + +/// Response from HTTP compute endpoint. +#[derive(Debug, Deserialize)] +#[allow(dead_code)] +struct ComputeResponse { + outputs: Vec, +} + +/// Request body for HTTP Jacobian endpoint. +#[derive(Debug, Serialize)] +#[allow(dead_code)] +struct JacobianRequest { + inputs: Vec, +} + +/// Response from HTTP Jacobian endpoint. +#[derive(Debug, Deserialize)] +#[allow(dead_code)] +struct JacobianResponse { + jacobian: Vec, +} + +/// FFI-based external model (stub implementation). +/// +/// This is a placeholder that compiles without external dependencies. +/// Full FFI support requires the `libloading` crate and unsafe code. 
+#[cfg(not(feature = "ffi"))] +pub struct FfiModel { + config: ExternalModelConfig, + metadata: ExternalModelMetadata, +} + +#[cfg(not(feature = "ffi"))] +impl FfiModel { + /// Creates a new FFI model (stub - returns error without ffi feature). + pub fn new(_config: ExternalModelConfig) -> Result { + Err(ExternalModelError::NotInitialized) + } + + /// Creates with mock data for testing. + pub fn new_mock( + config: ExternalModelConfig, + metadata: ExternalModelMetadata, + ) -> Result { + Ok(Self { config, metadata }) + } +} + +#[cfg(not(feature = "ffi"))] +impl ExternalModel for FfiModel { + fn id(&self) -> &str { + &self.config.id + } + + fn n_inputs(&self) -> usize { + self.config.n_inputs + } + + fn n_outputs(&self) -> usize { + self.config.n_outputs + } + + fn compute(&self, _inputs: &[f64]) -> Result, ExternalModelError> { + // Stub implementation + Ok(vec![0.0; self.config.n_outputs]) + } + + fn jacobian(&self, _inputs: &[f64]) -> Result, ExternalModelError> { + // Stub implementation - returns identity matrix + let n = self.config.n_inputs * self.config.n_outputs; + Ok(vec![0.0; n]) + } + + fn metadata(&self) -> ExternalModelMetadata { + self.metadata.clone() + } +} + +/// HTTP-based external model (stub implementation). +/// +/// This is a placeholder that compiles without external dependencies. +/// Full HTTP support requires the `reqwest` crate. +#[cfg(not(feature = "http"))] +pub struct HttpModel { + config: ExternalModelConfig, + metadata: ExternalModelMetadata, +} + +#[cfg(not(feature = "http"))] +impl HttpModel { + /// Creates a new HTTP model (stub - returns error without http feature). + pub fn new(_config: ExternalModelConfig) -> Result { + Err(ExternalModelError::NotInitialized) + } + + /// Creates with mock data for testing. 
+ pub fn new_mock( + config: ExternalModelConfig, + metadata: ExternalModelMetadata, + ) -> Result { + Ok(Self { config, metadata }) + } +} + +#[cfg(not(feature = "http"))] +impl ExternalModel for HttpModel { + fn id(&self) -> &str { + &self.config.id + } + + fn n_inputs(&self) -> usize { + self.config.n_inputs + } + + fn n_outputs(&self) -> usize { + self.config.n_outputs + } + + fn compute(&self, _inputs: &[f64]) -> Result, ExternalModelError> { + Ok(vec![0.0; self.config.n_outputs]) + } + + fn jacobian(&self, _inputs: &[f64]) -> Result, ExternalModelError> { + Ok(vec![0.0; self.config.n_inputs * self.config.n_outputs]) + } + + fn metadata(&self) -> ExternalModelMetadata { + self.metadata.clone() + } +} + +/// Thread-safe wrapper for external models. +/// +/// This wrapper ensures safe concurrent access to external models, +/// which may not be thread-safe themselves. +pub struct ThreadSafeExternalModel { + inner: Arc, +} + +impl ThreadSafeExternalModel { + /// Creates a new thread-safe wrapper. + pub fn new(model: impl ExternalModel + 'static) -> Self { + Self { + inner: Arc::new(model), + } + } + + /// Creates from an existing Arc. + pub fn from_arc(model: Arc) -> Self { + Self { inner: model } + } + + /// Returns a reference to the inner model. + pub fn inner(&self) -> &dyn ExternalModel { + self.inner.as_ref() + } +} + +impl Clone for ThreadSafeExternalModel { + fn clone(&self) -> Self { + Self { + inner: Arc::clone(&self.inner), + } + } +} + +impl std::fmt::Debug for ThreadSafeExternalModel { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("ThreadSafeExternalModel") + .field("id", &self.inner.id()) + .finish() + } +} + +/// Mock external model for testing. +#[derive(Debug, Clone)] +pub struct MockExternalModel { + id: String, + n_inputs: usize, + n_outputs: usize, + compute_fn: fn(&[f64]) -> Vec, +} + +impl MockExternalModel { + /// Creates a new mock model. 
+ pub fn new( + id: impl Into, + n_inputs: usize, + n_outputs: usize, + compute_fn: fn(&[f64]) -> Vec, + ) -> Self { + Self { + id: id.into(), + n_inputs, + n_outputs, + compute_fn, + } + } + + /// Creates a simple linear model: y = x + pub fn linear_passthrough(n: usize) -> Self { + Self::new("linear_passthrough", n, n, |x| x.to_vec()) + } + + /// Creates a model that doubles inputs. + pub fn doubler(n: usize) -> Self { + Self::new("doubler", n, n, |x| x.iter().map(|v| v * 2.0).collect()) + } +} + +impl ExternalModel for MockExternalModel { + fn id(&self) -> &str { + &self.id + } + + fn n_inputs(&self) -> usize { + self.n_inputs + } + + fn n_outputs(&self) -> usize { + self.n_outputs + } + + fn compute(&self, inputs: &[f64]) -> Result, ExternalModelError> { + if inputs.len() != self.n_inputs { + return Err(ExternalModelError::InvalidInputDimensions { + expected: self.n_inputs, + actual: inputs.len(), + }); + } + Ok((self.compute_fn)(inputs)) + } + + fn jacobian(&self, inputs: &[f64]) -> Result, ExternalModelError> { + // Default: finite difference approximation + let h = 1e-6; + let mut jacobian = vec![0.0; self.n_outputs * self.n_inputs]; + + for j in 0..self.n_inputs { + let mut inputs_plus = inputs.to_vec(); + let mut inputs_minus = inputs.to_vec(); + inputs_plus[j] += h; + inputs_minus[j] -= h; + + let y_plus = self.compute(&inputs_plus)?; + let y_minus = self.compute(&inputs_minus)?; + + for i in 0..self.n_outputs { + jacobian[i * self.n_inputs + j] = (y_plus[i] - y_minus[i]) / (2.0 * h); + } + } + + Ok(jacobian) + } + + fn metadata(&self) -> ExternalModelMetadata { + ExternalModelMetadata { + name: self.id.clone(), + version: "1.0.0".to_string(), + description: Some("Mock external model for testing".to_string()), + input_names: (0..self.n_inputs).map(|i| format!("input_{}", i)).collect(), + output_names: (0..self.n_outputs) + .map(|i| format!("output_{}", i)) + .collect(), + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn 
test_mock_external_model_compute() { + let model = MockExternalModel::doubler(3); + let result = model.compute(&[1.0, 2.0, 3.0]).unwrap(); + assert_eq!(result, vec![2.0, 4.0, 6.0]); + } + + #[test] + fn test_mock_external_model_dimensions() { + let model = MockExternalModel::doubler(3); + assert_eq!(model.n_inputs(), 3); + assert_eq!(model.n_outputs(), 3); + } + + #[test] + fn test_mock_external_model_invalid_input() { + let model = MockExternalModel::doubler(3); + let result = model.compute(&[1.0, 2.0]); + assert!(result.is_err()); + } + + #[test] + fn test_mock_external_model_jacobian() { + let model = MockExternalModel::doubler(2); + let jac = model.jacobian(&[1.0, 2.0]).unwrap(); + + // Jacobian of y = 2x should be [[2, 0], [0, 2]] + assert!((jac[0] - 2.0).abs() < 0.01); + assert!((jac[1] - 0.0).abs() < 0.01); + assert!((jac[2] - 0.0).abs() < 0.01); + assert!((jac[3] - 2.0).abs() < 0.01); + } + + #[test] + fn test_thread_safe_wrapper() { + let model = MockExternalModel::doubler(2); + let wrapped = ThreadSafeExternalModel::new(model); + + let result = wrapped.inner().compute(&[1.0, 2.0]).unwrap(); + assert_eq!(result, vec![2.0, 4.0]); + } + + #[test] + fn test_thread_safe_clone() { + let model = MockExternalModel::doubler(2); + let wrapped = ThreadSafeExternalModel::new(model); + let cloned = wrapped.clone(); + + assert_eq!(wrapped.inner().id(), cloned.inner().id()); + } + + #[test] + fn test_external_model_metadata() { + let model = MockExternalModel::doubler(2); + let meta = model.metadata(); + + assert_eq!(meta.name, "doubler"); + assert_eq!(meta.version, "1.0.0"); + assert_eq!(meta.input_names, vec!["input_0", "input_1"]); + assert_eq!(meta.output_names, vec!["output_0", "output_1"]); + } + + #[test] + fn test_linear_passthrough_model() { + let model = MockExternalModel::linear_passthrough(3); + let result = model.compute(&[1.0, 2.0, 3.0]).unwrap(); + assert_eq!(result, vec![1.0, 2.0, 3.0]); + } + + #[test] + fn test_external_model_config() { + let config = 
ExternalModelConfig { + id: "test_model".to_string(), + model_type: ExternalModelType::Http { + base_url: "http://localhost:8080".to_string(), + api_key: Some("secret".to_string()), + }, + n_inputs: 4, + n_outputs: 2, + timeout_ms: 3000, + }; + + assert_eq!(config.id, "test_model"); + assert_eq!(config.n_inputs, 4); + assert_eq!(config.n_outputs, 2); + assert_eq!(config.timeout_ms, 3000); + } + + #[test] + fn test_error_conversion() { + let err = ExternalModelError::ComputationFailed("test error".to_string()); + let component_err: ComponentError = err.into(); + + match component_err { + ComponentError::InvalidState(msg) => { + assert!(msg.contains("External model error")); + } + _ => panic!("Expected InvalidState error"), + } + } +} diff --git a/crates/components/Cargo.toml b/crates/components/Cargo.toml index 4b9f637..dc0bae6 100644 --- a/crates/components/Cargo.toml +++ b/crates/components/Cargo.toml @@ -7,9 +7,17 @@ description = "Core component trait definitions for Entropyk thermodynamic simul license = "MIT OR Apache-2.0" repository = "https://github.com/entropyk/entropyk" +[features] +default = [] +ffi = [] +http = [] + [dependencies] -# Core types will be added when core crate is created -# entropyk-core = { path = "../core" } +# Core types from Story 1.2 +entropyk-core = { path = "../core" } + +# Fluid properties backend (Story 5.1 - FluidBackend integration) +entropyk-fluids = { path = "../fluids" } # Error handling thiserror = "1.0" @@ -18,8 +26,8 @@ thiserror = "1.0" serde = { version = "1.0", features = ["derive"] } [dev-dependencies] -# Testing utilities -# tokio-test = "0.4" +# Floating-point assertions +approx = "0.5" [lib] name = "entropyk_components" diff --git a/crates/components/src/compressor.rs b/crates/components/src/compressor.rs new file mode 100644 index 0000000..6cab6c1 --- /dev/null +++ b/crates/components/src/compressor.rs @@ -0,0 +1,2018 @@ +//! Compressor Component Implementation (AHRI 540 Standard + SST/SDT Curves) +//! +//! 
This module provides a compressor component implementation based on the +//! AHRI 540 standard for performance rating of positive displacement +//! refrigerant compressors and compressor units. +//! +//! ## AHRI 540 Standard Equations +//! +//! The compressor is modeled using 10 coefficients (M1-M10): +//! +//! **Mass Flow Rate:** +//! ```text +//! ṁ = M1 × (1 - (P_suction/P_discharge)^(1/M2)) × ρ_suction × V_disp × N/60 +//! ``` +//! +//! **Power Consumption (Cooling):** +//! ```text +//! Ẇ = M3 + M4 × (P_discharge/P_suction) + M5 × T_suction + M6 × T_discharge +//! ``` +//! +//! **Power Consumption (Heating):** +//! ```text +//! Ẇ = M7 + M8 × (P_discharge/P_suction) + M9 × T_suction + M10 × T_discharge +//! ``` +//! +//! ## SST/SDT Polynomial Model +//! +//! Alternative model based on saturated temperatures: +//! +//! **Mass Flow Rate:** +//! ```text +//! ṁ = Σ a_ij × SST^i × SDT^j +//! ``` +//! +//! **Power Consumption:** +//! ```text +//! Ẇ = Σ b_ij × SST^i × SDT^j +//! ``` +//! +//! Where: +//! - SST = Saturated Suction Temperature (K) +//! - SDT = Saturated Discharge Temperature (K) + +use crate::polynomials::Polynomial2D; +use crate::port::{Connected, Disconnected, FluidId, Port}; +use crate::{ + CircuitId, Component, ComponentError, ConnectedPort, JacobianBuilder, OperationalState, + ResidualVector, SystemState, +}; +use entropyk_core::{Calib, Enthalpy, MassFlow, Temperature}; +use serde::{Deserialize, Serialize}; +use std::marker::PhantomData; + +/// Coefficients for AHRI 540 compressor performance model. +/// +/// The AHRI 540 standard defines 10 coefficients (M1-M10) that characterize +/// compressor performance across operating conditions. 
+#[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize)] +pub struct Ahri540Coefficients { + /// Flow coefficient (M1) + pub m1: f64, + /// Pressure ratio exponent (M2) + pub m2: f64, + /// Power coefficient - constant term for cooling (M3) + pub m3: f64, + /// Power coefficient - pressure ratio term for cooling (M4) + pub m4: f64, + /// Power coefficient - suction temperature term for cooling (M5) + pub m5: f64, + /// Power coefficient - discharge temperature term for cooling (M6) + pub m6: f64, + /// Power coefficient - constant term for heating (M7) + pub m7: f64, + /// Power coefficient - pressure ratio term for heating (M8) + pub m8: f64, + /// Power coefficient - suction temperature term for heating (M9) + pub m9: f64, + /// Power coefficient - discharge temperature term for heating (M10) + pub m10: f64, +} + +impl Ahri540Coefficients { + /// Creates a new set of AHRI 540 coefficients. + #[allow(clippy::too_many_arguments)] + /// + /// # Arguments + /// + /// * `m1` - Flow coefficient + /// * `m2` - Pressure ratio exponent + /// * `m3` - Power constant (cooling) + /// * `m4` - Power pressure ratio coefficient (cooling) + /// * `m5` - Power suction temperature coefficient (cooling) + /// * `m6` - Power discharge temperature coefficient (cooling) + /// * `m7` - Power constant (heating) + /// * `m8` - Power pressure ratio coefficient (heating) + /// * `m9` - Power suction temperature coefficient (heating) + /// * `m10` - Power discharge temperature coefficient (heating) + /// + /// # Example + /// + /// ``` + /// use entropyk_components::compressor::Ahri540Coefficients; + /// + /// let coeffs = Ahri540Coefficients::new( + /// 0.85, 2.5, // M1, M2 (flow) + /// 500.0, 1500.0, -2.5, 1.8, // M3-M6 (cooling) + /// 600.0, 1600.0, -3.0, 2.0 // M7-M10 (heating) + /// ); + /// ``` + pub fn new( + m1: f64, + m2: f64, + m3: f64, + m4: f64, + m5: f64, + m6: f64, + m7: f64, + m8: f64, + m9: f64, + m10: f64, + ) -> Self { + Self { + m1, + m2, + m3, + m4, + m5, + m6, 
+ m7, + m8, + m9, + m10, + } + } + + /// Validates that coefficients are within reasonable ranges. + /// + /// Returns an error if any coefficient is NaN or infinite, + /// or if critical coefficients are outside expected ranges. + pub fn validate(&self) -> Result<(), ComponentError> { + // Check for NaN or infinite values + let coefficients = [ + ("M1", self.m1), + ("M2", self.m2), + ("M3", self.m3), + ("M4", self.m4), + ("M5", self.m5), + ("M6", self.m6), + ("M7", self.m7), + ("M8", self.m8), + ("M9", self.m9), + ("M10", self.m10), + ]; + + for (name, value) in coefficients.iter() { + if value.is_nan() { + return Err(ComponentError::InvalidState(format!( + "Coefficient {} is NaN", + name + ))); + } + if value.is_infinite() { + return Err(ComponentError::InvalidState(format!( + "Coefficient {} is infinite", + name + ))); + } + } + + // M2 should be positive (pressure ratio exponent) + if self.m2 <= 0.0 { + return Err(ComponentError::InvalidState( + "Coefficient M2 (pressure ratio exponent) must be positive".to_string(), + )); + } + + Ok(()) + } +} + +/// Polynomial coefficients for SST/SDT based compressor model. +/// +/// This model characterizes compressor performance as a function of +/// saturated suction temperature (SST) and saturated discharge temperature (SDT). 
+/// +/// ## Model Equations +/// +/// **Mass Flow Rate:** +/// ```text +/// ṁ = Σ a_ij × SST^i × SDT^j (kg/s) +/// ``` +/// +/// **Power Consumption:** +/// ```text +/// Ẇ = Σ b_ij × SST^i × SDT^j (W) +/// ``` +/// +/// # Example +/// +/// ``` +/// use entropyk_components::compressor::SstSdtCoefficients; +/// +/// // Simple bilinear model: m_dot = a00 + a10*SST + a01*SDT + a11*SST*SDT +/// let coeffs = SstSdtCoefficients::bilinear( +/// 0.05, 0.001, 0.0005, 0.00001, // mass flow coefficients +/// 1000.0, 50.0, 30.0, 0.5 // power coefficients +/// ); +/// ``` +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct SstSdtCoefficients { + /// Mass flow rate polynomial: ṁ = f(SST, SDT) in kg/s + pub mass_flow_curve: Polynomial2D, + /// Power consumption polynomial: Ẇ = f(SST, SDT) in W + pub power_curve: Polynomial2D, +} + +impl SstSdtCoefficients { + /// Creates new SST/SDT coefficients from 2D polynomials. + pub fn new(mass_flow_curve: Polynomial2D, power_curve: Polynomial2D) -> Self { + Self { + mass_flow_curve, + power_curve, + } + } + + /// Creates a bilinear SST/SDT model. + /// + /// mass_flow = a00 + a10*SST + a01*SDT + a11*SST*SDT + /// power = b00 + b10*SST + b01*SDT + b11*SST*SDT + #[allow(clippy::too_many_arguments)] + pub fn bilinear( + mass_a00: f64, + mass_a10: f64, + mass_a01: f64, + mass_a11: f64, + power_b00: f64, + power_b10: f64, + power_b01: f64, + power_b11: f64, + ) -> Self { + Self { + mass_flow_curve: Polynomial2D::bilinear(mass_a00, mass_a10, mass_a01, mass_a11), + power_curve: Polynomial2D::bilinear(power_b00, power_b10, power_b01, power_b11), + } + } + + /// Creates a biquadratic SST/SDT model (degree 2 in both variables). 
+ pub fn biquadratic(mass_coeffs: [[f64; 3]; 3], power_coeffs: [[f64; 3]; 3]) -> Self { + let mass_vec: Vec> = mass_coeffs.iter().map(|row| row.to_vec()).collect(); + let power_vec: Vec> = power_coeffs.iter().map(|row| row.to_vec()).collect(); + + Self { + mass_flow_curve: Polynomial2D::new(mass_vec), + power_curve: Polynomial2D::new(power_vec), + } + } + + /// Calculates mass flow rate at given SST and SDT. + /// + /// # Arguments + /// + /// * `sst_k` - Saturated suction temperature in Kelvin + /// * `sdt_k` - Saturated discharge temperature in Kelvin + /// + /// # Returns + /// + /// Mass flow rate in kg/s + pub fn mass_flow_at(&self, sst_k: f64, sdt_k: f64) -> f64 { + self.mass_flow_curve.evaluate(sst_k, sdt_k) + } + + /// Calculates power consumption at given SST and SDT. + /// + /// # Arguments + /// + /// * `sst_k` - Saturated suction temperature in Kelvin + /// * `sdt_k` - Saturated discharge temperature in Kelvin + /// + /// # Returns + /// + /// Power consumption in Watts + pub fn power_at(&self, sst_k: f64, sdt_k: f64) -> f64 { + self.power_curve.evaluate(sst_k, sdt_k) + } + + /// Validates that coefficients are within reasonable ranges. + pub fn validate(&self) -> Result<(), ComponentError> { + self.mass_flow_curve.validate()?; + self.power_curve.validate()?; + Ok(()) + } +} + +/// Compressor performance model selection. +/// +/// Allows switching between AHRI 540 coefficients and SST/SDT polynomial model. 
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub enum CompressorModel {
    /// AHRI 540 standard model with 10 coefficients
    Ahri540(Ahri540Coefficients),
    /// SST/SDT polynomial model
    SstSdt(SstSdtCoefficients),
}

impl Default for CompressorModel {
    fn default() -> Self {
        CompressorModel::Ahri540(Ahri540Coefficients::new(
            0.85, 2.5, 500.0, 1500.0, -2.5, 1.8, 600.0, 1600.0, -3.0, 2.0,
        ))
    }
}

impl From<Ahri540Coefficients> for CompressorModel {
    fn from(coeffs: Ahri540Coefficients) -> Self {
        CompressorModel::Ahri540(coeffs)
    }
}

impl From<SstSdtCoefficients> for CompressorModel {
    fn from(coeffs: SstSdtCoefficients) -> Self {
        CompressorModel::SstSdt(coeffs)
    }
}

/// AHRI 540 / SST-SDT compressor component.
///
/// The compressor uses the Type-State pattern to ensure ports are connected
/// before use in simulations. It implements the [`Component`] trait for
/// integration with the solver.
///
/// # Type Parameters
///
/// * `State` - Either `Disconnected` or `Connected`, tracking connection state
///
/// # Example
///
/// ```
/// use entropyk_components::compressor::{Compressor, Ahri540Coefficients};
/// use entropyk_components::port::{FluidId, Port};
/// use entropyk_core::{Pressure, Enthalpy};
///
/// // Create coefficients
/// let coeffs = Ahri540Coefficients::new(
///     0.85, 2.5,
///     500.0, 1500.0, -2.5, 1.8,
///     600.0, 1600.0, -3.0, 2.0
/// );
///
/// // Create disconnected ports (Compressor::new accepts Disconnected ports)
/// let suction = Port::new(
///     FluidId::new("R134a"),
///     Pressure::from_bar(3.5),
///     Enthalpy::from_joules_per_kg(400000.0)
/// );
/// let discharge = Port::new(
///     FluidId::new("R134a"),
///     Pressure::from_bar(3.5), // Same pressure for validation
///     Enthalpy::from_joules_per_kg(400000.0) // Same enthalpy for validation
/// );
///
/// // Create compressor with disconnected ports
/// let compressor = Compressor::new(
///     coeffs,
///     suction,
///     discharge,
///     2900.0,  // RPM
///     0.0001,  // Displacement volume (m³/rev)
///     0.85     // Mechanical efficiency
/// ).unwrap();
/// ```
#[derive(Debug, Clone)]
pub struct Compressor<State> {
    /// Compressor performance model (AHRI 540 or SST/SDT polynomial)
    model: CompressorModel,
    /// Suction port (inlet)
    port_suction: Port<State>,
    /// Discharge port (outlet)
    port_discharge: Port<State>,
    /// Rotational speed in RPM
    speed_rpm: f64,
    /// Displacement volume in m³/revolution
    displacement_m3_per_rev: f64,
    /// Mechanical efficiency (0.0 to 1.0)
    mechanical_efficiency: f64,
    /// Calibration factors: ṁ_eff = f_m × ṁ_nominal, Ẇ_eff = f_power × Ẇ_nominal, etc.
    calib: Calib,
    /// Fluid identifier for density lookups
    fluid_id: FluidId,
    /// Circuit identifier for multi-circuit machines (FR9)
    circuit_id: CircuitId,
    /// Operational state: On, Off, or Bypass (FR6-FR8)
    operational_state: OperationalState,
    /// Phantom data for type state
    _state: PhantomData<State>,
}

impl<State> Compressor<State> {
    /// Returns both ports as a slice for solver topology.
    ///
    /// # Note
    ///
    /// This creates a temporary array on each call. For better performance
    /// in hot loops, cache the ports directly via `port_suction()` and
    /// `port_discharge()` methods.
    pub fn get_ports_slice(&self) -> [&Port<State>; 2] {
        [&self.port_suction, &self.port_discharge]
    }
}

impl Compressor<Disconnected> {
    /// Creates a new disconnected compressor with AHRI 540 model.
    ///
    /// The compressor must have its ports connected before use in simulations.
+ /// + /// # Arguments + /// + /// * `coefficients` - AHRI 540 performance coefficients + /// * `port_suction` - Suction port (disconnected) + /// * `port_discharge` - Discharge port (disconnected) + /// * `speed_rpm` - Rotational speed in RPM + /// * `displacement_m3_per_rev` - Displacement volume in m³/rev + /// * `mechanical_efficiency` - Mechanical efficiency (0.0 to 1.0) + /// + /// # Errors + /// + /// Returns an error if: + /// - Coefficients are invalid + /// - Speed is negative or zero + /// - Displacement is negative or zero + /// - Mechanical efficiency is outside [0, 1] + /// - Ports have different fluid types + pub fn new( + coefficients: Ahri540Coefficients, + port_suction: Port, + port_discharge: Port, + speed_rpm: f64, + displacement_m3_per_rev: f64, + mechanical_efficiency: f64, + ) -> Result { + Self::with_model( + CompressorModel::Ahri540(coefficients), + port_suction, + port_discharge, + speed_rpm, + displacement_m3_per_rev, + mechanical_efficiency, + ) + } + + /// Creates a new disconnected compressor with a specified model. + /// + /// Use this constructor to select between AHRI 540 and SST/SDT polynomial models. 
+ /// + /// # Arguments + /// + /// * `model` - Compressor performance model (AHRI 540 or SST/SDT) + /// * `port_suction` - Suction port (disconnected) + /// * `port_discharge` - Discharge port (disconnected) + /// * `speed_rpm` - Rotational speed in RPM + /// * `displacement_m3_per_rev` - Displacement volume in m³/rev + /// * `mechanical_efficiency` - Mechanical efficiency (0.0 to 1.0) + /// + /// # Errors + /// + /// Returns an error if: + /// - Model coefficients are invalid + /// - Speed is negative or zero + /// - Displacement is negative or zero + /// - Mechanical efficiency is outside [0, 1] + /// - Ports have different fluid types + /// + /// # Example + /// + /// ``` + /// use entropyk_components::compressor::{Compressor, CompressorModel, SstSdtCoefficients}; + /// use entropyk_components::port::{FluidId, Port}; + /// use entropyk_core::{Pressure, Enthalpy}; + /// + /// // Create SST/SDT polynomial model + /// let sst_sdt = SstSdtCoefficients::bilinear( + /// 0.05, 0.001, 0.0005, 0.00001, // mass flow coefficients + /// 1000.0, 50.0, 30.0, 0.5 // power coefficients + /// ); + /// + /// let suction = Port::new( + /// FluidId::new("R134a"), + /// Pressure::from_bar(3.5), + /// Enthalpy::from_joules_per_kg(400000.0) + /// ); + /// let discharge = Port::new( + /// FluidId::new("R134a"), + /// Pressure::from_bar(10.0), + /// Enthalpy::from_joules_per_kg(450000.0) + /// ); + /// + /// // Create compressor with SST/SDT model + /// let compressor = Compressor::with_model( + /// CompressorModel::SstSdt(sst_sdt), + /// suction, + /// discharge, + /// 2900.0, // RPM + /// 0.0001, // Displacement volume (m³/rev) + /// 0.85 // Mechanical efficiency + /// ).unwrap(); + /// ``` + pub fn with_model( + model: CompressorModel, + port_suction: Port, + port_discharge: Port, + speed_rpm: f64, + displacement_m3_per_rev: f64, + mechanical_efficiency: f64, + ) -> Result { + // Validate model coefficients + match &model { + CompressorModel::Ahri540(coeffs) => coeffs.validate()?, + 
CompressorModel::SstSdt(coeffs) => coeffs.validate()?, + } + + // Validate speed + if speed_rpm <= 0.0 { + return Err(ComponentError::InvalidState( + "Compressor speed must be positive".to_string(), + )); + } + + // Validate displacement + if displacement_m3_per_rev <= 0.0 { + return Err(ComponentError::InvalidState( + "Displacement volume must be positive".to_string(), + )); + } + + // Validate efficiency + if !(0.0..=1.0).contains(&mechanical_efficiency) { + return Err(ComponentError::InvalidState( + "Mechanical efficiency must be between 0.0 and 1.0".to_string(), + )); + } + + // Validate fluid compatibility + if port_suction.fluid_id() != port_discharge.fluid_id() { + return Err(ComponentError::InvalidState( + "Suction and discharge ports must have the same fluid type".to_string(), + )); + } + + let fluid_id = port_suction.fluid_id().clone(); + + Ok(Self { + model, + port_suction, + port_discharge, + speed_rpm, + displacement_m3_per_rev, + mechanical_efficiency, + calib: Calib::default(), + fluid_id, + circuit_id: CircuitId::default(), // Default circuit + operational_state: OperationalState::default(), // Default to On + _state: PhantomData, + }) + } + + /// Returns the fluid identifier. + pub fn fluid_id(&self) -> &FluidId { + &self.fluid_id + } + + /// Returns the suction port. + pub fn port_suction(&self) -> &Port { + &self.port_suction + } + + /// Returns the discharge port. + pub fn port_discharge(&self) -> &Port { + &self.port_discharge + } + + /// Returns the rotational speed in RPM. + pub fn speed_rpm(&self) -> f64 { + self.speed_rpm + } + + /// Returns the displacement volume in m³/rev. + pub fn displacement_m3_per_rev(&self) -> f64 { + self.displacement_m3_per_rev + } + + /// Returns the mechanical efficiency. + pub fn mechanical_efficiency(&self) -> f64 { + self.mechanical_efficiency + } + + /// Returns the compressor model (AHRI 540 or SST/SDT). 
+ pub fn model(&self) -> &CompressorModel { + &self.model + } + + /// Returns the AHRI 540 coefficients if using AHRI 540 model. + /// + /// # Returns + /// + /// `Some(&Ahri540Coefficients)` if the model is AHRI 540, `None` otherwise. + pub fn ahri540_coefficients(&self) -> Option<&Ahri540Coefficients> { + match &self.model { + CompressorModel::Ahri540(coeffs) => Some(coeffs), + _ => None, + } + } + + /// Returns the SST/SDT coefficients if using SST/SDT polynomial model. + /// + /// # Returns + /// + /// `Some(&SstSdtCoefficients)` if the model is SST/SDT, `None` otherwise. + pub fn sst_sdt_coefficients(&self) -> Option<&SstSdtCoefficients> { + match &self.model { + CompressorModel::SstSdt(coeffs) => Some(coeffs), + _ => None, + } + } + + /// Returns calibration factors (f_m, f_power, etc.). + pub fn calib(&self) -> &Calib { + &self.calib + } + + /// Sets calibration factors. + pub fn set_calib(&mut self, calib: Calib) { + self.calib = calib; + } + + /// Returns the circuit identifier. + pub fn circuit_id(&self) -> &CircuitId { + &self.circuit_id + } + + /// Sets the circuit identifier. + pub fn set_circuit_id(&mut self, circuit_id: CircuitId) { + self.circuit_id = circuit_id; + } + + /// Returns the operational state. + pub fn operational_state(&self) -> OperationalState { + self.operational_state + } + + /// Sets the operational state. + pub fn set_operational_state(&mut self, state: OperationalState) { + self.operational_state = state; + } +} + +impl Compressor { + /// Returns the suction port. + pub fn port_suction(&self) -> &Port { + &self.port_suction + } + + /// Returns the discharge port. + pub fn port_discharge(&self) -> &Port { + &self.port_discharge + } + + /// Computes the full thermodynamic state at the suction port. 
+ pub fn suction_state(&self, backend: &impl entropyk_fluids::FluidBackend) -> Result { + backend + .full_state( + entropyk_fluids::FluidId::new(self.port_suction.fluid_id().as_str()), + self.port_suction.pressure(), + self.port_suction.enthalpy(), + ) + .map_err(|e| ComponentError::CalculationFailed(format!("Failed to compute suction state: {}", e))) + } + + /// Computes the full thermodynamic state at the discharge port. + pub fn discharge_state(&self, backend: &impl entropyk_fluids::FluidBackend) -> Result { + backend + .full_state( + entropyk_fluids::FluidId::new(self.port_discharge.fluid_id().as_str()), + self.port_discharge.pressure(), + self.port_discharge.enthalpy(), + ) + .map_err(|e| ComponentError::CalculationFailed(format!("Failed to compute discharge state: {}", e))) + } + + /// Calculates the mass flow rate through the compressor. + /// + /// Uses the selected model (AHRI 540 or SST/SDT): + /// - AHRI 540: ṁ = M1 × (1 - (P_suction/P_discharge)^(1/M2)) × ρ_suction × V_disp × N/60 + /// - SST/SDT: ṁ = Σ a_ij × SST^i × SDT^j + /// + /// # Arguments + /// + /// * `density_suction` - Suction gas density in kg/m³ (used for AHRI 540 model) + /// * `sst_k` - Saturated suction temperature in Kelvin (used for SST/SDT model) + /// * `sdt_k` - Saturated discharge temperature in Kelvin (used for SST/SDT model) + /// + /// # Returns + /// + /// Returns the mass flow rate as [`MassFlow`]. + /// + /// # Errors + /// + /// Returns an error if the calculation results in numerical errors + /// (e.g., negative pressure ratio, division by zero). 
+ pub fn mass_flow_rate( + &self, + density_suction: f64, + sst_k: f64, + sdt_k: f64, + ) -> Result { + if density_suction < 0.0 { + return Err(ComponentError::InvalidState( + "Suction density cannot be negative".to_string(), + )); + } + + let p_suction = self.port_suction.pressure().to_pascals(); + let p_discharge = self.port_discharge.pressure().to_pascals(); + + // Validate pressures + if p_suction <= 0.0 { + return Err(ComponentError::NumericalError( + "Suction pressure must be positive".to_string(), + )); + } + + if p_discharge <= 0.0 { + return Err(ComponentError::NumericalError( + "Discharge pressure must be positive".to_string(), + )); + } + + let mass_flow_kg_per_s = match &self.model { + CompressorModel::Ahri540(coeffs) => { + // Calculate volumetric efficiency using inverse pressure ratio + // η_vol = 1 - (P_suction/P_discharge)^(1/M2) + let inverse_pressure_ratio = p_suction / p_discharge; + let volumetric_efficiency = 1.0 - inverse_pressure_ratio.powf(1.0 / coeffs.m2); + + if volumetric_efficiency < 0.0 { + return Err(ComponentError::NumericalError( + "Volumetric efficiency is negative - check pressure ratio and M2 coefficient" + .to_string(), + )); + } + + // Convert RPM to rev/s + let speed_rev_per_s = self.speed_rpm / 60.0; + + // Calculate mass flow (AHRI 540 nominal) + coeffs.m1 + * volumetric_efficiency + * density_suction + * self.displacement_m3_per_rev + * speed_rev_per_s + } + CompressorModel::SstSdt(coeffs) => { + // SST/SDT polynomial model + coeffs.mass_flow_at(sst_k, sdt_k) + } + }; + + // Apply calibration: ṁ_eff = f_m × ṁ_nominal + Ok(MassFlow::from_kg_per_s(mass_flow_kg_per_s * self.calib.f_m)) + } + + /// Calculates the power consumption (cooling mode). 
+ /// + /// Uses the selected model: + /// - AHRI 540: Ẇ = M3 + M4 × (P_discharge/P_suction) + M5 × T_suction + M6 × T_discharge + /// - SST/SDT: Ẇ = Σ b_ij × SST^i × SDT^j + /// + /// # Arguments + /// + /// * `t_suction` - Suction temperature + /// * `t_discharge` - Discharge temperature + /// + /// # Returns + /// + /// Returns the power consumption in Watts. + pub fn power_consumption_cooling( + &self, + t_suction: Temperature, + t_discharge: Temperature, + ) -> f64 { + let power_nominal = match &self.model { + CompressorModel::Ahri540(coeffs) => { + let pressure_ratio = self.port_discharge.pressure().to_pascals() + / self.port_suction.pressure().to_pascals(); + coeffs.m3 + + coeffs.m4 * pressure_ratio + + coeffs.m5 * t_suction.to_kelvin() + + coeffs.m6 * t_discharge.to_kelvin() + } + CompressorModel::SstSdt(coeffs) => { + coeffs.power_at(t_suction.to_kelvin(), t_discharge.to_kelvin()) + } + }; + // Ẇ_eff = f_power × Ẇ_nominal + power_nominal * self.calib.f_power + } + + /// Calculates the power consumption (heating mode). + /// + /// Uses the selected model: + /// - AHRI 540: Ẇ = M7 + M8 × (P_discharge/P_suction) + M9 × T_suction + M10 × T_discharge + /// - SST/SDT: Same as cooling mode (SST/SDT model doesn't distinguish modes) + /// + /// # Arguments + /// + /// * `t_suction` - Suction temperature + /// * `t_discharge` - Discharge temperature + /// + /// # Returns + /// + /// Returns the power consumption in Watts. 
+ pub fn power_consumption_heating( + &self, + t_suction: Temperature, + t_discharge: Temperature, + ) -> f64 { + let power_nominal = match &self.model { + CompressorModel::Ahri540(coeffs) => { + let pressure_ratio = self.port_discharge.pressure().to_pascals() + / self.port_suction.pressure().to_pascals(); + coeffs.m7 + + coeffs.m8 * pressure_ratio + + coeffs.m9 * t_suction.to_kelvin() + + coeffs.m10 * t_discharge.to_kelvin() + } + CompressorModel::SstSdt(coeffs) => { + // SST/SDT model doesn't distinguish between cooling and heating + coeffs.power_at(t_suction.to_kelvin(), t_discharge.to_kelvin()) + } + }; + // Ẇ_eff = f_power × Ẇ_nominal + power_nominal * self.calib.f_power + } + + /// Calculates the cooling capacity. + /// + /// Q̇_cool = ṁ × (h_evap_out - h_evap_in) + /// + /// # Arguments + /// + /// * `mass_flow` - Mass flow rate + /// * `h_evap_in` - Evaporator inlet enthalpy + /// * `h_evap_out` - Evaporator outlet enthalpy + /// + /// # Returns + /// + /// Returns the cooling capacity in Watts. + pub fn cooling_capacity( + &self, + mass_flow: MassFlow, + h_evap_in: Enthalpy, + h_evap_out: Enthalpy, + ) -> f64 { + mass_flow.to_kg_per_s() * (h_evap_out.to_joules_per_kg() - h_evap_in.to_joules_per_kg()) + } + + /// Calculates the heating capacity. + /// + /// Q̇_heat = ṁ × (h_cond_out - h_cond_in) + /// + /// # Arguments + /// + /// * `mass_flow` - Mass flow rate + /// * `h_cond_in` - Condenser inlet enthalpy + /// * `h_cond_out` - Condenser outlet enthalpy + /// + /// # Returns + /// + /// Returns the heating capacity in Watts. + pub fn heating_capacity( + &self, + mass_flow: MassFlow, + h_cond_in: Enthalpy, + h_cond_out: Enthalpy, + ) -> f64 { + mass_flow.to_kg_per_s() * (h_cond_out.to_joules_per_kg() - h_cond_in.to_joules_per_kg()) + } + + /// Calculates the Coefficient of Performance (COP). 
    ///
    /// COP = Q̇ / Ẇ
    ///
    /// # Arguments
    ///
    /// * `capacity` - Cooling or heating capacity in Watts
    /// * `power` - Power consumption in Watts
    ///
    /// # Returns
    ///
    /// Returns the COP. Returns an error if power is zero or negative.
    pub fn coefficient_of_performance(
        &self,
        capacity: f64,
        power: f64,
    ) -> Result {
        if power <= 0.0 {
            return Err(ComponentError::NumericalError(
                "Power must be positive for COP calculation".to_string(),
            ));
        }
        Ok(capacity / power)
    }

    /// Returns the rotational speed in RPM.
    pub fn speed_rpm(&self) -> f64 {
        self.speed_rpm
    }

    /// Returns the displacement volume in m³/rev.
    pub fn displacement_m3_per_rev(&self) -> f64 {
        self.displacement_m3_per_rev
    }

    /// Returns the mechanical efficiency.
    pub fn mechanical_efficiency(&self) -> f64 {
        self.mechanical_efficiency
    }

    // NOTE(review): `model`, `ahri540_coefficients`, `sst_sdt_coefficients`,
    // `calib`, `set_calib` (and the circuit/state accessors that follow) also
    // appear in an earlier impl block in this file. Rust rejects duplicate
    // inherent methods on the same type (E0592) unless the two impls are
    // parameterized over different typestates (`_state: PhantomData` suggests
    // a typestate design) — confirm against the struct/impl headers.
    /// Returns the compressor model (AHRI 540 or SST/SDT).
    pub fn model(&self) -> &CompressorModel {
        &self.model
    }

    /// Returns the AHRI 540 coefficients if using AHRI 540 model.
    ///
    /// # Returns
    ///
    /// `Some(&Ahri540Coefficients)` if the model is AHRI 540, `None` otherwise.
    pub fn ahri540_coefficients(&self) -> Option<&Ahri540Coefficients> {
        match &self.model {
            CompressorModel::Ahri540(coeffs) => Some(coeffs),
            _ => None,
        }
    }

    /// Returns the SST/SDT coefficients if using SST/SDT polynomial model.
    ///
    /// # Returns
    ///
    /// `Some(&SstSdtCoefficients)` if the model is SST/SDT, `None` otherwise.
    pub fn sst_sdt_coefficients(&self) -> Option<&SstSdtCoefficients> {
        match &self.model {
            CompressorModel::SstSdt(coeffs) => Some(coeffs),
            _ => None,
        }
    }

    /// Returns calibration factors (f_m, f_power, etc.).
    pub fn calib(&self) -> &Calib {
        &self.calib
    }

    /// Sets calibration factors.
    pub fn set_calib(&mut self, calib: Calib) {
        self.calib = calib;
    }

    /// Returns the circuit identifier.
    pub fn circuit_id(&self) -> &CircuitId {
        &self.circuit_id
    }

    /// Sets the circuit identifier.
    pub fn set_circuit_id(&mut self, circuit_id: CircuitId) {
        self.circuit_id = circuit_id;
    }

    /// Returns the operational state.
    pub fn operational_state(&self) -> OperationalState {
        self.operational_state
    }

    /// Sets the operational state.
    pub fn set_operational_state(&mut self, state: OperationalState) {
        self.operational_state = state;
    }
}

impl Component for Compressor {
    /// Writes the compressor's two residuals (mass-flow continuity, energy
    /// balance) into `residuals`, branching on the operational state first.
    fn compute_residuals(
        &self,
        state: &SystemState,
        residuals: &mut ResidualVector,
    ) -> Result<(), ComponentError> {
        // Validate residual vector length
        if residuals.len() != self.n_equations() {
            return Err(ComponentError::InvalidResidualDimensions {
                expected: self.n_equations(),
                actual: residuals.len(),
            });
        }

        // Handle operational states (FR6-FR8)
        match self.operational_state {
            OperationalState::Off => {
                // In Off state, mass flow is zero (FR7)
                residuals[0] = state[0]; // ṁ = 0
                residuals[1] = 0.0; // No energy transfer
                return Ok(());
            }
            OperationalState::Bypass => {
                // In Bypass state, behaves as adiabatic pipe (FR8)
                // P_in = P_out and h_in = h_out
                let p_suction = self.port_suction.pressure().to_pascals();
                let p_discharge = self.port_discharge.pressure().to_pascals();
                let h_suction = self.port_suction.enthalpy().to_joules_per_kg();
                let h_discharge = self.port_discharge.enthalpy().to_joules_per_kg();

                residuals[0] = p_suction - p_discharge; // Pressure continuity
                residuals[1] = h_suction - h_discharge; // Enthalpy continuity (adiabatic)
                return Ok(());
            }
            OperationalState::On => {
                // Normal operation - continue with AHRI 540 calculations
            }
        }

        // Validate state vector has minimum required dimensions
        // We need at least 4 values: mass_flow, h_suction, h_discharge, power
        if state.len() < 4 {
            return Err(ComponentError::InvalidStateDimensions {
                expected: 4,
                actual: state.len(),
            });
        }

        // Extract state variables
        let mass_flow_state = state[0]; // kg/s
        let h_suction = state[1]; // J/kg
        let h_discharge = state[2]; // J/kg
        let _power_state = state[3]; // W

        // Get port values
        let p_suction = self.port_suction.pressure().to_pascals();
        let p_discharge = self.port_discharge.pressure().to_pascals();

        // Calculate temperatures for SST/SDT model
        let t_suction_k = estimate_temperature(self.fluid_id.as_str(), p_suction, h_suction)?;
        let t_discharge_k = estimate_temperature(self.fluid_id.as_str(), p_discharge, h_discharge)?;

        // Calculate mass flow from selected model
        // For now, we use a simplified density calculation
        // In the future, this will come from the fluid property backend
        let density_suction = estimate_density(self.fluid_id.as_str(), p_suction, h_suction)?;
        let mass_flow_calc = self
            .mass_flow_rate(density_suction, t_suction_k, t_discharge_k)?
            .to_kg_per_s();

        // Calculate power consumption
        let power_calc = self.power_consumption_cooling(
            Temperature::from_kelvin(t_suction_k),
            Temperature::from_kelvin(t_discharge_k),
        );

        // Residual 0: Mass flow continuity
        // ṁ_calc - ṁ_state = 0
        residuals[0] = mass_flow_calc - mass_flow_state;

        // Residual 1: Energy balance
        // Power_calc - ṁ × (h_discharge - h_suction) / η_mech = 0
        let enthalpy_change = h_discharge - h_suction;
        residuals[1] = power_calc - mass_flow_state * enthalpy_change / self.mechanical_efficiency;

        Ok(())
    }

    /// Fills the 2×4 Jacobian of the residuals above. Analytic entries are
    /// used where trivial (±1, 0); enthalpy sensitivities go through a
    /// central finite difference with a 1 J/kg step (≈1e-6 relative for
    /// typical refrigerant enthalpies of a few 1e5 J/kg).
    fn jacobian_entries(
        &self,
        state: &SystemState,
        jacobian: &mut JacobianBuilder,
    ) -> Result<(), ComponentError> {
        // Validate state vector
        if state.len() < 4 {
            return Err(ComponentError::InvalidStateDimensions {
                expected: 4,
                actual: state.len(),
            });
        }

        // Extract state variables
        let mass_flow_state = state[0];
        let h_suction = state[1];
        let h_discharge = state[2];

        // Get port values
        let p_suction = self.port_suction.pressure().to_pascals();
        let p_discharge = self.port_discharge.pressure().to_pascals();

        // Calculate temperatures for SST/SDT model
        // NOTE(review): the fallbacks here (273.15 K) differ from the ones
        // used inside the closures below (300 K / 350 K) — presumably both
        // are arbitrary placeholders, but verify they were not meant to match.
        let _t_suction_k =
            estimate_temperature(self.fluid_id.as_str(), p_suction, h_suction).unwrap_or(273.15);
        let t_discharge_k = estimate_temperature(self.fluid_id.as_str(), p_discharge, h_discharge)
            .unwrap_or(273.15);

        // Row 0: Mass flow residual
        // ∂r₀/∂ṁ = -1
        jacobian.add_entry(0, 0, -1.0);

        // ∂r₀/∂h_suction - requires density derivative
        // For now, use finite difference approximation
        // (errors inside the probe collapse to 0 flow rather than aborting)
        let dr0_dh_suction = approximate_derivative(
            |h| {
                let density = estimate_density(self.fluid_id.as_str(), p_suction, h).unwrap_or(1.0);
                let t_k =
                    estimate_temperature(self.fluid_id.as_str(), p_suction, h).unwrap_or(273.15);
                self.mass_flow_rate(density, t_k, t_discharge_k)
                    .map(|m| m.to_kg_per_s())
                    .unwrap_or(0.0)
            },
            h_suction,
            1.0,
        );
        jacobian.add_entry(0, 1, dr0_dh_suction);

        // ∂r₀/∂h_discharge = 0 (mass flow doesn't depend on discharge enthalpy)
        jacobian.add_entry(0, 2, 0.0);

        // ∂r₀/∂Power = 0
        jacobian.add_entry(0, 3, 0.0);

        // Row 1: Energy residual
        // ∂r₁/∂ṁ = -(h_discharge - h_suction) / η_mech
        let dr1_dm = -(h_discharge - h_suction) / self.mechanical_efficiency;
        jacobian.add_entry(1, 0, dr1_dm);

        // ∂r₁/∂h_suction - includes power derivative and mass flow term
        let dr1_dh_suction = approximate_derivative(
            |h| {
                let t = estimate_temperature(self.fluid_id.as_str(), p_suction, h).unwrap_or(300.0);
                let t_discharge =
                    estimate_temperature(self.fluid_id.as_str(), p_discharge, h_discharge)
                        .unwrap_or(350.0);
                self.power_consumption_cooling(
                    Temperature::from_kelvin(t),
                    Temperature::from_kelvin(t_discharge),
                )
            },
            h_suction,
            1.0,
        ) + mass_flow_state / self.mechanical_efficiency;
        jacobian.add_entry(1, 1, dr1_dh_suction);

        // ∂r₁/∂h_discharge - includes power derivative and mass flow term
        let dr1_dh_discharge = approximate_derivative(
            |h| {
                let t_suction = estimate_temperature(self.fluid_id.as_str(), p_suction, h_suction)
                    .unwrap_or(300.0);
                let t =
                    estimate_temperature(self.fluid_id.as_str(), p_discharge, h).unwrap_or(350.0);
                self.power_consumption_cooling(
                    Temperature::from_kelvin(t_suction),
                    Temperature::from_kelvin(t),
                )
            },
            h_discharge,
            1.0,
        ) - mass_flow_state / self.mechanical_efficiency;
        jacobian.add_entry(1, 2, dr1_dh_discharge);

        // ∂r₁/∂Power = -1
        jacobian.add_entry(1, 3, -1.0);

        Ok(())
    }

    fn n_equations(&self) -> usize {
        2 // Mass flow residual and energy residual
    }

    fn get_ports(&self) -> &[ConnectedPort] {
        // NOTE: This returns an empty slice due to lifetime constraints.
        // Use `get_ports_slice()` method on Compressor for actual port access.
        // This is a known limitation - the Component trait needs redesign for proper port access.
        &[]
    }
}

// Mid-file `use` is unconventional but legal; it only scopes the trait name
// for the impl below.
use crate::state_machine::StateManageable;

impl StateManageable for Compressor {
    fn state(&self) -> OperationalState {
        self.operational_state
    }

    /// Validated state transition: rejects moves the state machine forbids
    /// and fires the `on_state_change` hook on success.
    fn set_state(&mut self, state: OperationalState) -> Result<(), ComponentError> {
        if self.operational_state.can_transition_to(state) {
            let from = self.operational_state;
            self.operational_state = state;
            self.on_state_change(from, state);
            Ok(())
        } else {
            Err(ComponentError::InvalidStateTransition {
                from: self.operational_state,
                to: state,
                reason: "Transition not allowed".to_string(),
            })
        }
    }

    fn can_transition_to(&self, target: OperationalState) -> bool {
        self.operational_state.can_transition_to(target)
    }

    fn circuit_id(&self) -> &CircuitId {
        &self.circuit_id
    }

    fn set_circuit_id(&mut self, circuit_id: CircuitId) {
        self.circuit_id = circuit_id;
    }
}

/// Estimates fluid density from pressure and enthalpy.
///
/// **PLACEHOLDER IMPLEMENTATION** - Will be replaced by CoolProp integration
/// in Story 2.2 (Fluid Properties Backend). Current implementation uses
/// rough approximations for R134a, R410A, and R454B.
+/// +/// # Arguments +/// +/// * `fluid_id` - Fluid identifier (e.g., "R134a") +/// * `pressure` - Pressure in Pascals +/// * `enthalpy` - Specific enthalpy in J/kg +/// +/// # Returns +/// +/// Returns the estimated density in kg/m³. +fn estimate_density(fluid_id: &str, _pressure: f64, enthalpy: f64) -> Result { + // Placeholder: simple estimation based on enthalpy for common refrigerants + // This should be replaced with proper fluid property calculations + match fluid_id { + "R134a" => { + // Rough approximation for R134a at typical conditions + // h ≈ 400 kJ/kg, ρ ≈ 20 kg/m³ (vapor) + // h ≈ 250 kJ/kg, ρ ≈ 1200 kg/m³ (liquid) + let density = if enthalpy > 350000.0 { + 20.0 // Superheated vapor + } else if enthalpy < 200000.0 { + 1200.0 // Subcooled liquid + } else { + // Linear interpolation in two-phase region + 20.0 + (1200.0 - 20.0) * (350000.0 - enthalpy) / 150000.0 + }; + Ok(density) + } + "R410A" | "R454B" => { + // Similar approximation for R410A and R454B (R454B is close to R410A properties) + let density = if enthalpy > 380000.0 { + 25.0 + } else if enthalpy < 220000.0 { + 1100.0 + } else { + 25.0 + (1100.0 - 25.0) * (380000.0 - enthalpy) / 160000.0 + }; + Ok(density) + } + _ => Err(ComponentError::InvalidState(format!( + "Unknown fluid: {}", + fluid_id + ))), + } +} + +/// Estimates fluid temperature from pressure and enthalpy. +/// +/// **PLACEHOLDER IMPLEMENTATION** - Will be replaced by CoolProp integration +/// in Story 2.2 (Fluid Properties Backend). Current implementation uses +/// rough approximations for R134a, R410A, and R454B. +/// +/// # Arguments +/// +/// * `fluid_id` - Fluid identifier (e.g., "R134a") +/// * `pressure` - Pressure in Pascals +/// * `enthalpy` - Specific enthalpy in J/kg +/// +/// # Returns +/// +/// Returns the estimated temperature in Kelvin. 
+fn estimate_temperature( + fluid_id: &str, + pressure: f64, + enthalpy: f64, +) -> Result { + // Placeholder: simple estimation based on pressure and enthalpy + match fluid_id { + "R134a" => { + // Rough approximation using ideal gas law as baseline + // T = h / cp (roughly) + let cp = 1200.0; // J/(kg·K) - approximate specific heat + let temperature_from_h = enthalpy / cp; + + // Adjust based on pressure (saturation temperature approximation) + // For R134a, saturation pressure correlation is roughly: + // log10(P) ≈ A - B/(T + C) + // We'll use a simplified approximation + let p_bar = pressure / 100000.0; + let t_sat_k = if p_bar > 0.1 { + // Simplified saturation temperature for R134a + 250.0 + 50.0 * (p_bar / 10.0).ln() + } else { + 250.0 + }; + + // Return the higher of the two (superheated or saturated) + Ok(temperature_from_h.max(t_sat_k)) + } + "R410A" | "R454B" => { + // R454B is similar to R410A for temperature estimation + let cp = 1300.0; + let temperature_from_h = enthalpy / cp; + let p_bar = pressure / 100000.0; + let t_sat_k = if p_bar > 0.1 { + 240.0 + 45.0 * (p_bar / 15.0).ln() + } else { + 240.0 + }; + Ok(temperature_from_h.max(t_sat_k)) + } + _ => Err(ComponentError::InvalidState(format!( + "Unknown fluid: {}", + fluid_id + ))), + } +} + +/// Approximates the derivative of a function using finite differences. 
+/// +/// Uses a central difference approximation: +/// f'(x) ≈ (f(x + h) - f(x - h)) / (2h) +fn approximate_derivative(f: F, x: f64, h: f64) -> f64 +where + F: Fn(f64) -> f64, +{ + (f(x + h) - f(x - h)) / (2.0 * h) +} + +#[cfg(test)] +mod tests { + use super::*; + use approx::assert_relative_eq; + use entropyk_core::Pressure; + + // Test coefficients representing a typical small compressor + fn test_coefficients() -> Ahri540Coefficients { + Ahri540Coefficients::new( + 0.85, // M1: Flow coefficient + 2.5, // M2: Pressure ratio exponent (higher value allows reasonable pressure ratios) + 500.0, // M3: Power constant (cooling) + 1500.0, // M4: Power pressure ratio (cooling) + -2.5, // M5: Power suction temp (cooling) + 1.8, // M6: Power discharge temp (cooling) + 600.0, // M7: Power constant (heating) + 1600.0, // M8: Power pressure ratio (heating) + -3.0, // M9: Power suction temp (heating) + 2.0, // M10: Power discharge temp (heating) + ) + } + + fn create_test_compressor() -> Compressor { + let coeffs = test_coefficients(); + // Create ports with same initial pressure and enthalpy to allow connection + let suction = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(3.5), + Enthalpy::from_joules_per_kg(400000.0), + ); + let discharge = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(3.5), // Same pressure for connection + Enthalpy::from_joules_per_kg(400000.0), // Same enthalpy for connection + ); + let (suction_conn, mut discharge_conn) = suction.connect(discharge).unwrap(); + + // Modify discharge pressure and enthalpy after connection + // Use moderate pressure ratio (6/3.5 ≈ 1.71) to ensure positive volumetric efficiency + discharge_conn.set_pressure(Pressure::from_bar(6.0)); + discharge_conn.set_enthalpy(Enthalpy::from_joules_per_kg(450000.0)); + + Compressor { + model: CompressorModel::Ahri540(coeffs), + port_suction: suction_conn, + port_discharge: discharge_conn, + speed_rpm: 2900.0, + displacement_m3_per_rev: 0.0001, + mechanical_efficiency: 
0.85, + calib: Calib::default(), + fluid_id: FluidId::new("R134a"), + circuit_id: CircuitId::default(), + operational_state: OperationalState::default(), + _state: PhantomData, + } + } + + #[test] + fn test_coefficient_creation() { + let coeffs = test_coefficients(); + assert_eq!(coeffs.m1, 0.85); + assert_eq!(coeffs.m2, 2.5); + assert_eq!(coeffs.m3, 500.0); + assert_eq!(coeffs.m10, 2.0); + } + + #[test] + fn test_coefficient_validation_valid() { + let coeffs = test_coefficients(); + assert!(coeffs.validate().is_ok()); + } + + #[test] + fn test_coefficient_validation_nan() { + let mut coeffs = test_coefficients(); + coeffs.m1 = f64::NAN; + assert!(coeffs.validate().is_err()); + } + + #[test] + fn test_coefficient_validation_infinite() { + let mut coeffs = test_coefficients(); + coeffs.m2 = f64::INFINITY; + assert!(coeffs.validate().is_err()); + } + + #[test] + fn test_coefficient_validation_negative_m2() { + let mut coeffs = test_coefficients(); + coeffs.m2 = -0.5; + assert!(coeffs.validate().is_err()); + } + + #[test] + fn test_disconnected_compressor_creation() { + let coeffs = test_coefficients(); + let suction = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(3.5), + Enthalpy::from_joules_per_kg(400000.0), + ); + let discharge = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(15.0), + Enthalpy::from_joules_per_kg(450000.0), + ); + + let compressor = Compressor::new(coeffs, suction, discharge, 2900.0, 0.0001, 0.85); + + assert!(compressor.is_ok()); + let comp = compressor.unwrap(); + assert_eq!(comp.speed_rpm(), 2900.0); + assert_eq!(comp.displacement_m3_per_rev(), 0.0001); + assert_eq!(comp.mechanical_efficiency(), 0.85); + } + + #[test] + fn test_compressor_creation_zero_speed() { + let coeffs = test_coefficients(); + let suction = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(3.5), + Enthalpy::from_joules_per_kg(400000.0), + ); + let discharge = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(15.0), + 
Enthalpy::from_joules_per_kg(450000.0), + ); + + let result = Compressor::new( + coeffs, suction, discharge, 0.0, // Invalid speed + 0.0001, 0.85, + ); + + assert!(result.is_err()); + } + + #[test] + fn test_compressor_creation_negative_displacement() { + let coeffs = test_coefficients(); + let suction = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(3.5), + Enthalpy::from_joules_per_kg(400000.0), + ); + let discharge = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(15.0), + Enthalpy::from_joules_per_kg(450000.0), + ); + + let result = Compressor::new( + coeffs, suction, discharge, 2900.0, -0.0001, // Invalid displacement + 0.85, + ); + + assert!(result.is_err()); + } + + #[test] + fn test_compressor_creation_invalid_efficiency() { + let coeffs = test_coefficients(); + let suction = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(3.5), + Enthalpy::from_joules_per_kg(400000.0), + ); + let discharge = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(15.0), + Enthalpy::from_joules_per_kg(450000.0), + ); + + let result = Compressor::new( + coeffs, suction, discharge, 2900.0, 0.0001, 1.5, // Invalid efficiency + ); + + assert!(result.is_err()); + } + + #[test] + fn test_compressor_creation_different_fluids() { + let coeffs = test_coefficients(); + let suction = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(3.5), + Enthalpy::from_joules_per_kg(400000.0), + ); + let discharge = Port::new( + FluidId::new("R410A"), // Different fluid + Pressure::from_bar(15.0), + Enthalpy::from_joules_per_kg(450000.0), + ); + + let result = Compressor::new(coeffs, suction, discharge, 2900.0, 0.0001, 0.85); + + assert!(result.is_err()); + } + + #[test] + fn test_mass_flow_calculation() { + let compressor = create_test_compressor(); + let density = 20.0; // kg/m³ (approximate vapor density) + let t_suction_k = 278.15; // 5°C in Kelvin + let t_discharge_k = 318.15; // 45°C in Kelvin + + let mass_flow = compressor + .mass_flow_rate(density, 
t_suction_k, t_discharge_k) + .unwrap(); + + // Verify mass flow is positive + assert!(mass_flow.to_kg_per_s() > 0.0); + + // Verify calculation: M1 * (1 - (P_suction/P_discharge)^(1/M2)) * rho * Vdisp * N/60 + // Using inverse pressure ratio from create_test_compressor (3.5/6.0) + let inverse_pressure_ratio: f64 = 3.5 / 6.0; + let volumetric_eff = 1.0 - inverse_pressure_ratio.powf(1.0 / 2.5); + let speed_rev_per_s = 2900.0 / 60.0; + let expected_mass_flow = 0.85 * volumetric_eff * density * 0.0001 * speed_rev_per_s; + + assert_relative_eq!(mass_flow.to_kg_per_s(), expected_mass_flow, epsilon = 1e-10); + } + + #[test] + fn test_f_m_scales_mass_flow() { + let mut compressor = create_test_compressor(); + let density = 20.0; + let t_suction_k = 278.15; // 5°C in Kelvin + let t_discharge_k = 318.15; // 45°C in Kelvin + let m_default = compressor + .mass_flow_rate(density, t_suction_k, t_discharge_k) + .unwrap() + .to_kg_per_s(); + + compressor.set_calib(Calib { + f_m: 1.1, + ..Calib::default() + }); + let m_calib = compressor + .mass_flow_rate(density, t_suction_k, t_discharge_k) + .unwrap() + .to_kg_per_s(); + assert_relative_eq!(m_calib / m_default, 1.1, epsilon = 1e-10); + } + + #[test] + fn test_f_power_scales_compressor_power() { + let mut compressor = create_test_compressor(); + let t_suction = Temperature::from_celsius(5.0); + let t_discharge = Temperature::from_celsius(45.0); + let p_default = compressor.power_consumption_cooling(t_suction, t_discharge); + + compressor.set_calib(Calib { + f_power: 1.1, + ..Calib::default() + }); + let p_calib = compressor.power_consumption_cooling(t_suction, t_discharge); + assert_relative_eq!(p_calib / p_default, 1.1, epsilon = 1e-10); + } + + #[test] + fn test_mass_flow_negative_density() { + let compressor = create_test_compressor(); + let t_suction_k = 278.15; // 5°C in Kelvin + let t_discharge_k = 318.15; // 45°C in Kelvin + let result = compressor.mass_flow_rate(-10.0, t_suction_k, t_discharge_k); + 
assert!(result.is_err()); + } + + #[test] + fn test_mass_flow_zero_suction_pressure() { + let coeffs = test_coefficients(); + let suction = Port::new( + FluidId::new("R134a"), + Pressure::from_pascals(0.0), // Zero pressure + Enthalpy::from_joules_per_kg(400000.0), + ); + let discharge = Port::new( + FluidId::new("R134a"), + Pressure::from_pascals(0.0), // Same zero pressure for connection + Enthalpy::from_joules_per_kg(400000.0), // Same enthalpy for connection + ); + let (suction_conn, mut discharge_conn) = suction.connect(discharge).unwrap(); + + // Modify discharge pressure and enthalpy after connection + discharge_conn.set_pressure(Pressure::from_bar(15.0)); + discharge_conn.set_enthalpy(Enthalpy::from_joules_per_kg(450000.0)); + + let compressor = Compressor { + model: CompressorModel::Ahri540(coeffs), + port_suction: suction_conn, + port_discharge: discharge_conn, + speed_rpm: 2900.0, + displacement_m3_per_rev: 0.0001, + mechanical_efficiency: 0.85, + calib: Calib::default(), + fluid_id: FluidId::new("R134a"), + circuit_id: CircuitId::default(), + operational_state: OperationalState::default(), + _state: PhantomData, + }; + + let t_suction_k = 278.15; // 5°C in Kelvin + let t_discharge_k = 318.15; // 45°C in Kelvin + let result = compressor.mass_flow_rate(20.0, t_suction_k, t_discharge_k); + assert!(result.is_err()); + } + + #[test] + fn test_power_consumption_cooling() { + let compressor = create_test_compressor(); + let t_suction = Temperature::from_celsius(5.0); + let t_discharge = Temperature::from_celsius(45.0); + + let power = compressor.power_consumption_cooling(t_suction, t_discharge); + + // Verify power is positive + assert!(power > 0.0); + + // Verify calculation: M3 + M4 * PR + M5 * T_suction + M6 * T_discharge + // Using 6.0/3.5 pressure ratio from create_test_compressor + let pressure_ratio: f64 = 6.0 / 3.5; + let expected_power = 500.0 + + 1500.0 * pressure_ratio + + (-2.5) * t_suction.to_kelvin() + + 1.8 * t_discharge.to_kelvin(); + + 
assert_relative_eq!(power, expected_power, epsilon = 1e-10); + } + + #[test] + fn test_power_consumption_heating() { + let compressor = create_test_compressor(); + let t_suction = Temperature::from_celsius(5.0); + let t_discharge = Temperature::from_celsius(45.0); + + let power = compressor.power_consumption_heating(t_suction, t_discharge); + + // Verify calculation: M7 + M8 * PR + M9 * T_suction + M10 * T_discharge + // Using 6.0/3.5 pressure ratio from create_test_compressor + let pressure_ratio: f64 = 6.0 / 3.5; + let expected_power = 600.0 + + 1600.0 * pressure_ratio + + (-3.0) * t_suction.to_kelvin() + + 2.0 * t_discharge.to_kelvin(); + + assert_relative_eq!(power, expected_power, epsilon = 1e-10); + } + + #[test] + fn test_cooling_capacity() { + let compressor = create_test_compressor(); + let mass_flow = MassFlow::from_kg_per_s(0.05); + let h_evap_in = Enthalpy::from_joules_per_kg(250000.0); + let h_evap_out = Enthalpy::from_joules_per_kg(400000.0); + + let capacity = compressor.cooling_capacity(mass_flow, h_evap_in, h_evap_out); + + // Q = ṁ * (h_out - h_in) + let expected_capacity = 0.05 * (400000.0 - 250000.0); + assert_relative_eq!(capacity, expected_capacity, epsilon = 1e-10); + } + + #[test] + fn test_heating_capacity() { + let compressor = create_test_compressor(); + let mass_flow = MassFlow::from_kg_per_s(0.05); + let h_cond_in = Enthalpy::from_joules_per_kg(450000.0); + let h_cond_out = Enthalpy::from_joules_per_kg(250000.0); + + let capacity = compressor.heating_capacity(mass_flow, h_cond_in, h_cond_out); + + // Q = ṁ * (h_out - h_in) + let expected_capacity = 0.05 * (250000.0 - 450000.0); // Negative for heating + assert_relative_eq!(capacity, expected_capacity, epsilon = 1e-10); + } + + #[test] + fn test_coefficient_of_performance() { + let compressor = create_test_compressor(); + let capacity = 5000.0; // W + let power = 2000.0; // W + + let cop = compressor + .coefficient_of_performance(capacity, power) + .unwrap(); + assert_relative_eq!(cop, 
2.5, epsilon = 1e-10); + } + + #[test] + fn test_coefficient_of_performance_zero_power() { + let compressor = create_test_compressor(); + let result = compressor.coefficient_of_performance(5000.0, 0.0); + assert!(result.is_err()); + } + + #[test] + fn test_component_n_equations() { + let compressor = create_test_compressor(); + assert_eq!(compressor.n_equations(), 2); + } + + #[test] + fn test_component_compute_residuals() { + let compressor = create_test_compressor(); + let state = vec![0.05, 400000.0, 450000.0, 3500.0]; + let mut residuals = vec![0.0; 2]; + + let result = compressor.compute_residuals(&state, &mut residuals); + assert!(result.is_ok()); + + // Verify residuals are calculated (actual values depend on fluid properties) + assert!(!residuals[0].is_nan()); + assert!(!residuals[1].is_nan()); + } + + #[test] + fn test_component_compute_residuals_wrong_size() { + let compressor = create_test_compressor(); + let state = vec![0.05, 400000.0, 450000.0, 3500.0]; + let mut residuals = vec![0.0; 3]; // Wrong size + + let result = compressor.compute_residuals(&state, &mut residuals); + assert!(result.is_err()); + } + + #[test] + fn test_component_jacobian_entries() { + let compressor = create_test_compressor(); + let state = vec![0.05, 400000.0, 450000.0, 3500.0]; + let mut jacobian = JacobianBuilder::new(); + + let result = compressor.jacobian_entries(&state, &mut jacobian); + assert!(result.is_ok()); + + // Should have at least some entries + assert!(jacobian.len() > 0); + } + + #[test] + fn test_estimate_density_r134a() { + let density = estimate_density("R134a", 350000.0, 400000.0).unwrap(); + // At high enthalpy (superheated), should be around 20 kg/m³ + assert!(density > 0.0); + assert!(density < 100.0); + } + + #[test] + fn test_estimate_temperature_r134a() { + let temp = estimate_temperature("R134a", 350000.0, 400000.0).unwrap(); + assert!(temp > 200.0); + assert!(temp < 500.0); + } + + #[test] + fn test_estimate_unknown_fluid() { + let result = 
estimate_density("UnknownFluid", 350000.0, 400000.0); + assert!(result.is_err()); + } + + #[test] + fn test_approximate_derivative() { + let f = |x: f64| x * x; + let derivative = approximate_derivative(f, 2.0, 0.001); + // f'(x) = 2x, so at x=2, f'(2) = 4 + assert_relative_eq!(derivative, 4.0, epsilon = 1e-6); + } + + #[test] + fn test_compressor_with_r410a() { + let coeffs = test_coefficients(); + let suction = Port::new( + FluidId::new("R410A"), + Pressure::from_bar(10.0), + Enthalpy::from_joules_per_kg(380000.0), + ); + let discharge = Port::new( + FluidId::new("R410A"), + Pressure::from_bar(10.0), // Same pressure for connection + Enthalpy::from_joules_per_kg(380000.0), // Same enthalpy for connection + ); + let (suction_conn, mut discharge_conn) = suction.connect(discharge).unwrap(); + + // Modify discharge pressure and enthalpy after connection + // Use moderate pressure ratio (18/10 = 1.8) to ensure positive volumetric efficiency + discharge_conn.set_pressure(Pressure::from_bar(18.0)); + discharge_conn.set_enthalpy(Enthalpy::from_joules_per_kg(430000.0)); + + let compressor = Compressor { + model: CompressorModel::Ahri540(coeffs), + port_suction: suction_conn, + port_discharge: discharge_conn, + speed_rpm: 3600.0, + displacement_m3_per_rev: 0.00008, + mechanical_efficiency: 0.88, + calib: Calib::default(), + fluid_id: FluidId::new("R410A"), + circuit_id: CircuitId::default(), + operational_state: OperationalState::default(), + _state: PhantomData, + }; + + let density = 25.0; // kg/m³ + let t_suction_k = 283.15; // 10°C in Kelvin + let t_discharge_k = 323.15; // 50°C in Kelvin + let mass_flow = compressor + .mass_flow_rate(density, t_suction_k, t_discharge_k) + .unwrap(); + assert!(mass_flow.to_kg_per_s() > 0.0); + + let t_suction = Temperature::from_celsius(10.0); + let t_discharge = Temperature::from_celsius(50.0); + let power = compressor.power_consumption_cooling(t_suction, t_discharge); + assert!(power > 0.0); + } + + #[test] + fn 
test_compressor_with_r454b() { + // R454B (Opteon XL41) is a low-GWP replacement for R410A + let coeffs = test_coefficients(); + let suction = Port::new( + FluidId::new("R454B"), + Pressure::from_bar(10.0), + Enthalpy::from_joules_per_kg(380000.0), + ); + let discharge = Port::new( + FluidId::new("R454B"), + Pressure::from_bar(10.0), // Same pressure for connection + Enthalpy::from_joules_per_kg(380000.0), // Same enthalpy for connection + ); + let (suction_conn, mut discharge_conn) = suction.connect(discharge).unwrap(); + + // Modify discharge pressure and enthalpy after connection + discharge_conn.set_pressure(Pressure::from_bar(18.0)); + discharge_conn.set_enthalpy(Enthalpy::from_joules_per_kg(430000.0)); + + let compressor = Compressor { + model: CompressorModel::Ahri540(coeffs), + port_suction: suction_conn, + port_discharge: discharge_conn, + speed_rpm: 3600.0, + displacement_m3_per_rev: 0.00008, + mechanical_efficiency: 0.88, + calib: Calib::default(), + fluid_id: FluidId::new("R454B"), + circuit_id: CircuitId::default(), + operational_state: OperationalState::default(), + _state: PhantomData, + }; + + // R454B should use R410A properties as approximation + let density = 25.0; // kg/m³ + let t_suction_k = 283.15; // 10°C in Kelvin + let t_discharge_k = 323.15; // 50°C in Kelvin + let mass_flow = compressor + .mass_flow_rate(density, t_suction_k, t_discharge_k) + .unwrap(); + assert!(mass_flow.to_kg_per_s() > 0.0); + + let t_suction = Temperature::from_celsius(10.0); + let t_discharge = Temperature::from_celsius(50.0); + let power = compressor.power_consumption_cooling(t_suction, t_discharge); + assert!(power > 0.0); + } + + #[test] + fn test_mass_flow_with_high_pressure_ratio() { + let coeffs = Ahri540Coefficients::new( + 0.85, 2.5, // M1, M2 + 500.0, 1500.0, -2.5, 1.8, // M3-M6 + 600.0, 1600.0, -3.0, 2.0, // M7-M10 + ); + let suction = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(1.0), // Low suction pressure + 
Enthalpy::from_joules_per_kg(400000.0), + ); + let discharge = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(1.0), // Same pressure for connection + Enthalpy::from_joules_per_kg(400000.0), // Same enthalpy for connection + ); + let (suction_conn, mut discharge_conn) = suction.connect(discharge).unwrap(); + + // Modify discharge pressure and enthalpy after connection + discharge_conn.set_pressure(Pressure::from_bar(30.0)); + discharge_conn.set_enthalpy(Enthalpy::from_joules_per_kg(450000.0)); + + let compressor = Compressor { + model: CompressorModel::Ahri540(coeffs), + port_suction: suction_conn, + port_discharge: discharge_conn, + speed_rpm: 2900.0, + displacement_m3_per_rev: 0.0001, + mechanical_efficiency: 0.85, + calib: Calib::default(), + fluid_id: FluidId::new("R134a"), + circuit_id: CircuitId::default(), + operational_state: OperationalState::default(), + _state: PhantomData, + }; + + let density = 20.0; + let t_suction_k = 283.15; // 10°C in Kelvin + let t_discharge_k = 323.15; // 50°C in Kelvin + // With high pressure ratio, volumetric efficiency might be negative + // depending on M2 value + let result = compressor.mass_flow_rate(density, t_suction_k, t_discharge_k); + // This may fail due to negative volumetric efficiency + // which is expected behavior + if result.is_ok() { + let mass_flow = result.unwrap(); + assert!(mass_flow.to_kg_per_s() >= 0.0); + } + } + + #[test] + fn test_compressor_clone() { + let compressor = create_test_compressor(); + let cloned = compressor.clone(); + + assert_eq!(compressor.speed_rpm(), cloned.speed_rpm()); + assert_eq!( + compressor.displacement_m3_per_rev(), + cloned.displacement_m3_per_rev() + ); + assert_eq!( + compressor.mechanical_efficiency(), + cloned.mechanical_efficiency() + ); + } + + #[test] + fn test_state_manageable_state() { + let compressor = create_test_compressor(); + assert_eq!(compressor.state(), OperationalState::On); + } + + #[test] + fn test_state_manageable_set_state_on_to_off() { + let 
mut compressor = create_test_compressor(); + let result = compressor.set_state(OperationalState::Off); + assert!(result.is_ok()); + assert_eq!(compressor.state(), OperationalState::Off); + } + + #[test] + fn test_state_manageable_set_state_on_to_bypass() { + let mut compressor = create_test_compressor(); + let result = compressor.set_state(OperationalState::Bypass); + assert!(result.is_ok()); + assert_eq!(compressor.state(), OperationalState::Bypass); + } + + #[test] + fn test_state_manageable_can_transition_to() { + let compressor = create_test_compressor(); + assert!(compressor.can_transition_to(OperationalState::Off)); + assert!(compressor.can_transition_to(OperationalState::Bypass)); + assert!(compressor.can_transition_to(OperationalState::On)); + } + + #[test] + fn test_state_manageable_circuit_id() { + let compressor = create_test_compressor(); + assert_eq!(compressor.circuit_id().as_str(), "default"); + } + + #[test] + fn test_state_manageable_set_circuit_id() { + let mut compressor = create_test_compressor(); + compressor.set_circuit_id(CircuitId::new("primary")); + assert_eq!(compressor.circuit_id().as_str(), "primary"); + } +} diff --git a/crates/components/src/expansion_valve.rs b/crates/components/src/expansion_valve.rs new file mode 100644 index 0000000..22f1bb1 --- /dev/null +++ b/crates/components/src/expansion_valve.rs @@ -0,0 +1,1434 @@ +//! Expansion Valve Component Implementation +//! +//! This module provides an expansion valve component that models isenthalpic +//! expansion in refrigeration systems. The expansion valve reduces pressure +//! while maintaining constant enthalpy (throttling process). +//! +//! ## Thermodynamic Model +//! +//! The expansion valve is modeled as an isenthalpic device: +//! ```text +//! h_out = h_in (enthalpy conservation - isenthalpic) +//! ṁ_out = ṁ_in (mass flow continuity) +//! P_out < P_in (pressure drop - throttling) +//! W = 0 (no work done) +//! Q = 0 (adiabatic) +//! ``` +//! +//! ## Operational States +//! 
+//! - **On**: Normal expansion with isenthalpic process +//! - **Off**: Zero mass flow through the valve +//! - **Bypass**: Acts as adiabatic pipe (P_in = P_out, h_in = h_out) +//! +//! ## Example +//! +//! ```rust +//! use entropyk_components::expansion_valve::ExpansionValve; +//! use entropyk_components::port::{FluidId, Port}; +//! use entropyk_core::{Pressure, Enthalpy}; +//! +//! // Create disconnected ports +//! let inlet = Port::new( +//! FluidId::new("R134a"), +//! Pressure::from_bar(10.0), +//! Enthalpy::from_joules_per_kg(250000.0) +//! ); +//! let outlet = Port::new( +//! FluidId::new("R134a"), +//! Pressure::from_bar(10.0), +//! Enthalpy::from_joules_per_kg(250000.0) +//! ); +//! +//! // Create expansion valve +//! let valve = ExpansionValve::new(inlet, outlet, None).unwrap(); +//! ``` + +use crate::port::{Connected, Disconnected, FluidId, Port}; +use crate::{ + CircuitId, Component, ComponentError, ConnectedPort, JacobianBuilder, OperationalState, + ResidualVector, SystemState, +}; +use entropyk_core::Calib; +use std::marker::PhantomData; + +const OPENING_THRESHOLD: f64 = 0.01; + +const ENTHALPY_TOLERANCE_J_KG: f64 = 100.0; + +const MIN_STATE_DIMENSIONS: usize = 2; + +fn is_effectively_off_impl(operational_state: OperationalState, opening: Option) -> bool { + operational_state == OperationalState::Off || opening.is_some_and(|o| o < OPENING_THRESHOLD) +} + +/// Expansion valve component for modeling isenthalpic expansion. +/// +/// The expansion valve is a throttling device that reduces pressure while +/// maintaining constant enthalpy (isenthalpic process). It implements the +/// [`Component`] trait for integration with the solver. 
+/// +/// # Type Parameters +/// +/// * `State` - Either `Disconnected` or `Connected`, tracking connection state +/// +/// # Example +/// +/// ```rust +/// use entropyk_components::expansion_valve::ExpansionValve; +/// use entropyk_components::port::{FluidId, Port}; +/// use entropyk_core::{Pressure, Enthalpy}; +/// +/// // Create disconnected ports +/// let inlet = Port::new( +/// FluidId::new("R134a"), +/// Pressure::from_bar(10.0), +/// Enthalpy::from_joules_per_kg(250000.0) +/// ); +/// let outlet = Port::new( +/// FluidId::new("R134a"), +/// Pressure::from_bar(10.0), +/// Enthalpy::from_joules_per_kg(250000.0) +/// ); +/// +/// // Create expansion valve with optional opening parameter +/// let valve = ExpansionValve::new(inlet, outlet, Some(1.0)).unwrap(); +/// ``` +#[derive(Debug, Clone, PartialEq)] +pub struct ExpansionValve { + port_inlet: Port, + port_outlet: Port, + /// Calibration: ṁ_eff = f_m × ṁ_nominal (mass flow scaling) + calib: Calib, + operational_state: OperationalState, + opening: Option, + fluid_id: FluidId, + circuit_id: CircuitId, + _state: PhantomData, +} + +impl ExpansionValve { + /// Creates a new disconnected expansion valve. 
+ /// + /// # Arguments + /// + /// * `port_inlet` - Inlet port (high pressure, subcooled liquid) + /// * `port_outlet` - Outlet port (low pressure, two-phase) + /// * `opening` - Optional opening parameter (0.0 = closed, 1.0 = fully open) + /// + /// # Errors + /// + /// Returns an error if: + /// - Opening is outside [0.0, 1.0] range + /// - Opening is NaN or infinite + /// - Ports have different fluid types + pub fn new( + port_inlet: Port, + port_outlet: Port, + opening: Option, + ) -> Result { + if let Some(o) = opening { + if !(0.0..=1.0).contains(&o) { + return Err(ComponentError::InvalidState(format!( + "Opening must be between 0.0 and 1.0, got {}", + o + ))); + } + if o.is_nan() || o.is_infinite() { + return Err(ComponentError::InvalidState( + "Opening must be a finite number".to_string(), + )); + } + } + + if port_inlet.fluid_id() != port_outlet.fluid_id() { + return Err(ComponentError::InvalidState( + "Inlet and outlet ports must have the same fluid type".to_string(), + )); + } + + let fluid_id = port_inlet.fluid_id().clone(); + + Ok(Self { + port_inlet, + port_outlet, + calib: Calib::default(), + operational_state: OperationalState::default(), + opening, + fluid_id, + circuit_id: CircuitId::default(), + _state: PhantomData, + }) + } + + /// Returns the fluid identifier. + pub fn fluid_id(&self) -> &FluidId { + &self.fluid_id + } + + /// Returns the inlet port. + pub fn port_inlet(&self) -> &Port { + &self.port_inlet + } + + /// Returns the outlet port. + pub fn port_outlet(&self) -> &Port { + &self.port_outlet + } + + /// Returns the optional opening parameter (0.0 to 1.0). + pub fn opening(&self) -> Option { + self.opening + } + + /// Returns the current operational state. + pub fn operational_state(&self) -> OperationalState { + self.operational_state + } + + /// Sets the operational state. + pub fn set_operational_state(&mut self, state: OperationalState) { + self.operational_state = state; + } + + /// Returns the circuit identifier. 
+ pub fn circuit_id(&self) -> &CircuitId { + &self.circuit_id + } + + /// Sets the circuit identifier. + pub fn set_circuit_id(&mut self, circuit_id: CircuitId) { + self.circuit_id = circuit_id; + } + + /// Returns calibration factors (f_m for mass flow scaling). + pub fn calib(&self) -> &Calib { + &self.calib + } + + /// Sets calibration factors. + pub fn set_calib(&mut self, calib: Calib) { + self.calib = calib; + } + + /// Returns true if the valve is effectively off. + /// + /// The valve is effectively off when: + /// - Operational state is Off, or + /// - Opening is below threshold (< 1%) + pub fn is_effectively_off(&self) -> bool { + is_effectively_off_impl(self.operational_state, self.opening) + } +} + +/// Phase region at a thermodynamic state point. +#[derive(Debug, Clone, Copy, PartialEq)] +pub enum PhaseRegion { + /// Subcooled liquid (below saturation line) + Subcooled, + /// Two-phase mixture (between saturated liquid and vapor) + TwoPhase, + /// Superheated vapor (above saturation line) + Superheated, +} + +impl PhaseRegion { + /// Returns true if the region is two-phase. + pub fn is_two_phase(self) -> bool { + self == PhaseRegion::TwoPhase + } +} + +impl ExpansionValve { + /// Returns the inlet port. + pub fn port_inlet(&self) -> &Port { + &self.port_inlet + } + + /// Returns the outlet port. + pub fn port_outlet(&self) -> &Port { + &self.port_outlet + } + + /// Computes the full thermodynamic state at the inlet port. + pub fn inlet_state(&self, backend: &impl entropyk_fluids::FluidBackend) -> Result { + backend + .full_state( + entropyk_fluids::FluidId::new(self.port_inlet.fluid_id().as_str()), + self.port_inlet.pressure(), + self.port_inlet.enthalpy(), + ) + .map_err(|e| ComponentError::CalculationFailed(format!("Failed to compute inlet state: {}", e))) + } + + /// Computes the full thermodynamic state at the outlet port. 
+ pub fn outlet_state(&self, backend: &impl entropyk_fluids::FluidBackend) -> Result { + backend + .full_state( + entropyk_fluids::FluidId::new(self.port_outlet.fluid_id().as_str()), + self.port_outlet.pressure(), + self.port_outlet.enthalpy(), + ) + .map_err(|e| ComponentError::CalculationFailed(format!("Failed to compute outlet state: {}", e))) + } + + /// Returns the optional opening parameter (0.0 to 1.0). + pub fn opening(&self) -> Option { + self.opening + } + + /// Returns the current operational state. + pub fn operational_state(&self) -> OperationalState { + self.operational_state + } + + /// Sets the operational state. + pub fn set_operational_state(&mut self, state: OperationalState) { + self.operational_state = state; + } + + /// Returns the circuit identifier. + pub fn circuit_id(&self) -> &CircuitId { + &self.circuit_id + } + + /// Sets the circuit identifier. + pub fn set_circuit_id(&mut self, circuit_id: CircuitId) { + self.circuit_id = circuit_id; + } + + /// Returns the fluid identifier. + pub fn fluid_id(&self) -> &FluidId { + &self.fluid_id + } + + /// Returns calibration factors (f_m for mass flow scaling). + pub fn calib(&self) -> &Calib { + &self.calib + } + + /// Sets calibration factors. + pub fn set_calib(&mut self, calib: Calib) { + self.calib = calib; + } + + /// Returns true if the valve is effectively off. + /// + /// The valve is effectively off when: + /// - Operational state is Off, or + /// - Opening is below threshold (< 1%) + pub fn is_effectively_off(&self) -> bool { + is_effectively_off_impl(self.operational_state, self.opening) + } + + /// Sets the valve opening parameter. + /// + /// # Arguments + /// + /// * `opening` - New opening value (0.0 = closed, 1.0 = fully open), or None + /// + /// # Errors + /// + /// Returns an error if opening is outside [0.0, 1.0] range or is NaN/infinite. 
+ pub fn set_opening(&mut self, opening: Option) -> Result<(), ComponentError> { + if let Some(o) = opening { + if !(0.0..=1.0).contains(&o) { + return Err(ComponentError::InvalidState(format!( + "Opening must be between 0.0 and 1.0, got {}", + o + ))); + } + if o.is_nan() || o.is_infinite() { + return Err(ComponentError::InvalidState( + "Opening must be a finite number".to_string(), + )); + } + } + self.opening = opening; + Ok(()) + } + + /// Returns both ports as an array for solver topology. + pub fn get_ports_slice(&self) -> [&Port; 2] { + [&self.port_inlet, &self.port_outlet] + } + + /// Validates that the process is isenthalpic (h_in = h_out). + /// + /// # Returns + /// + /// Returns `Ok(true)` if inlet and outlet enthalpies are equal within tolerance. + pub fn validate_isenthalpic(&self) -> Result { + let h_in = self.port_inlet.enthalpy().to_joules_per_kg(); + let h_out = self.port_outlet.enthalpy().to_joules_per_kg(); + + if h_in.is_nan() || h_out.is_nan() { + return Err(ComponentError::NumericalError( + "Enthalpy contains NaN value".to_string(), + )); + } + + Ok((h_in - h_out).abs() < ENTHALPY_TOLERANCE_J_KG) + } + + /// Validates that outlet pressure is lower than inlet pressure. + /// + /// # Returns + /// + /// Returns `Ok(true)` if P_out < P_in, indicating a pressure drop. + pub fn validate_pressure_drop(&self) -> Result { + let p_in = self.port_inlet.pressure().to_pascals(); + let p_out = self.port_outlet.pressure().to_pascals(); + + if p_in <= 0.0 { + return Err(ComponentError::NumericalError( + "Inlet pressure must be positive".to_string(), + )); + } + if p_out <= 0.0 { + return Err(ComponentError::NumericalError( + "Outlet pressure must be positive".to_string(), + )); + } + + Ok(p_out < p_in) + } + + /// Returns the pressure ratio (P_out / P_in). + /// + /// A value less than 1.0 indicates a pressure drop through the valve. 
+ pub fn pressure_ratio(&self) -> f64 { + let p_in = self.port_inlet.pressure().to_pascals(); + let p_out = self.port_outlet.pressure().to_pascals(); + if p_in > 0.0 { + p_out / p_in + } else { + 0.0 + } + } + + /// Detects the phase region of the outlet port based on pressure and enthalpy. + /// + /// This method determines if the outlet is in subcooled, two-phase, or superheated + /// region by comparing against saturation enthalpy values at the outlet pressure. + /// + /// # Arguments + /// + /// * `h_f` - Saturated liquid enthalpy at outlet pressure (J/kg) + /// * `h_g` - Saturated vapor enthalpy at outlet pressure (J/kg) + /// + /// # Returns + /// + /// The phase region at the outlet. + pub fn detect_phase_region(&self, h_f: f64, h_g: f64) -> PhaseRegion { + let h_out = self.port_outlet.enthalpy().to_joules_per_kg(); + + if h_out < h_f { + PhaseRegion::Subcooled + } else if h_out > h_g { + PhaseRegion::Superheated + } else { + PhaseRegion::TwoPhase + } + } + + /// Calculates the vapor quality at the outlet if in two-phase region. + /// + /// Quality is defined as: x = (h - h_f) / (h_g - h_f) + /// - x = 0: Saturated liquid + /// - x = 1: Saturated vapor + /// - 0 < x < 1: Two-phase mixture + /// + /// # Arguments + /// + /// * `h_f` - Saturated liquid enthalpy at outlet pressure (J/kg) + /// * `h_g` - Saturated vapor enthalpy at outlet pressure (J/kg) + /// + /// # Returns + /// + /// Returns `Ok(quality)` if outlet is in two-phase region, + /// or `Err(ComponentError)` if quality calculation is not applicable. 
+ pub fn outlet_quality(&self, h_f: f64, h_g: f64) -> Result { + let h_out = self.port_outlet.enthalpy().to_joules_per_kg(); + + let h_range = h_g - h_f; + if h_range <= 0.0 { + return Err(ComponentError::NumericalError( + "Invalid saturation enthalpy range (h_g must be greater than h_f)".to_string(), + )); + } + + if h_out < h_f || h_out > h_g { + return Err(ComponentError::InvalidState(format!( + "Outlet is not in two-phase region: h_out={} J/kg, h_f={} J/kg, h_g={} J/kg", + h_out, h_f, h_g + ))); + } + + Ok((h_out - h_f) / h_range) + } + + /// Validates that phase change occurs from inlet to outlet. + /// + /// For isenthalpic expansion, the outlet should typically be in two-phase + /// if the inlet was subcooled liquid. + /// + /// # Arguments + /// + /// * `h_f_out` - Saturated liquid enthalpy at outlet pressure (J/kg) + /// * `h_g_out` - Saturated vapor enthalpy at outlet pressure (J/kg) + /// + /// # Returns + /// + /// Returns `Ok(true)` if phase change occurs from inlet to outlet. + /// This is detected when the outlet is in two-phase region. 
+ pub fn validate_phase_change( + &self, + h_f_out: f64, + h_g_out: f64, + ) -> Result { + let _h_in = self.port_inlet.enthalpy().to_joules_per_kg(); + let h_out = self.port_outlet.enthalpy().to_joules_per_kg(); + + if h_out >= h_f_out && h_out <= h_g_out { + return Ok(true); + } + + Ok(false) + } +} + +impl Component for ExpansionValve { + fn compute_residuals( + &self, + state: &SystemState, + residuals: &mut ResidualVector, + ) -> Result<(), ComponentError> { + if residuals.len() != self.n_equations() { + return Err(ComponentError::InvalidResidualDimensions { + expected: self.n_equations(), + actual: residuals.len(), + }); + } + + if self.is_effectively_off() { + if state.is_empty() { + return Err(ComponentError::InvalidStateDimensions { + expected: MIN_STATE_DIMENSIONS, + actual: 0, + }); + } + residuals[0] = state[0]; + residuals[1] = 0.0; + return Ok(()); + } + + match self.operational_state { + OperationalState::Bypass => { + let p_in = self.port_inlet.pressure().to_pascals(); + let p_out = self.port_outlet.pressure().to_pascals(); + let h_in = self.port_inlet.enthalpy().to_joules_per_kg(); + let h_out = self.port_outlet.enthalpy().to_joules_per_kg(); + + residuals[0] = p_out - p_in; + residuals[1] = h_out - h_in; + return Ok(()); + } + OperationalState::On | OperationalState::Off => {} + } + + if state.len() < MIN_STATE_DIMENSIONS { + return Err(ComponentError::InvalidStateDimensions { + expected: MIN_STATE_DIMENSIONS, + actual: state.len(), + }); + } + + let h_in = self.port_inlet.enthalpy().to_joules_per_kg(); + let h_out = self.port_outlet.enthalpy().to_joules_per_kg(); + + residuals[0] = h_out - h_in; + + // Mass flow: ṁ_out = f_m × ṁ_in (calibration factor on inlet flow) + let mass_flow_in = state[0]; + let mass_flow_out = state[1]; + residuals[1] = mass_flow_out - self.calib.f_m * mass_flow_in; + + Ok(()) + } + + fn jacobian_entries( + &self, + _state: &SystemState, + jacobian: &mut JacobianBuilder, + ) -> Result<(), ComponentError> { + if 
self.is_effectively_off() { + jacobian.add_entry(0, 0, 1.0); + jacobian.add_entry(1, 0, 0.0); + return Ok(()); + } + + match self.operational_state { + OperationalState::Bypass => { + jacobian.add_entry(0, 0, 1.0); + jacobian.add_entry(0, 1, -1.0); + jacobian.add_entry(1, 0, 1.0); + jacobian.add_entry(1, 1, -1.0); + return Ok(()); + } + OperationalState::On | OperationalState::Off => {} + } + + jacobian.add_entry(0, 0, 0.0); + jacobian.add_entry(0, 1, 0.0); + jacobian.add_entry(1, 0, -self.calib.f_m); + jacobian.add_entry(1, 1, 1.0); + + Ok(()) + } + + fn n_equations(&self) -> usize { + 2 + } + + fn get_ports(&self) -> &[ConnectedPort] { + &[] + } +} + +use crate::state_machine::StateManageable; + +impl StateManageable for ExpansionValve { + fn state(&self) -> OperationalState { + self.operational_state + } + + fn set_state(&mut self, state: OperationalState) -> Result<(), ComponentError> { + if self.operational_state.can_transition_to(state) { + let from = self.operational_state; + self.operational_state = state; + self.on_state_change(from, state); + Ok(()) + } else { + Err(ComponentError::InvalidStateTransition { + from: self.operational_state, + to: state, + reason: "Transition not allowed".to_string(), + }) + } + } + + fn can_transition_to(&self, target: OperationalState) -> bool { + self.operational_state.can_transition_to(target) + } + + fn circuit_id(&self) -> &CircuitId { + &self.circuit_id + } + + fn set_circuit_id(&mut self, circuit_id: CircuitId) { + self.circuit_id = circuit_id; + } +} + +#[cfg(test)] +mod tests { + use super::*; + use approx::assert_relative_eq; + use entropyk_core::{Enthalpy, Pressure}; + + fn create_test_valve() -> ExpansionValve { + let inlet = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(10.0), + Enthalpy::from_joules_per_kg(250000.0), + ); + let outlet = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(10.0), + Enthalpy::from_joules_per_kg(250000.0), + ); + let (inlet_conn, mut outlet_conn) = 
inlet.connect(outlet).unwrap(); + + outlet_conn.set_pressure(Pressure::from_bar(3.5)); + + ExpansionValve { + port_inlet: inlet_conn, + port_outlet: outlet_conn, + calib: Calib::default(), + operational_state: OperationalState::On, + opening: Some(1.0), + fluid_id: FluidId::new("R134a"), + circuit_id: CircuitId::default(), + _state: PhantomData, + } + } + + fn create_disconnected_valve() -> ExpansionValve { + let inlet = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(10.0), + Enthalpy::from_joules_per_kg(250000.0), + ); + let outlet = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(10.0), + Enthalpy::from_joules_per_kg(250000.0), + ); + ExpansionValve::new(inlet, outlet, Some(1.0)).unwrap() + } + + #[test] + fn test_valve_creation() { + let valve = create_disconnected_valve(); + assert_eq!(valve.fluid_id().as_str(), "R134a"); + assert_eq!(valve.opening(), Some(1.0)); + assert_eq!(valve.operational_state(), OperationalState::On); + } + + #[test] + fn test_valve_creation_without_opening() { + let inlet = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(10.0), + Enthalpy::from_joules_per_kg(250000.0), + ); + let outlet = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(10.0), + Enthalpy::from_joules_per_kg(250000.0), + ); + let valve = ExpansionValve::new(inlet, outlet, None).unwrap(); + assert_eq!(valve.opening(), None); + } + + #[test] + fn test_valve_creation_invalid_opening_high() { + let inlet = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(10.0), + Enthalpy::from_joules_per_kg(250000.0), + ); + let outlet = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(10.0), + Enthalpy::from_joules_per_kg(250000.0), + ); + let result = ExpansionValve::new(inlet, outlet, Some(1.5)); + assert!(result.is_err()); + } + + #[test] + fn test_valve_creation_invalid_opening_low() { + let inlet = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(10.0), + Enthalpy::from_joules_per_kg(250000.0), + ); + let outlet = Port::new( 
+ FluidId::new("R134a"), + Pressure::from_bar(10.0), + Enthalpy::from_joules_per_kg(250000.0), + ); + let result = ExpansionValve::new(inlet, outlet, Some(-0.1)); + assert!(result.is_err()); + } + + #[test] + fn test_valve_creation_nan_opening() { + let inlet = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(10.0), + Enthalpy::from_joules_per_kg(250000.0), + ); + let outlet = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(10.0), + Enthalpy::from_joules_per_kg(250000.0), + ); + let result = ExpansionValve::new(inlet, outlet, Some(f64::NAN)); + assert!(result.is_err()); + } + + #[test] + fn test_valve_creation_incompatible_fluids() { + let inlet = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(10.0), + Enthalpy::from_joules_per_kg(250000.0), + ); + let outlet = Port::new( + FluidId::new("R410A"), + Pressure::from_bar(10.0), + Enthalpy::from_joules_per_kg(250000.0), + ); + let result = ExpansionValve::new(inlet, outlet, Some(1.0)); + assert!(result.is_err()); + } + + #[test] + fn test_isenthalpic_expansion() { + let valve = create_test_valve(); + assert_relative_eq!( + valve.port_inlet().enthalpy().to_joules_per_kg(), + valve.port_outlet().enthalpy().to_joules_per_kg(), + epsilon = 1e-10 + ); + } + + #[test] + fn test_validate_isenthalpic() { + let valve = create_test_valve(); + let result = valve.validate_isenthalpic(); + assert!(result.is_ok()); + assert!(result.unwrap()); + } + + #[test] + fn test_pressure_drop() { + let valve = create_test_valve(); + let p_in = valve.port_inlet().pressure().to_bar(); + let p_out = valve.port_outlet().pressure().to_bar(); + assert!(p_out < p_in, "Outlet pressure should be less than inlet"); + } + + #[test] + fn test_validate_pressure_drop() { + let valve = create_test_valve(); + let result = valve.validate_pressure_drop(); + assert!(result.is_ok()); + assert!(result.unwrap()); + } + + #[test] + fn test_pressure_ratio() { + let valve = create_test_valve(); + let ratio = valve.pressure_ratio(); + 
assert_relative_eq!(ratio, 0.35, epsilon = 1e-10); + } + + #[test] + fn test_off_mode() { + let mut valve = create_test_valve(); + valve.set_operational_state(OperationalState::Off); + + let state = vec![0.05, 0.05]; + let mut residuals = vec![0.0; 2]; + + valve.compute_residuals(&state, &mut residuals).unwrap(); + + assert_eq!(valve.operational_state(), OperationalState::Off); + assert!(valve.is_effectively_off()); + } + + #[test] + fn test_bypass_mode() { + let inlet = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(10.0), + Enthalpy::from_joules_per_kg(250000.0), + ); + let outlet = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(10.0), + Enthalpy::from_joules_per_kg(250000.0), + ); + let (inlet_conn, outlet_conn) = inlet.connect(outlet).unwrap(); + + let valve = ExpansionValve { + port_inlet: inlet_conn, + port_outlet: outlet_conn, + calib: Calib::default(), + operational_state: OperationalState::Bypass, + opening: Some(1.0), + fluid_id: FluidId::new("R134a"), + circuit_id: CircuitId::default(), + _state: PhantomData, + }; + + let state = vec![0.05, 0.05]; + let mut residuals = vec![0.0; 2]; + + valve.compute_residuals(&state, &mut residuals).unwrap(); + + assert_eq!(valve.operational_state(), OperationalState::Bypass); + assert_relative_eq!(residuals[0], 0.0, epsilon = 1e-10); + assert_relative_eq!(residuals[1], 0.0, epsilon = 1e-10); + } + + #[test] + fn test_opening_threshold_off() { + let inlet = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(10.0), + Enthalpy::from_joules_per_kg(250000.0), + ); + let outlet = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(10.0), + Enthalpy::from_joules_per_kg(250000.0), + ); + let (inlet_conn, mut outlet_conn) = inlet.connect(outlet).unwrap(); + outlet_conn.set_pressure(Pressure::from_bar(3.5)); + + let valve = ExpansionValve { + port_inlet: inlet_conn, + port_outlet: outlet_conn, + calib: Calib::default(), + operational_state: OperationalState::On, + opening: Some(0.005), + fluid_id: 
FluidId::new("R134a"), + circuit_id: CircuitId::default(), + _state: PhantomData, + }; + + assert!(valve.is_effectively_off()); + } + + #[test] + fn test_opening_threshold_on() { + let inlet = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(10.0), + Enthalpy::from_joules_per_kg(250000.0), + ); + let outlet = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(10.0), + Enthalpy::from_joules_per_kg(250000.0), + ); + let (inlet_conn, mut outlet_conn) = inlet.connect(outlet).unwrap(); + outlet_conn.set_pressure(Pressure::from_bar(3.5)); + + let valve = ExpansionValve { + port_inlet: inlet_conn, + port_outlet: outlet_conn, + calib: Calib::default(), + operational_state: OperationalState::On, + opening: Some(0.5), + fluid_id: FluidId::new("R134a"), + circuit_id: CircuitId::default(), + _state: PhantomData, + }; + + assert!(!valve.is_effectively_off()); + } + + #[test] + fn test_component_n_equations() { + let valve = create_test_valve(); + assert_eq!(valve.n_equations(), 2); + } + + #[test] + fn test_component_compute_residuals() { + let valve = create_test_valve(); + let state = vec![0.05, 0.05]; + let mut residuals = vec![0.0; 2]; + + let result = valve.compute_residuals(&state, &mut residuals); + assert!(result.is_ok()); + + assert_relative_eq!(residuals[0], 0.0, epsilon = 1e-10); + assert_relative_eq!(residuals[1], 0.0, epsilon = 1e-10); + } + + #[test] + fn test_component_compute_residuals_wrong_size() { + let valve = create_test_valve(); + let state = vec![0.05, 0.05]; + let mut residuals = vec![0.0; 3]; + + let result = valve.compute_residuals(&state, &mut residuals); + assert!(result.is_err()); + } + + #[test] + fn test_component_jacobian_entries() { + let valve = create_test_valve(); + let state = vec![0.05, 0.05]; + let mut jacobian = JacobianBuilder::new(); + + let result = valve.jacobian_entries(&state, &mut jacobian); + assert!(result.is_ok()); + assert!(!jacobian.is_empty()); + } + + #[test] + fn test_circuit_id() { + let mut valve = 
create_disconnected_valve(); + valve.set_circuit_id(CircuitId::new("primary")); + assert_eq!(valve.circuit_id().as_str(), "primary"); + } + + #[test] + fn test_get_ports_slice() { + let valve = create_test_valve(); + let ports = valve.get_ports_slice(); + assert_eq!(ports.len(), 2); + assert_eq!(ports[0].fluid_id().as_str(), "R134a"); + assert_eq!(ports[1].fluid_id().as_str(), "R134a"); + } + + #[test] + fn test_clone() { + let valve = create_test_valve(); + let cloned = valve.clone(); + assert_eq!(valve.opening(), cloned.opening()); + assert_eq!(valve.operational_state(), cloned.operational_state()); + } + + #[test] + fn test_mass_flow_continuity_residual() { + let valve = create_test_valve(); + let state = vec![0.05, 0.06]; + let mut residuals = vec![0.0; 2]; + + valve.compute_residuals(&state, &mut residuals).unwrap(); + + assert_relative_eq!(residuals[1], 0.01, epsilon = 1e-10); + } + + #[test] + fn test_set_opening_valid() { + let mut valve = create_test_valve(); + assert!(valve.set_opening(Some(0.5)).is_ok()); + assert_eq!(valve.opening(), Some(0.5)); + } + + #[test] + fn test_set_opening_invalid_high() { + let mut valve = create_test_valve(); + assert!(valve.set_opening(Some(1.5)).is_err()); + } + + #[test] + fn test_set_opening_invalid_low() { + let mut valve = create_test_valve(); + assert!(valve.set_opening(Some(-0.1)).is_err()); + } + + #[test] + fn test_set_opening_nan() { + let mut valve = create_test_valve(); + assert!(valve.set_opening(Some(f64::NAN)).is_err()); + } + + #[test] + fn test_set_opening_none() { + let mut valve = create_test_valve(); + assert!(valve.set_opening(None).is_ok()); + assert_eq!(valve.opening(), None); + } + + #[test] + fn test_on_mode_empty_state_error() { + let valve = create_test_valve(); + let state: Vec = vec![]; + let mut residuals = vec![0.0; 2]; + + let result = valve.compute_residuals(&state, &mut residuals); + assert!(result.is_err()); + } + + #[test] + fn test_off_mode_empty_state_error() { + let mut valve = 
create_test_valve(); + valve.set_operational_state(OperationalState::Off); + + let state: Vec = vec![]; + let mut residuals = vec![0.0; 2]; + + let result = valve.compute_residuals(&state, &mut residuals); + assert!(result.is_err()); + } + + #[test] + fn test_pressure_ratio_zero_inlet() { + let inlet = Port::new( + FluidId::new("R134a"), + Pressure::from_pascals(0.0), + Enthalpy::from_joules_per_kg(250000.0), + ); + let outlet = Port::new( + FluidId::new("R134a"), + Pressure::from_pascals(0.0), + Enthalpy::from_joules_per_kg(250000.0), + ); + let (inlet_conn, mut outlet_conn) = inlet.connect(outlet).unwrap(); + outlet_conn.set_pressure(Pressure::from_pascals(0.0)); + + let valve = ExpansionValve { + port_inlet: inlet_conn, + port_outlet: outlet_conn, + calib: Calib::default(), + operational_state: OperationalState::On, + opening: Some(1.0), + fluid_id: FluidId::new("R134a"), + circuit_id: CircuitId::default(), + _state: PhantomData, + }; + + assert_relative_eq!(valve.pressure_ratio(), 0.0, epsilon = 1e-10); + } + + #[test] + fn test_validate_isenthalpic_with_tolerance() { + let inlet = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(10.0), + Enthalpy::from_joules_per_kg(250000.0), + ); + let outlet = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(10.0), + Enthalpy::from_joules_per_kg(250050.0), + ); + let (inlet_conn, outlet_conn) = inlet.connect(outlet).unwrap(); + + let valve = ExpansionValve { + port_inlet: inlet_conn, + port_outlet: outlet_conn, + calib: Calib::default(), + operational_state: OperationalState::On, + opening: Some(1.0), + fluid_id: FluidId::new("R134a"), + circuit_id: CircuitId::default(), + _state: PhantomData, + }; + + let result = valve.validate_isenthalpic(); + assert!(result.is_ok()); + assert!(result.unwrap()); + } + + #[test] + fn test_bypass_mode_jacobian() { + let inlet = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(10.0), + Enthalpy::from_joules_per_kg(250000.0), + ); + let outlet = Port::new( + 
FluidId::new("R134a"), + Pressure::from_bar(10.0), + Enthalpy::from_joules_per_kg(250000.0), + ); + let (inlet_conn, outlet_conn) = inlet.connect(outlet).unwrap(); + + let valve = ExpansionValve { + port_inlet: inlet_conn, + port_outlet: outlet_conn, + calib: Calib::default(), + operational_state: OperationalState::Bypass, + opening: Some(1.0), + fluid_id: FluidId::new("R134a"), + circuit_id: CircuitId::default(), + _state: PhantomData, + }; + + let state = vec![0.05, 0.05]; + let mut jacobian = JacobianBuilder::new(); + + valve.jacobian_entries(&state, &mut jacobian).unwrap(); + + let entries = jacobian.entries(); + assert!(entries.len() >= 4); + + let has_nonzero = entries.iter().any(|(_, _, v)| *v != 0.0); + assert!(has_nonzero, "Bypass jacobian should have non-zero entries"); + } + + #[test] + fn test_state_manageable_state() { + let valve = create_test_valve(); + assert_eq!(valve.state(), OperationalState::On); + } + + #[test] + fn test_state_manageable_set_state_on_to_off() { + let mut valve = create_test_valve(); + let result = valve.set_state(OperationalState::Off); + assert!(result.is_ok()); + assert_eq!(valve.state(), OperationalState::Off); + } + + #[test] + fn test_state_manageable_set_state_on_to_bypass() { + let mut valve = create_test_valve(); + let result = valve.set_state(OperationalState::Bypass); + assert!(result.is_ok()); + assert_eq!(valve.state(), OperationalState::Bypass); + } + + #[test] + fn test_state_manageable_can_transition_to() { + let valve = create_test_valve(); + assert!(valve.can_transition_to(OperationalState::Off)); + assert!(valve.can_transition_to(OperationalState::Bypass)); + assert!(valve.can_transition_to(OperationalState::On)); + } + + #[test] + fn test_state_manageable_circuit_id() { + let valve = create_test_valve(); + assert_eq!(valve.circuit_id().as_str(), "default"); + } + + #[test] + fn test_state_manageable_set_circuit_id() { + let mut valve = create_test_valve(); + valve.set_circuit_id(CircuitId::new("secondary")); 
+ assert_eq!(valve.circuit_id().as_str(), "secondary"); + } + + #[test] + fn test_state_transition_cycle() { + let mut valve = create_test_valve(); + + // On -> Off + valve.set_state(OperationalState::Off).unwrap(); + assert_eq!(valve.state(), OperationalState::Off); + + // Off -> Bypass + valve.set_state(OperationalState::Bypass).unwrap(); + assert_eq!(valve.state(), OperationalState::Bypass); + + // Bypass -> On + valve.set_state(OperationalState::On).unwrap(); + assert_eq!(valve.state(), OperationalState::On); + } + + #[test] + fn test_detect_phase_region_subcooled() { + let inlet = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(10.0), + Enthalpy::from_joules_per_kg(200000.0), + ); + let outlet = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(10.0), + Enthalpy::from_joules_per_kg(200000.0), + ); + let (inlet_conn, mut outlet_conn) = inlet.connect(outlet).unwrap(); + outlet_conn.set_pressure(Pressure::from_bar(3.5)); + outlet_conn.set_enthalpy(Enthalpy::from_joules_per_kg(180000.0)); + + let valve = ExpansionValve { + port_inlet: inlet_conn, + port_outlet: outlet_conn, + calib: Calib::default(), + operational_state: OperationalState::On, + opening: Some(1.0), + fluid_id: FluidId::new("R134a"), + circuit_id: CircuitId::default(), + _state: PhantomData, + }; + + let h_f = 200000.0; + let h_g = 400000.0; + let region = valve.detect_phase_region(h_f, h_g); + assert_eq!(region, PhaseRegion::Subcooled); + } + + #[test] + fn test_detect_phase_region_two_phase() { + let inlet = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(10.0), + Enthalpy::from_joules_per_kg(250000.0), + ); + let outlet = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(10.0), + Enthalpy::from_joules_per_kg(250000.0), + ); + let (inlet_conn, mut outlet_conn) = inlet.connect(outlet).unwrap(); + outlet_conn.set_pressure(Pressure::from_bar(3.5)); + + let valve = ExpansionValve { + port_inlet: inlet_conn, + port_outlet: outlet_conn, + calib: Calib::default(), + 
operational_state: OperationalState::On, + opening: Some(1.0), + fluid_id: FluidId::new("R134a"), + circuit_id: CircuitId::default(), + _state: PhantomData, + }; + + let h_f = 200000.0; + let h_g = 400000.0; + let region = valve.detect_phase_region(h_f, h_g); + assert_eq!(region, PhaseRegion::TwoPhase); + assert!(region.is_two_phase()); + } + + #[test] + fn test_detect_phase_region_superheated() { + let inlet = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(10.0), + Enthalpy::from_joules_per_kg(450000.0), + ); + let outlet = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(10.0), + Enthalpy::from_joules_per_kg(450000.0), + ); + let (inlet_conn, mut outlet_conn) = inlet.connect(outlet).unwrap(); + outlet_conn.set_pressure(Pressure::from_bar(3.5)); + + let valve = ExpansionValve { + port_inlet: inlet_conn, + port_outlet: outlet_conn, + calib: Calib::default(), + operational_state: OperationalState::On, + opening: Some(1.0), + fluid_id: FluidId::new("R134a"), + circuit_id: CircuitId::default(), + _state: PhantomData, + }; + + let h_f = 200000.0; + let h_g = 400000.0; + let region = valve.detect_phase_region(h_f, h_g); + assert_eq!(region, PhaseRegion::Superheated); + } + + #[test] + fn test_outlet_quality_valid() { + let inlet = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(10.0), + Enthalpy::from_joules_per_kg(250000.0), + ); + let outlet = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(10.0), + Enthalpy::from_joules_per_kg(250000.0), + ); + let (inlet_conn, mut outlet_conn) = inlet.connect(outlet).unwrap(); + outlet_conn.set_pressure(Pressure::from_bar(3.5)); + + let valve = ExpansionValve { + port_inlet: inlet_conn, + port_outlet: outlet_conn, + calib: Calib::default(), + operational_state: OperationalState::On, + opening: Some(1.0), + fluid_id: FluidId::new("R134a"), + circuit_id: CircuitId::default(), + _state: PhantomData, + }; + + let h_f = 200000.0; + let h_g = 400000.0; + let quality = valve.outlet_quality(h_f, 
h_g).unwrap(); + assert_relative_eq!(quality, 0.25, epsilon = 1e-10); + } + + #[test] + fn test_outlet_quality_saturated_liquid() { + let inlet = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(10.0), + Enthalpy::from_joules_per_kg(200000.0), + ); + let outlet = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(10.0), + Enthalpy::from_joules_per_kg(200000.0), + ); + let (inlet_conn, mut outlet_conn) = inlet.connect(outlet).unwrap(); + outlet_conn.set_pressure(Pressure::from_bar(3.5)); + + let valve = ExpansionValve { + port_inlet: inlet_conn, + port_outlet: outlet_conn, + calib: Calib::default(), + operational_state: OperationalState::On, + opening: Some(1.0), + fluid_id: FluidId::new("R134a"), + circuit_id: CircuitId::default(), + _state: PhantomData, + }; + + let h_f = 200000.0; + let h_g = 400000.0; + let quality = valve.outlet_quality(h_f, h_g).unwrap(); + assert_relative_eq!(quality, 0.0, epsilon = 1e-10); + } + + #[test] + fn test_outlet_quality_invalid_not_two_phase() { + let inlet = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(10.0), + Enthalpy::from_joules_per_kg(450000.0), + ); + let outlet = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(10.0), + Enthalpy::from_joules_per_kg(450000.0), + ); + let (inlet_conn, mut outlet_conn) = inlet.connect(outlet).unwrap(); + outlet_conn.set_pressure(Pressure::from_bar(3.5)); + + let valve = ExpansionValve { + port_inlet: inlet_conn, + port_outlet: outlet_conn, + calib: Calib::default(), + operational_state: OperationalState::On, + opening: Some(1.0), + fluid_id: FluidId::new("R134a"), + circuit_id: CircuitId::default(), + _state: PhantomData, + }; + + let h_f = 200000.0; + let h_g = 400000.0; + let result = valve.outlet_quality(h_f, h_g); + assert!(result.is_err()); + } + + #[test] + fn test_validate_phase_change_detected() { + let inlet = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(10.0), + Enthalpy::from_joules_per_kg(250000.0), + ); + let outlet = Port::new( + 
FluidId::new("R134a"), + Pressure::from_bar(10.0), + Enthalpy::from_joules_per_kg(250000.0), + ); + let (inlet_conn, mut outlet_conn) = inlet.connect(outlet).unwrap(); + outlet_conn.set_pressure(Pressure::from_bar(3.5)); + + let valve = ExpansionValve { + port_inlet: inlet_conn, + port_outlet: outlet_conn, + calib: Calib::default(), + operational_state: OperationalState::On, + opening: Some(1.0), + fluid_id: FluidId::new("R134a"), + circuit_id: CircuitId::default(), + _state: PhantomData, + }; + + let h_f_out = 180000.0; + let h_g_out = 380000.0; + let result = valve.validate_phase_change(h_f_out, h_g_out).unwrap(); + assert!(result); + } + + #[test] + fn test_phase_region_enum() { + assert!(PhaseRegion::Subcooled.is_two_phase() == false); + assert!(PhaseRegion::TwoPhase.is_two_phase() == true); + assert!(PhaseRegion::Superheated.is_two_phase() == false); + } +} diff --git a/crates/components/src/external_model.rs b/crates/components/src/external_model.rs new file mode 100644 index 0000000..32cdb7b --- /dev/null +++ b/crates/components/src/external_model.rs @@ -0,0 +1,628 @@ +//! External Component Model Interface +//! +//! This module provides support for external component models via: +//! - Dynamic library loading (.dll/.so) via FFI +//! - HTTP API calls to external services +//! +//! ## Architecture +//! +//! The external model interface allows integration of proprietary or vendor-supplied +//! component models that cannot be implemented natively in Rust. +//! +//! ## FFI Interface (DLL/SO) +//! +//! External libraries must implement the `entropyk_model` C ABI: +//! +//! ```c +//! // Required exported functions: +//! int entropyk_model_compute(double* inputs, double* outputs, int n_in, int n_out); +//! int entropyk_model_jacobian(double* inputs, double* jacobian, int n_in, int n_out); +//! const char* entropyk_model_name(void); +//! const char* entropyk_model_version(void); +//! ``` +//! +//! ## HTTP API Interface +//! +//! 
External services must provide REST endpoints: +//! +//! - `POST /compute`: Accepts JSON with inputs, returns JSON with outputs +//! - `POST /jacobian`: Accepts JSON with inputs, returns JSON with Jacobian matrix + +use crate::ComponentError; +use serde::{Deserialize, Serialize}; +use std::path::PathBuf; +use std::sync::Arc; + +/// Configuration for an external model. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ExternalModelConfig { + /// Unique identifier for this model + pub id: String, + /// Model type (ffi or http) + pub model_type: ExternalModelType, + /// Number of inputs expected + pub n_inputs: usize, + /// Number of outputs produced + pub n_outputs: usize, + /// Optional timeout in milliseconds + #[serde(default = "default_timeout")] + pub timeout_ms: u64, +} + +fn default_timeout() -> u64 { + 5000 +} + +/// Type of external model interface. +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub enum ExternalModelType { + /// Dynamic library (.dll on Windows, .so on Linux, .dylib on macOS) + Ffi { + /// Path to the library file + library_path: PathBuf, + /// Optional function name prefix + function_prefix: Option, + }, + /// HTTP REST API + Http { + /// Base URL for the API + base_url: String, + /// Optional API key for authentication + api_key: Option, + }, +} + +/// Trait for external model implementations. +/// +/// This trait abstracts over FFI and HTTP interfaces, providing +/// a unified interface for the solver. +pub trait ExternalModel: Send + Sync { + /// Returns the model identifier. + fn id(&self) -> &str; + + /// Returns the number of inputs. + fn n_inputs(&self) -> usize; + + /// Returns the number of outputs. + fn n_outputs(&self) -> usize; + + /// Computes outputs from inputs. 
+ /// + /// # Arguments + /// + /// * `inputs` - Input values (length = n_inputs) + /// + /// # Returns + /// + /// Output values (length = n_outputs) + fn compute(&self, inputs: &[f64]) -> Result, ExternalModelError>; + + /// Computes the Jacobian matrix. + /// + /// # Arguments + /// + /// * `inputs` - Input values + /// + /// # Returns + /// + /// Jacobian matrix as a flat array (row-major, n_outputs × n_inputs) + fn jacobian(&self, inputs: &[f64]) -> Result, ExternalModelError>; + + /// Returns model metadata. + fn metadata(&self) -> ExternalModelMetadata; +} + +/// Metadata about an external model. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ExternalModelMetadata { + /// Model name + pub name: String, + /// Model version + pub version: String, + /// Model description + pub description: Option, + /// Input names/units + pub input_names: Vec, + /// Output names/units + pub output_names: Vec, +} + +/// Errors from external model operations. +#[derive(Debug, Clone, thiserror::Error)] +pub enum ExternalModelError { + /// Library loading failed + #[error("Failed to load library: {0}")] + LibraryLoad(String), + + /// Function not found in library + #[error("Function not found: {0}")] + FunctionNotFound(String), + + /// Computation failed + #[error("Computation failed: {0}")] + ComputationFailed(String), + + /// Invalid input dimensions + #[error("Invalid input dimensions: expected {expected}, got {actual}")] + InvalidInputDimensions { + /// Expected number of inputs + expected: usize, + /// Actual number received + actual: usize, + }, + + /// HTTP request failed + #[error("HTTP request failed: {0}")] + HttpError(String), + + /// Timeout exceeded + #[error("Operation timed out after {0}ms")] + Timeout(u64), + + /// JSON parsing error + #[error("JSON error: {0}")] + JsonError(String), + + /// Model not initialized + #[error("Model not initialized")] + NotInitialized, +} + +impl From for ComponentError { + fn from(err: ExternalModelError) -> Self { + 
ComponentError::InvalidState(format!("External model error: {}", err)) + } +} + +/// Request body for HTTP compute endpoint. +#[derive(Debug, Serialize)] +#[allow(dead_code)] +struct ComputeRequest { + inputs: Vec, +} + +/// Response from HTTP compute endpoint. +#[derive(Debug, Deserialize)] +#[allow(dead_code)] +struct ComputeResponse { + outputs: Vec, +} + +/// Request body for HTTP Jacobian endpoint. +#[derive(Debug, Serialize)] +#[allow(dead_code)] +struct JacobianRequest { + inputs: Vec, +} + +/// Response from HTTP Jacobian endpoint. +#[derive(Debug, Deserialize)] +#[allow(dead_code)] +struct JacobianResponse { + jacobian: Vec, +} + +/// FFI-based external model (mock implementation for non-ffi builds). +/// +/// When the `ffi` feature is not enabled, this provides a mock implementation +/// that can be used for testing and development. The mock passes inputs through +/// unchanged (identity function). +#[cfg(not(feature = "ffi"))] +pub struct FfiModel { + config: ExternalModelConfig, + metadata: ExternalModelMetadata, +} + +#[cfg(not(feature = "ffi"))] +impl FfiModel { + /// Creates a new FFI model (mock implementation without ffi feature). + /// + /// This creates a mock model that can be used for testing. The mock + /// implements an identity function (output = input for first n_outputs). + pub fn new(config: ExternalModelConfig) -> Result { + let metadata = ExternalModelMetadata { + name: format!("Mock FFI Model: {}", config.id), + version: "0.1.0-mock".to_string(), + description: Some("Mock FFI model for testing (ffi feature not enabled)".to_string()), + input_names: (0..config.n_inputs) + .map(|i| format!("input_{}", i)) + .collect(), + output_names: (0..config.n_outputs) + .map(|i| format!("output_{}", i)) + .collect(), + }; + Ok(Self { config, metadata }) + } + + /// Creates with custom mock metadata for testing. 
+ pub fn new_mock( + config: ExternalModelConfig, + metadata: ExternalModelMetadata, + ) -> Result { + Ok(Self { config, metadata }) + } +} + +#[cfg(not(feature = "ffi"))] +impl ExternalModel for FfiModel { + fn id(&self) -> &str { + &self.config.id + } + + fn n_inputs(&self) -> usize { + self.config.n_inputs + } + + fn n_outputs(&self) -> usize { + self.config.n_outputs + } + + fn compute(&self, inputs: &[f64]) -> Result, ExternalModelError> { + if inputs.len() != self.config.n_inputs { + return Err(ExternalModelError::InvalidInputDimensions { + expected: self.config.n_inputs, + actual: inputs.len(), + }); + } + // Mock: pass through inputs (identity for first n_outputs, zero padding) + let mut outputs = vec![0.0; self.config.n_outputs]; + for (i, &input) in inputs.iter().take(self.config.n_outputs).enumerate() { + outputs[i] = input; + } + Ok(outputs) + } + + fn jacobian(&self, inputs: &[f64]) -> Result, ExternalModelError> { + if inputs.len() != self.config.n_inputs { + return Err(ExternalModelError::InvalidInputDimensions { + expected: self.config.n_inputs, + actual: inputs.len(), + }); + } + // Mock: returns identity-like Jacobian + let mut jacobian = vec![0.0; self.config.n_inputs * self.config.n_outputs]; + let min_dim = self.config.n_inputs.min(self.config.n_outputs); + for i in 0..min_dim { + jacobian[i * self.config.n_inputs + i] = 1.0; + } + Ok(jacobian) + } + + fn metadata(&self) -> ExternalModelMetadata { + self.metadata.clone() + } +} + +/// HTTP-based external model (mock implementation for non-http builds). +/// +/// When the `http` feature is not enabled, this provides a mock implementation +/// that can be used for testing and development. The mock passes inputs through +/// unchanged (identity function). 
+#[cfg(not(feature = "http"))] +pub struct HttpModel { + config: ExternalModelConfig, + metadata: ExternalModelMetadata, +} + +#[cfg(not(feature = "http"))] +impl HttpModel { + /// Creates a new HTTP model (mock implementation without http feature). + /// + /// This creates a mock model that can be used for testing. The mock + /// implements an identity function (output = input for first n_outputs). + pub fn new(config: ExternalModelConfig) -> Result { + let metadata = ExternalModelMetadata { + name: format!("Mock HTTP Model: {}", config.id), + version: "0.1.0-mock".to_string(), + description: Some("Mock HTTP model for testing (http feature not enabled)".to_string()), + input_names: (0..config.n_inputs) + .map(|i| format!("input_{}", i)) + .collect(), + output_names: (0..config.n_outputs) + .map(|i| format!("output_{}", i)) + .collect(), + }; + Ok(Self { config, metadata }) + } + + /// Creates with custom mock metadata for testing. + pub fn new_mock( + config: ExternalModelConfig, + metadata: ExternalModelMetadata, + ) -> Result { + Ok(Self { config, metadata }) + } +} + +#[cfg(not(feature = "http"))] +impl ExternalModel for HttpModel { + fn id(&self) -> &str { + &self.config.id + } + + fn n_inputs(&self) -> usize { + self.config.n_inputs + } + + fn n_outputs(&self) -> usize { + self.config.n_outputs + } + + fn compute(&self, inputs: &[f64]) -> Result, ExternalModelError> { + if inputs.len() != self.config.n_inputs { + return Err(ExternalModelError::InvalidInputDimensions { + expected: self.config.n_inputs, + actual: inputs.len(), + }); + } + // Mock: pass through inputs (identity for first n_outputs, zero padding) + let mut outputs = vec![0.0; self.config.n_outputs]; + for (i, &input) in inputs.iter().take(self.config.n_outputs).enumerate() { + outputs[i] = input; + } + Ok(outputs) + } + + fn jacobian(&self, inputs: &[f64]) -> Result, ExternalModelError> { + if inputs.len() != self.config.n_inputs { + return Err(ExternalModelError::InvalidInputDimensions { + 
expected: self.config.n_inputs, + actual: inputs.len(), + }); + } + // Mock: returns identity-like Jacobian + let mut jacobian = vec![0.0; self.config.n_inputs * self.config.n_outputs]; + let min_dim = self.config.n_inputs.min(self.config.n_outputs); + for i in 0..min_dim { + jacobian[i * self.config.n_inputs + i] = 1.0; + } + Ok(jacobian) + } + + fn metadata(&self) -> ExternalModelMetadata { + self.metadata.clone() + } +} + +/// Thread-safe wrapper for external models. +/// +/// This wrapper ensures safe concurrent access to external models, +/// which may not be thread-safe themselves. +pub struct ThreadSafeExternalModel { + inner: Arc, +} + +impl ThreadSafeExternalModel { + /// Creates a new thread-safe wrapper. + pub fn new(model: impl ExternalModel + 'static) -> Self { + Self { + inner: Arc::new(model), + } + } + + /// Creates from an existing Arc. + pub fn from_arc(model: Arc) -> Self { + Self { inner: model } + } + + /// Returns a reference to the inner model. + pub fn inner(&self) -> &dyn ExternalModel { + self.inner.as_ref() + } +} + +impl Clone for ThreadSafeExternalModel { + fn clone(&self) -> Self { + Self { + inner: Arc::clone(&self.inner), + } + } +} + +impl std::fmt::Debug for ThreadSafeExternalModel { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("ThreadSafeExternalModel") + .field("id", &self.inner.id()) + .finish() + } +} + +/// Mock external model for testing. +#[derive(Debug, Clone)] +pub struct MockExternalModel { + id: String, + n_inputs: usize, + n_outputs: usize, + compute_fn: fn(&[f64]) -> Vec, +} + +impl MockExternalModel { + /// Creates a new mock model. 
+ pub fn new( + id: impl Into, + n_inputs: usize, + n_outputs: usize, + compute_fn: fn(&[f64]) -> Vec, + ) -> Self { + Self { + id: id.into(), + n_inputs, + n_outputs, + compute_fn, + } + } + + /// Creates a simple linear model: y = x + pub fn linear_passthrough(n: usize) -> Self { + Self::new("linear_passthrough", n, n, |x| x.to_vec()) + } + + /// Creates a model that doubles inputs. + pub fn doubler(n: usize) -> Self { + Self::new("doubler", n, n, |x| x.iter().map(|v| v * 2.0).collect()) + } +} + +impl ExternalModel for MockExternalModel { + fn id(&self) -> &str { + &self.id + } + + fn n_inputs(&self) -> usize { + self.n_inputs + } + + fn n_outputs(&self) -> usize { + self.n_outputs + } + + fn compute(&self, inputs: &[f64]) -> Result, ExternalModelError> { + if inputs.len() != self.n_inputs { + return Err(ExternalModelError::InvalidInputDimensions { + expected: self.n_inputs, + actual: inputs.len(), + }); + } + Ok((self.compute_fn)(inputs)) + } + + fn jacobian(&self, inputs: &[f64]) -> Result, ExternalModelError> { + // Default: finite difference approximation + let h = 1e-6; + let mut jacobian = vec![0.0; self.n_outputs * self.n_inputs]; + + for j in 0..self.n_inputs { + let mut inputs_plus = inputs.to_vec(); + let mut inputs_minus = inputs.to_vec(); + inputs_plus[j] += h; + inputs_minus[j] -= h; + + let y_plus = self.compute(&inputs_plus)?; + let y_minus = self.compute(&inputs_minus)?; + + for i in 0..self.n_outputs { + jacobian[i * self.n_inputs + j] = (y_plus[i] - y_minus[i]) / (2.0 * h); + } + } + + Ok(jacobian) + } + + fn metadata(&self) -> ExternalModelMetadata { + ExternalModelMetadata { + name: self.id.clone(), + version: "1.0.0".to_string(), + description: Some("Mock external model for testing".to_string()), + input_names: (0..self.n_inputs).map(|i| format!("input_{}", i)).collect(), + output_names: (0..self.n_outputs) + .map(|i| format!("output_{}", i)) + .collect(), + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn 
test_mock_external_model_compute() { + let model = MockExternalModel::doubler(3); + let result = model.compute(&[1.0, 2.0, 3.0]).unwrap(); + assert_eq!(result, vec![2.0, 4.0, 6.0]); + } + + #[test] + fn test_mock_external_model_dimensions() { + let model = MockExternalModel::doubler(3); + assert_eq!(model.n_inputs(), 3); + assert_eq!(model.n_outputs(), 3); + } + + #[test] + fn test_mock_external_model_invalid_input() { + let model = MockExternalModel::doubler(3); + let result = model.compute(&[1.0, 2.0]); + assert!(result.is_err()); + } + + #[test] + fn test_mock_external_model_jacobian() { + let model = MockExternalModel::doubler(2); + let jac = model.jacobian(&[1.0, 2.0]).unwrap(); + + // Jacobian of y = 2x should be [[2, 0], [0, 2]] + assert!((jac[0] - 2.0).abs() < 0.01); + assert!((jac[1] - 0.0).abs() < 0.01); + assert!((jac[2] - 0.0).abs() < 0.01); + assert!((jac[3] - 2.0).abs() < 0.01); + } + + #[test] + fn test_thread_safe_wrapper() { + let model = MockExternalModel::doubler(2); + let wrapped = ThreadSafeExternalModel::new(model); + + let result = wrapped.inner().compute(&[1.0, 2.0]).unwrap(); + assert_eq!(result, vec![2.0, 4.0]); + } + + #[test] + fn test_thread_safe_clone() { + let model = MockExternalModel::doubler(2); + let wrapped = ThreadSafeExternalModel::new(model); + let cloned = wrapped.clone(); + + assert_eq!(wrapped.inner().id(), cloned.inner().id()); + } + + #[test] + fn test_external_model_metadata() { + let model = MockExternalModel::doubler(2); + let meta = model.metadata(); + + assert_eq!(meta.name, "doubler"); + assert_eq!(meta.version, "1.0.0"); + assert_eq!(meta.input_names, vec!["input_0", "input_1"]); + assert_eq!(meta.output_names, vec!["output_0", "output_1"]); + } + + #[test] + fn test_linear_passthrough_model() { + let model = MockExternalModel::linear_passthrough(3); + let result = model.compute(&[1.0, 2.0, 3.0]).unwrap(); + assert_eq!(result, vec![1.0, 2.0, 3.0]); + } + + #[test] + fn test_external_model_config() { + let config = 
ExternalModelConfig { + id: "test_model".to_string(), + model_type: ExternalModelType::Http { + base_url: "http://localhost:8080".to_string(), + api_key: Some("secret".to_string()), + }, + n_inputs: 4, + n_outputs: 2, + timeout_ms: 3000, + }; + + assert_eq!(config.id, "test_model"); + assert_eq!(config.n_inputs, 4); + assert_eq!(config.n_outputs, 2); + assert_eq!(config.timeout_ms, 3000); + } + + #[test] + fn test_error_conversion() { + let err = ExternalModelError::ComputationFailed("test error".to_string()); + let component_err: ComponentError = err.into(); + + match component_err { + ComponentError::InvalidState(msg) => { + assert!(msg.contains("External model error")); + } + _ => panic!("Expected InvalidState error"), + } + } +} diff --git a/crates/components/src/fan.rs b/crates/components/src/fan.rs new file mode 100644 index 0000000..63d76d6 --- /dev/null +++ b/crates/components/src/fan.rs @@ -0,0 +1,636 @@ +//! Fan Component Implementation +//! +//! This module provides a fan component for air handling systems using +//! polynomial performance curves and affinity laws for variable speed operation. +//! +//! ## Performance Curves +//! +//! **Static Pressure Curve:** P_s = a₀ + a₁Q + a₂Q² + a₃Q³ +//! +//! **Efficiency Curve:** η = b₀ + b₁Q + b₂Q² +//! +//! **Fan Power:** P_fan = Q × P_s / η +//! +//! ## Affinity Laws (Variable Speed) +//! +//! When operating at reduced speed (VFD): +//! - Q₂/Q₁ = N₂/N₁ +//! - P₂/P₁ = (N₂/N₁)² +//! - Pwr₂/Pwr₁ = (N₂/N₁)³ + +use crate::polynomials::{AffinityLaws, PerformanceCurves, Polynomial1D}; +use crate::port::{Connected, Disconnected, FluidId, Port}; +use crate::state_machine::StateManageable; +use crate::{ + CircuitId, Component, ComponentError, ConnectedPort, JacobianBuilder, OperationalState, + ResidualVector, SystemState, +}; +use entropyk_core::{MassFlow, Power}; +use serde::{Deserialize, Serialize}; +use std::marker::PhantomData; + +/// Fan performance curve coefficients. 
+#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct FanCurves { + /// Performance curves (static pressure, efficiency) + curves: PerformanceCurves, +} + +impl FanCurves { + /// Creates fan curves from performance curves. + pub fn new(curves: PerformanceCurves) -> Result { + curves.validate()?; + Ok(Self { curves }) + } + + /// Creates fan curves from polynomial coefficients. + /// + /// # Arguments + /// + /// * `pressure_coeffs` - Static pressure curve [a0, a1, a2, ...] in Pa + /// * `eff_coeffs` - Efficiency coefficients [b0, b1, b2, ...] as decimal + /// + /// # Units + /// + /// * Q (flow) in m³/s + /// * P_s (static pressure) in Pascals + /// * η (efficiency) as decimal (0.0 to 1.0) + pub fn from_coefficients( + pressure_coeffs: Vec, + eff_coeffs: Vec, + ) -> Result { + let pressure_curve = Polynomial1D::new(pressure_coeffs); + let eff_curve = Polynomial1D::new(eff_coeffs); + let curves = PerformanceCurves::simple(pressure_curve, eff_curve); + Self::new(curves) + } + + /// Creates a quadratic fan curve. + pub fn quadratic( + p0: f64, + p1: f64, + p2: f64, + e0: f64, + e1: f64, + e2: f64, + ) -> Result { + Self::from_coefficients(vec![p0, p1, p2], vec![e0, e1, e2]) + } + + /// Creates a cubic fan curve (common for fans). + pub fn cubic( + p0: f64, + p1: f64, + p2: f64, + p3: f64, + e0: f64, + e1: f64, + e2: f64, + ) -> Result { + Self::from_coefficients(vec![p0, p1, p2, p3], vec![e0, e1, e2]) + } + + /// Returns static pressure at given flow rate (full speed). + pub fn static_pressure_at_flow(&self, flow_m3_per_s: f64) -> f64 { + self.curves.head_curve.evaluate(flow_m3_per_s) + } + + /// Returns efficiency at given flow rate (full speed). + pub fn efficiency_at_flow(&self, flow_m3_per_s: f64) -> f64 { + let eta = self.curves.efficiency_curve.evaluate(flow_m3_per_s); + eta.clamp(0.0, 1.0) + } + + /// Returns reference to performance curves. 
+ pub fn curves(&self) -> &PerformanceCurves { + &self.curves + } +} + +impl Default for FanCurves { + fn default() -> Self { + Self::quadratic(500.0, 0.0, 0.0, 0.7, 0.0, 0.0).unwrap() + } +} + +/// Standard air properties at sea level (for reference). +pub mod standard_air { + /// Standard air density at 20°C, 101325 Pa (kg/m³) + pub const DENSITY: f64 = 1.204; + /// Standard air specific heat at constant pressure (J/(kg·K)) + pub const CP: f64 = 1005.0; +} + +/// A fan component with polynomial performance curves. +/// +/// Fans differ from pumps in that: +/// - They work with compressible fluids (air) +/// - Static pressure is typically much lower +/// - Common to use cubic curves for pressure +/// +/// # Example +/// +/// ```ignore +/// use entropyk_components::fan::{Fan, FanCurves}; +/// use entropyk_components::port::{FluidId, Port}; +/// use entropyk_core::{Pressure, Enthalpy}; +/// +/// // Create fan curves: P_s = 500 - 50*Q - 10*Q² (Pa, m³/s) +/// let curves = FanCurves::quadratic(500.0, -50.0, -10.0, 0.5, 0.2, -0.1).unwrap(); +/// +/// let inlet = Port::new( +/// FluidId::new("Air"), +/// Pressure::from_bar(1.01325), +/// Enthalpy::from_joules_per_kg(300000.0), +/// ); +/// let outlet = Port::new( +/// FluidId::new("Air"), +/// Pressure::from_bar(1.01325), +/// Enthalpy::from_joules_per_kg(300000.0), +/// ); +/// +/// let fan = Fan::new(curves, inlet, outlet, 1.2).unwrap(); +/// ``` +#[derive(Debug, Clone)] +pub struct Fan { + /// Performance curves + curves: FanCurves, + /// Inlet port + port_inlet: Port, + /// Outlet port + port_outlet: Port, + /// Air density in kg/m³ + air_density_kg_per_m3: f64, + /// Speed ratio (0.0 to 1.0) + speed_ratio: f64, + /// Circuit identifier + circuit_id: CircuitId, + /// Operational state + operational_state: OperationalState, + /// Phantom data for type state + _state: PhantomData, +} + +impl Fan { + /// Creates a new disconnected fan. 
+ /// + /// # Arguments + /// + /// * `curves` - Fan performance curves + /// * `port_inlet` - Inlet port (disconnected) + /// * `port_outlet` - Outlet port (disconnected) + /// * `air_density` - Air density in kg/m³ (use 1.2 for standard conditions) + pub fn new( + curves: FanCurves, + port_inlet: Port, + port_outlet: Port, + air_density: f64, + ) -> Result { + if port_inlet.fluid_id() != port_outlet.fluid_id() { + return Err(ComponentError::InvalidState( + "Inlet and outlet ports must have the same fluid type".to_string(), + )); + } + + if air_density <= 0.0 { + return Err(ComponentError::InvalidState( + "Air density must be positive".to_string(), + )); + } + + Ok(Self { + curves, + port_inlet, + port_outlet, + air_density_kg_per_m3: air_density, + speed_ratio: 1.0, + circuit_id: CircuitId::default(), + operational_state: OperationalState::default(), + _state: PhantomData, + }) + } + + /// Returns the fluid identifier. + pub fn fluid_id(&self) -> &FluidId { + self.port_inlet.fluid_id() + } + + /// Returns the air density. + pub fn air_density(&self) -> f64 { + self.air_density_kg_per_m3 + } + + /// Returns the speed ratio. + pub fn speed_ratio(&self) -> f64 { + self.speed_ratio + } + + /// Sets the speed ratio (0.0 to 1.0). + pub fn set_speed_ratio(&mut self, ratio: f64) -> Result<(), ComponentError> { + if !(0.0..=1.0).contains(&ratio) { + return Err(ComponentError::InvalidState( + "Speed ratio must be between 0.0 and 1.0".to_string(), + )); + } + self.speed_ratio = ratio; + Ok(()) + } +} + +impl Fan { + /// Returns the inlet port. + pub fn port_inlet(&self) -> &Port { + &self.port_inlet + } + + /// Returns the outlet port. + pub fn port_outlet(&self) -> &Port { + &self.port_outlet + } + + /// Calculates the static pressure rise across the fan. + /// + /// Applies affinity laws for variable speed operation. 
    pub fn static_pressure_rise(&self, flow_m3_per_s: f64) -> f64 {
        // Fan stopped: no pressure rise regardless of flow.
        if self.speed_ratio <= 0.0 {
            return 0.0;
        }

        // Shut-off / backward flow: scale the curve's Q=0 pressure.
        // Per the affinity laws, pressure scales with speed ratio squared
        // (see test_fan_pressure_rise_half_speed: 500 Pa -> 125 Pa at 50%).
        if flow_m3_per_s <= 0.0 {
            let pressure = self.curves.static_pressure_at_flow(0.0);
            return AffinityLaws::scale_head(pressure, self.speed_ratio);
        }

        // Map the actual flow back to its full-speed equivalent, read the
        // curve there, then scale the resulting pressure down to speed.
        let equivalent_flow = AffinityLaws::unscale_flow(flow_m3_per_s, self.speed_ratio);
        let pressure = self.curves.static_pressure_at_flow(equivalent_flow);
        AffinityLaws::scale_head(pressure, self.speed_ratio)
    }

    /// Calculates total pressure (static + velocity pressure).
    ///
    /// Total pressure = Static pressure + ½ρv²
    ///
    /// # Arguments
    ///
    /// * `flow_m3_per_s` - Volumetric flow rate (m³/s)
    /// * `duct_area_m2` - Duct cross-sectional area (m²); non-positive
    ///   values disable the velocity-pressure term
    pub fn total_pressure_rise(&self, flow_m3_per_s: f64, duct_area_m2: f64) -> f64 {
        let static_p = self.static_pressure_rise(flow_m3_per_s);

        // Degenerate duct area would divide by zero below; fall back to
        // static pressure only.
        if duct_area_m2 <= 0.0 {
            return static_p;
        }

        // Velocity pressure: P_v = ½ρv², with v = Q / A
        let velocity = flow_m3_per_s / duct_area_m2;
        let velocity_pressure = 0.5 * self.air_density_kg_per_m3 * velocity * velocity;

        static_p + velocity_pressure
    }

    /// Calculates efficiency at the given flow rate.
    ///
    /// The flow is mapped to its full-speed equivalent via the affinity
    /// laws before the efficiency curve is evaluated.
    pub fn efficiency(&self, flow_m3_per_s: f64) -> f64 {
        // Fan stopped: no useful work is done, efficiency is zero.
        if self.speed_ratio <= 0.0 {
            return 0.0;
        }

        // Zero/backward flow: report the curve's shut-off efficiency.
        if flow_m3_per_s <= 0.0 {
            return self.curves.efficiency_at_flow(0.0);
        }

        let equivalent_flow = AffinityLaws::unscale_flow(flow_m3_per_s, self.speed_ratio);
        self.curves.efficiency_at_flow(equivalent_flow)
    }

    /// Calculates the fan power consumption.
    ///
    /// P_fan = Q × P_s / η
    ///
    /// Returns zero power for non-positive flow, zero speed, or a
    /// non-positive efficiency (avoids division by zero).
    pub fn fan_power(&self, flow_m3_per_s: f64) -> Power {
        if flow_m3_per_s <= 0.0 || self.speed_ratio <= 0.0 {
            return Power::from_watts(0.0);
        }

        let pressure = self.static_pressure_rise(flow_m3_per_s);
        let eta = self.efficiency(flow_m3_per_s);

        // η ≤ 0 would blow up the division below; treat as "not running".
        if eta <= 0.0 {
            return Power::from_watts(0.0);
        }

        let power_w = flow_m3_per_s * pressure / eta;
        Power::from_watts(power_w)
    }

    /// Calculates mass flow from volumetric flow (ṁ = ρ·Q).
    pub fn mass_flow_from_volumetric(&self, flow_m3_per_s: f64) -> MassFlow {
        MassFlow::from_kg_per_s(flow_m3_per_s * self.air_density_kg_per_m3)
    }

    /// Calculates volumetric flow (m³/s) from mass flow (Q = ṁ / ρ).
    pub fn volumetric_from_mass_flow(&self, mass_flow: MassFlow) -> f64 {
        mass_flow.to_kg_per_s() / self.air_density_kg_per_m3
    }

    /// Returns the air density (kg/m³).
    ///
    /// NOTE(review): an identically named accessor exists on the other
    /// `impl Fan` block above; presumably the two impl blocks target
    /// different connection type states — confirm against the full file.
    pub fn air_density(&self) -> f64 {
        self.air_density_kg_per_m3
    }

    /// Returns the speed ratio (0.0 = stopped, 1.0 = full speed).
    pub fn speed_ratio(&self) -> f64 {
        self.speed_ratio
    }

    /// Sets the speed ratio (0.0 to 1.0).
    ///
    /// # Errors
    ///
    /// Returns `ComponentError::InvalidState` when `ratio` falls outside
    /// [0.0, 1.0] (NaN also fails the range check).
    pub fn set_speed_ratio(&mut self, ratio: f64) -> Result<(), ComponentError> {
        if !(0.0..=1.0).contains(&ratio) {
            return Err(ComponentError::InvalidState(
                "Speed ratio must be between 0.0 and 1.0".to_string(),
            ));
        }
        self.speed_ratio = ratio;
        Ok(())
    }

    /// Returns both ports as a slice for solver topology.
+ pub fn get_ports_slice(&self) -> [&Port; 2] { + [&self.port_inlet, &self.port_outlet] + } +} + +impl Component for Fan { + fn compute_residuals( + &self, + state: &SystemState, + residuals: &mut ResidualVector, + ) -> Result<(), ComponentError> { + if residuals.len() != self.n_equations() { + return Err(ComponentError::InvalidResidualDimensions { + expected: self.n_equations(), + actual: residuals.len(), + }); + } + + match self.operational_state { + OperationalState::Off => { + residuals[0] = state[0]; + residuals[1] = 0.0; + return Ok(()); + } + OperationalState::Bypass => { + let p_in = self.port_inlet.pressure().to_pascals(); + let p_out = self.port_outlet.pressure().to_pascals(); + let h_in = self.port_inlet.enthalpy().to_joules_per_kg(); + let h_out = self.port_outlet.enthalpy().to_joules_per_kg(); + + residuals[0] = p_in - p_out; + residuals[1] = h_in - h_out; + return Ok(()); + } + OperationalState::On => {} + } + + if state.len() < 2 { + return Err(ComponentError::InvalidStateDimensions { + expected: 2, + actual: state.len(), + }); + } + + let mass_flow_kg_s = state[0]; + let _power_w = state[1]; + + let flow_m3_s = mass_flow_kg_s / self.air_density_kg_per_m3; + let delta_p_calc = self.static_pressure_rise(flow_m3_s); + + let p_in = self.port_inlet.pressure().to_pascals(); + let p_out = self.port_outlet.pressure().to_pascals(); + let delta_p_actual = p_out - p_in; + + residuals[0] = delta_p_calc - delta_p_actual; + + let power_calc = self.fan_power(flow_m3_s).to_watts(); + residuals[1] = power_calc - _power_w; + + Ok(()) + } + + fn jacobian_entries( + &self, + state: &SystemState, + jacobian: &mut JacobianBuilder, + ) -> Result<(), ComponentError> { + if state.len() < 2 { + return Err(ComponentError::InvalidStateDimensions { + expected: 2, + actual: state.len(), + }); + } + + let mass_flow_kg_s = state[0]; + let flow_m3_s = mass_flow_kg_s / self.air_density_kg_per_m3; + + let h = 0.001; + let p_plus = self.static_pressure_rise(flow_m3_s + h / 
self.air_density_kg_per_m3); + let p_minus = self.static_pressure_rise(flow_m3_s - h / self.air_density_kg_per_m3); + let dp_dm = (p_plus - p_minus) / (2.0 * h); + + jacobian.add_entry(0, 0, dp_dm); + jacobian.add_entry(0, 1, 0.0); + + let pow_plus = self + .fan_power(flow_m3_s + h / self.air_density_kg_per_m3) + .to_watts(); + let pow_minus = self + .fan_power(flow_m3_s - h / self.air_density_kg_per_m3) + .to_watts(); + let dpow_dm = (pow_plus - pow_minus) / (2.0 * h); + + jacobian.add_entry(1, 0, dpow_dm); + jacobian.add_entry(1, 1, -1.0); + + Ok(()) + } + + fn n_equations(&self) -> usize { + 2 + } + + fn get_ports(&self) -> &[ConnectedPort] { + &[] + } +} + +impl StateManageable for Fan { + fn state(&self) -> OperationalState { + self.operational_state + } + + fn set_state(&mut self, state: OperationalState) -> Result<(), ComponentError> { + if self.operational_state.can_transition_to(state) { + let from = self.operational_state; + self.operational_state = state; + self.on_state_change(from, state); + Ok(()) + } else { + Err(ComponentError::InvalidStateTransition { + from: self.operational_state, + to: state, + reason: "Transition not allowed".to_string(), + }) + } + } + + fn can_transition_to(&self, target: OperationalState) -> bool { + self.operational_state.can_transition_to(target) + } + + fn circuit_id(&self) -> &CircuitId { + &self.circuit_id + } + + fn set_circuit_id(&mut self, circuit_id: CircuitId) { + self.circuit_id = circuit_id; + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::port::FluidId; + use approx::assert_relative_eq; + use entropyk_core::{Enthalpy, Pressure}; + + fn create_test_curves() -> FanCurves { + // Typical centrifugal fan: + // P_s = 500 - 100*Q - 200*Q² (Pa, Q in m³/s) + // η = 0.5 + 0.3*Q - 0.5*Q² + FanCurves::quadratic(500.0, -100.0, -200.0, 0.5, 0.3, -0.5).unwrap() + } + + fn create_test_fan_connected() -> Fan { + let curves = create_test_curves(); + let inlet = Port::new( + FluidId::new("Air"), + 
Pressure::from_bar(1.01325), + Enthalpy::from_joules_per_kg(300000.0), + ); + let outlet = Port::new( + FluidId::new("Air"), + Pressure::from_bar(1.01325), + Enthalpy::from_joules_per_kg(300000.0), + ); + let (inlet_conn, outlet_conn) = inlet.connect(outlet).unwrap(); + + Fan { + curves, + port_inlet: inlet_conn, + port_outlet: outlet_conn, + air_density_kg_per_m3: 1.2, + speed_ratio: 1.0, + circuit_id: CircuitId::default(), + operational_state: OperationalState::default(), + _state: PhantomData, + } + } + + #[test] + fn test_fan_curves_creation() { + let curves = create_test_curves(); + assert_eq!(curves.static_pressure_at_flow(0.0), 500.0); + assert_relative_eq!(curves.efficiency_at_flow(0.0), 0.5); + } + + #[test] + fn test_fan_static_pressure() { + let curves = create_test_curves(); + // P_s = 500 - 100*1 - 200*1 = 200 Pa + let pressure = curves.static_pressure_at_flow(1.0); + assert_relative_eq!(pressure, 200.0, epsilon = 1e-10); + } + + #[test] + fn test_fan_creation() { + let fan = create_test_fan_connected(); + assert_relative_eq!(fan.air_density(), 1.2, epsilon = 1e-10); + assert_eq!(fan.speed_ratio(), 1.0); + } + + #[test] + fn test_fan_pressure_rise_full_speed() { + let fan = create_test_fan_connected(); + let pressure = fan.static_pressure_rise(0.0); + assert_relative_eq!(pressure, 500.0, epsilon = 1e-10); + } + + #[test] + fn test_fan_pressure_rise_half_speed() { + let mut fan = create_test_fan_connected(); + fan.set_speed_ratio(0.5).unwrap(); + + // At 50% speed, shut-off pressure is 25% of full speed + let pressure = fan.static_pressure_rise(0.0); + assert_relative_eq!(pressure, 125.0, epsilon = 1e-10); + } + + #[test] + fn test_fan_fan_power() { + let fan = create_test_fan_connected(); + + // At Q=1 m³/s: P_s ≈ 200 Pa, η ≈ 0.3 + // P = 1 * 200 / 0.3 ≈ 667 W + let power = fan.fan_power(1.0); + assert!(power.to_watts() > 0.0); + assert!(power.to_watts() < 2000.0); + } + + #[test] + fn test_fan_affinity_laws_power() { + let fan_full = 
create_test_fan_connected(); + + let mut fan_half = create_test_fan_connected(); + fan_half.set_speed_ratio(0.5).unwrap(); + + let power_full = fan_full.fan_power(1.0); + let power_half = fan_half.fan_power(0.5); + + // Ratio should be approximately 0.125 (cube law) + let ratio = power_half.to_watts() / power_full.to_watts(); + assert_relative_eq!(ratio, 0.125, epsilon = 0.1); + } + + #[test] + fn test_fan_total_pressure() { + let fan = create_test_fan_connected(); + + // With a duct area of 0.5 m² + let total_p = fan.total_pressure_rise(1.0, 0.5); + let static_p = fan.static_pressure_rise(1.0); + + // Total > Static due to velocity pressure + assert!(total_p > static_p); + } + + #[test] + fn test_fan_component_n_equations() { + let fan = create_test_fan_connected(); + assert_eq!(fan.n_equations(), 2); + } + + #[test] + fn test_fan_state_manageable() { + let fan = create_test_fan_connected(); + assert_eq!(fan.state(), OperationalState::On); + assert!(fan.can_transition_to(OperationalState::Off)); + } + + #[test] + fn test_standard_air_constants() { + assert_relative_eq!(standard_air::DENSITY, 1.204, epsilon = 0.01); + assert_relative_eq!(standard_air::CP, 1005.0); + } +} diff --git a/crates/components/src/heat_exchanger/condenser.rs b/crates/components/src/heat_exchanger/condenser.rs new file mode 100644 index 0000000..ca2d19d --- /dev/null +++ b/crates/components/src/heat_exchanger/condenser.rs @@ -0,0 +1,249 @@ +//! Condenser Component +//! +//! A heat exchanger configured for refrigerant condensation. +//! The refrigerant (hot side) condenses from superheated vapor to +//! subcooled liquid, releasing heat to the cold side. + +use super::exchanger::HeatExchanger; +use super::lmtd::{FlowConfiguration, LmtdModel}; +use entropyk_core::Calib; +use crate::{ + Component, ComponentError, ConnectedPort, JacobianBuilder, ResidualVector, SystemState, +}; +use crate::state_machine::{CircuitId, OperationalState, StateManageable}; + +/// Condenser heat exchanger. 
+/// +/// Uses the LMTD method for heat transfer calculation. +/// The refrigerant condenses on the hot side, releasing heat +/// to the cold side (typically water or air). +/// +/// # Configuration +/// +/// - Hot side: Refrigerant condensing (phase change) +/// - Cold side: Heat sink (water, air, etc.) +/// +/// # Example +/// +/// ``` +/// use entropyk_components::heat_exchanger::Condenser; +/// use entropyk_components::Component; +/// +/// let condenser = Condenser::new(10_000.0); // UA = 10 kW/K +/// assert_eq!(condenser.n_equations(), 3); +/// ``` +#[derive(Debug)] +pub struct Condenser { + /// Inner heat exchanger with LMTD model + inner: HeatExchanger, + /// Saturation temperature for condensation (K) + saturation_temp: f64, +} + +impl Condenser { + /// Creates a new condenser with the given UA value. + /// + /// # Arguments + /// + /// * `ua` - Overall heat transfer coefficient × Area (W/K) + /// + /// # Example + /// + /// ``` + /// use entropyk_components::heat_exchanger::Condenser; + /// + /// let condenser = Condenser::new(15_000.0); + /// ``` + pub fn new(ua: f64) -> Self { + let model = LmtdModel::new(ua, FlowConfiguration::CounterFlow); + Self { + inner: HeatExchanger::new(model, "Condenser"), + saturation_temp: 323.15, + } + } + + /// Creates a condenser with a specific saturation temperature. + pub fn with_saturation_temp(ua: f64, saturation_temp: f64) -> Self { + let model = LmtdModel::new(ua, FlowConfiguration::CounterFlow); + Self { + inner: HeatExchanger::new(model, "Condenser"), + saturation_temp, + } + } + + /// Returns the name of this condenser. + pub fn name(&self) -> &str { + self.inner.name() + } + + /// Returns the UA value (effective: f_ua × UA_nominal). + pub fn ua(&self) -> f64 { + self.inner.ua() + } + + /// Returns calibration factors (f_ua for condenser). + pub fn calib(&self) -> &Calib { + self.inner.calib() + } + + /// Sets calibration factors. 
    pub fn set_calib(&mut self, calib: Calib) {
        self.inner.set_calib(calib);
    }

    /// Returns the saturation temperature (K).
    pub fn saturation_temp(&self) -> f64 {
        self.saturation_temp
    }

    /// Sets the saturation temperature (K).
    pub fn set_saturation_temp(&mut self, temp: f64) {
        self.saturation_temp = temp;
    }

    /// Validates that the outlet quality is <= 1 (fully condensed or subcooled).
    ///
    /// Quality is computed as x = (h_out − h_liq) / (h_vap − h_liq); a small
    /// tolerance (1e-6) absorbs floating-point noise right at x = 1.
    ///
    /// # Arguments
    ///
    /// * `outlet_enthalpy` - Outlet specific enthalpy (J/kg)
    /// * `h_liquid` - Saturated liquid enthalpy at condensing pressure (J/kg)
    /// * `h_vapor` - Saturated vapor enthalpy at condensing pressure (J/kg)
    ///
    /// # Returns
    ///
    /// `Ok(true)` when the outlet is fully condensed or subcooled.
    ///
    /// # Errors
    ///
    /// * `NumericalError` when `h_vapor <= h_liquid` (inconsistent saturation data)
    /// * `InvalidState` when the outlet is still superheated (quality > 1)
    pub fn validate_outlet_quality(
        &self,
        outlet_enthalpy: f64,
        h_liquid: f64,
        h_vapor: f64,
    ) -> Result {
        // Degenerate saturation data would make the quality division
        // meaningless (or divide by zero), so reject it up front.
        if h_vapor <= h_liquid {
            return Err(ComponentError::NumericalError(
                "Invalid saturation enthalpies".to_string(),
            ));
        }

        let quality = (outlet_enthalpy - h_liquid) / (h_vapor - h_liquid);

        if quality <= 1.0 + 1e-6 {
            Ok(true)
        } else {
            Err(ComponentError::InvalidState(format!(
                "Condenser outlet quality {} > 1 (superheated)",
                quality
            )))
        }
    }

    /// Computes the full thermodynamic state at the hot inlet.
    ///
    /// Delegates to the inner heat exchanger.
    pub fn hot_inlet_state(&self) -> Result {
        self.inner.hot_inlet_state()
    }

    /// Computes the full thermodynamic state at the cold inlet.
+ pub fn cold_inlet_state(&self) -> Result { + self.inner.cold_inlet_state() + } +} + +impl Component for Condenser { + fn compute_residuals( + &self, + state: &SystemState, + residuals: &mut ResidualVector, + ) -> Result<(), ComponentError> { + self.inner.compute_residuals(state, residuals) + } + + fn jacobian_entries( + &self, + state: &SystemState, + jacobian: &mut JacobianBuilder, + ) -> Result<(), ComponentError> { + self.inner.jacobian_entries(state, jacobian) + } + + fn n_equations(&self) -> usize { + self.inner.n_equations() + } + + fn get_ports(&self) -> &[ConnectedPort] { + self.inner.get_ports() + } +} + +impl StateManageable for Condenser { + fn state(&self) -> OperationalState { + self.inner.state() + } + + fn set_state(&mut self, state: OperationalState) -> Result<(), ComponentError> { + self.inner.set_state(state) + } + + fn can_transition_to(&self, target: OperationalState) -> bool { + self.inner.can_transition_to(target) + } + + fn circuit_id(&self) -> &CircuitId { + self.inner.circuit_id() + } + + fn set_circuit_id(&mut self, circuit_id: CircuitId) { + self.inner.set_circuit_id(circuit_id); + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_condenser_creation() { + let condenser = Condenser::new(10_000.0); + assert_eq!(condenser.ua(), 10_000.0); + assert_eq!(condenser.n_equations(), 3); + } + + #[test] + fn test_condenser_with_saturation_temp() { + let condenser = Condenser::with_saturation_temp(10_000.0, 323.15); + assert_eq!(condenser.saturation_temp(), 323.15); + } + + #[test] + fn test_validate_outlet_quality_fully_condensed() { + let condenser = Condenser::new(10_000.0); + + let h_liquid = 200_000.0; + let h_vapor = 400_000.0; + let outlet_h = 180_000.0; + + let result = condenser.validate_outlet_quality(outlet_h, h_liquid, h_vapor); + assert!(result.is_ok()); + } + + #[test] + fn test_validate_outlet_quality_superheated() { + let condenser = Condenser::new(10_000.0); + + let h_liquid = 200_000.0; + let h_vapor = 
400_000.0; + let outlet_h = 450_000.0; + + let result = condenser.validate_outlet_quality(outlet_h, h_liquid, h_vapor); + assert!(result.is_err()); + } + + #[test] + fn test_compute_residuals() { + let condenser = Condenser::new(10_000.0); + + let state = vec![0.0; 10]; + let mut residuals = vec![0.0; 3]; + + let result = condenser.compute_residuals(&state, &mut residuals); + assert!(result.is_ok()); + } +} diff --git a/crates/components/src/heat_exchanger/condenser_coil.rs b/crates/components/src/heat_exchanger/condenser_coil.rs new file mode 100644 index 0000000..1ceeb60 --- /dev/null +++ b/crates/components/src/heat_exchanger/condenser_coil.rs @@ -0,0 +1,195 @@ +//! Condenser Coil Component +//! +//! An air-side (finned) heat exchanger for refrigerant condensation. +//! The refrigerant (hot side) condenses, releasing heat to air (cold side). +//! Used in split systems and air-source heat pumps. +//! +//! ## Port Convention +//! +//! - **Hot side (refrigerant)**: Condensing +//! - **Cold side (air)**: Heat sink — connect to Fan outlet/inlet +//! +//! ## Integration with Fan +//! +//! Connect Fan outlet → CondenserCoil air inlet, CondenserCoil air outlet → Fan inlet. +//! Use `FluidId::new("Air")` for air ports. + +use super::condenser::Condenser; +use crate::{ + Component, ComponentError, ConnectedPort, JacobianBuilder, ResidualVector, SystemState, +}; +use crate::state_machine::{CircuitId, OperationalState, StateManageable}; + +/// Condenser coil (air-side finned heat exchanger). +/// +/// Explicit component for air-source condensers. Uses LMTD method. +/// Refrigerant condenses on hot side, air on cold side. 
+/// +/// # Example +/// +/// ``` +/// use entropyk_components::heat_exchanger::CondenserCoil; +/// use entropyk_components::Component; +/// +/// let coil = CondenserCoil::new(10_000.0); // UA = 10 kW/K +/// assert_eq!(coil.ua(), 10_000.0); +/// assert_eq!(coil.n_equations(), 3); +/// ``` +#[derive(Debug)] +pub struct CondenserCoil { + inner: Condenser, +} + +impl CondenserCoil { + /// Creates a new condenser coil with the given UA value. + /// + /// # Arguments + /// + /// * `ua` - Overall heat transfer coefficient × Area (W/K) + pub fn new(ua: f64) -> Self { + Self { + inner: Condenser::new(ua), + } + } + + /// Creates a condenser coil with a specific saturation temperature. + pub fn with_saturation_temp(ua: f64, saturation_temp: f64) -> Self { + Self { + inner: Condenser::with_saturation_temp(ua, saturation_temp), + } + } + + /// Returns the name of this component. + pub fn name(&self) -> &str { + "CondenserCoil" + } + + /// Returns the UA value. + pub fn ua(&self) -> f64 { + self.inner.ua() + } + + /// Returns the saturation temperature. + pub fn saturation_temp(&self) -> f64 { + self.inner.saturation_temp() + } + + /// Sets the saturation temperature. 
+ pub fn set_saturation_temp(&mut self, temp: f64) { + self.inner.set_saturation_temp(temp); + } +} + +impl Component for CondenserCoil { + fn compute_residuals( + &self, + state: &SystemState, + residuals: &mut ResidualVector, + ) -> Result<(), ComponentError> { + self.inner.compute_residuals(state, residuals) + } + + fn jacobian_entries( + &self, + state: &SystemState, + jacobian: &mut JacobianBuilder, + ) -> Result<(), ComponentError> { + self.inner.jacobian_entries(state, jacobian) + } + + fn n_equations(&self) -> usize { + self.inner.n_equations() + } + + fn get_ports(&self) -> &[ConnectedPort] { + self.inner.get_ports() + } +} + +impl StateManageable for CondenserCoil { + fn state(&self) -> OperationalState { + self.inner.state() + } + + fn set_state(&mut self, state: OperationalState) -> Result<(), ComponentError> { + self.inner.set_state(state) + } + + fn can_transition_to(&self, target: OperationalState) -> bool { + self.inner.can_transition_to(target) + } + + fn circuit_id(&self) -> &CircuitId { + self.inner.circuit_id() + } + + fn set_circuit_id(&mut self, circuit_id: CircuitId) { + self.inner.set_circuit_id(circuit_id); + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_condenser_coil_creation() { + let coil = CondenserCoil::new(10_000.0); + assert_eq!(coil.ua(), 10_000.0); + assert_eq!(coil.name(), "CondenserCoil"); + } + + #[test] + fn test_condenser_coil_n_equations() { + let coil = CondenserCoil::new(10_000.0); + assert_eq!(coil.n_equations(), 3); + } + + #[test] + fn test_condenser_coil_with_saturation_temp() { + let coil = CondenserCoil::with_saturation_temp(10_000.0, 323.15); + assert_eq!(coil.saturation_temp(), 323.15); + } + + #[test] + fn test_condenser_coil_compute_residuals() { + let coil = CondenserCoil::new(10_000.0); + let state = vec![0.0; 10]; + let mut residuals = vec![0.0; 3]; + let result = coil.compute_residuals(&state, &mut residuals); + assert!(result.is_ok()); + assert!(residuals.iter().all(|r| 
r.is_finite()), "residuals must be finite"); + } + + #[test] + fn test_condenser_coil_jacobian_entries() { + let coil = CondenserCoil::new(10_000.0); + let state = vec![0.0; 10]; + let mut jacobian = crate::JacobianBuilder::new(); + let result = coil.jacobian_entries(&state, &mut jacobian); + assert!(result.is_ok()); + // HeatExchanger base returns empty jacobian until framework implements it + assert!( + jacobian.is_empty(), + "delegation works; empty jacobian expected until HeatExchanger implements entries" + ); + } + + #[test] + fn test_condenser_coil_set_saturation_temp() { + let mut coil = CondenserCoil::new(10_000.0); + coil.set_saturation_temp(320.0); + assert!((coil.saturation_temp() - 320.0).abs() < 1e-10); + } + + #[test] + fn test_condenser_coil_state_manageable() { + use crate::state_machine::{OperationalState, StateManageable}; + + let mut coil = CondenserCoil::new(10_000.0); + assert_eq!(coil.state(), OperationalState::On); + assert!(coil.can_transition_to(OperationalState::Off)); + assert!(coil.set_state(OperationalState::Off).is_ok()); + assert_eq!(coil.state(), OperationalState::Off); + } +} diff --git a/crates/components/src/heat_exchanger/economizer.rs b/crates/components/src/heat_exchanger/economizer.rs new file mode 100644 index 0000000..d230b6b --- /dev/null +++ b/crates/components/src/heat_exchanger/economizer.rs @@ -0,0 +1,251 @@ +//! Economizer Component +//! +//! An internal heat exchanger with bypass support for refrigeration systems. +/// Can be switched between ON (active heat exchange), OFF (no flow), and +/// BYPASS (adiabatic pipe) modes. +use super::exchanger::HeatExchanger; +use super::lmtd::{FlowConfiguration, LmtdModel}; +use crate::{ + Component, ComponentError, ConnectedPort, JacobianBuilder, OperationalState, ResidualVector, + SystemState, +}; + +/// Economizer (internal heat exchanger) with state machine support. 
+/// +/// The economizer can operate in three modes: +/// - **ON**: Normal heat exchange between suction and liquid lines +/// - **OFF**: No mass flow contribution (component disabled) +/// - **BYPASS**: Adiabatic pipe (P_in = P_out, h_in = h_out) +/// +/// # Example +/// +/// ``` +/// use entropyk_components::heat_exchanger::Economizer; +/// use entropyk_components::OperationalState; +/// +/// let mut economizer = Economizer::new(2_000.0); +/// assert_eq!(economizer.state(), OperationalState::On); +/// +/// economizer.set_state(OperationalState::Bypass); +/// assert_eq!(economizer.state(), OperationalState::Bypass); +/// ``` +#[derive(Debug)] +pub struct Economizer { + /// Inner heat exchanger with LMTD model + inner: HeatExchanger, + /// Operational state + state: OperationalState, +} + +impl Economizer { + /// Creates a new economizer with the given UA value. + /// + /// # Arguments + /// + /// * `ua` - Overall heat transfer coefficient × Area (W/K) + /// + /// # Example + /// + /// ``` + /// use entropyk_components::heat_exchanger::Economizer; + /// + /// let economizer = Economizer::new(2_000.0); + /// ``` + pub fn new(ua: f64) -> Self { + let model = LmtdModel::new(ua, FlowConfiguration::CounterFlow); + Self { + inner: HeatExchanger::new(model, "Economizer"), + state: OperationalState::On, + } + } + + /// Creates an economizer in a specific state. + pub fn with_state(ua: f64, state: OperationalState) -> Self { + let model = LmtdModel::new(ua, FlowConfiguration::CounterFlow); + Self { + inner: HeatExchanger::new(model, "Economizer"), + state, + } + } + + /// Returns the name of this economizer. + pub fn name(&self) -> &str { + self.inner.name() + } + + /// Returns the UA value. + pub fn ua(&self) -> f64 { + self.inner.ua() + } + + /// Returns the current operational state. + pub fn state(&self) -> OperationalState { + self.state + } + + /// Sets the operational state. 
+ pub fn set_state(&mut self, state: OperationalState) { + self.state = state; + } + + /// Returns true if the economizer is active (ON or BYPASS). + pub fn is_active(&self) -> bool { + self.state.is_active() + } + + /// Returns true if in bypass mode (adiabatic pipe behavior). + pub fn is_bypass(&self) -> bool { + self.state.is_bypass() + } + + /// Returns the mass flow multiplier based on state. + pub fn mass_flow_multiplier(&self) -> f64 { + self.state.mass_flow_multiplier() + } + + /// Computes bypass residuals (P_in = P_out, h_in = h_out). + fn compute_bypass_residuals(&self, residuals: &mut ResidualVector) { + residuals[0] = 0.0; + residuals[1] = 0.0; + residuals[2] = 0.0; + } + + /// Computes off residuals (zero flow). + fn compute_off_residuals(&self, residuals: &mut ResidualVector) { + residuals[0] = 0.0; + residuals[1] = 0.0; + residuals[2] = 0.0; + } +} + +impl Component for Economizer { + fn compute_residuals( + &self, + state: &SystemState, + residuals: &mut ResidualVector, + ) -> Result<(), ComponentError> { + if residuals.len() < self.n_equations() { + return Err(ComponentError::InvalidResidualDimensions { + expected: self.n_equations(), + actual: residuals.len(), + }); + } + + match self.state { + OperationalState::On => self.inner.compute_residuals(state, residuals), + OperationalState::Off => { + self.compute_off_residuals(residuals); + Ok(()) + } + OperationalState::Bypass => { + self.compute_bypass_residuals(residuals); + Ok(()) + } + } + } + + fn jacobian_entries( + &self, + state: &SystemState, + jacobian: &mut JacobianBuilder, + ) -> Result<(), ComponentError> { + match self.state { + OperationalState::On => self.inner.jacobian_entries(state, jacobian), + OperationalState::Off | OperationalState::Bypass => Ok(()), + } + } + + fn n_equations(&self) -> usize { + self.inner.n_equations() + } + + fn get_ports(&self) -> &[ConnectedPort] { + self.inner.get_ports() + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn 
test_economizer_creation() { + let economizer = Economizer::new(2_000.0); + assert_eq!(economizer.ua(), 2_000.0); + assert_eq!(economizer.state(), OperationalState::On); + } + + #[test] + fn test_economizer_with_state() { + let economizer = Economizer::with_state(2_000.0, OperationalState::Bypass); + assert_eq!(economizer.state(), OperationalState::Bypass); + } + + #[test] + fn test_state_transitions() { + let mut economizer = Economizer::new(2_000.0); + + assert!(economizer.is_active()); + assert!(!economizer.is_bypass()); + + economizer.set_state(OperationalState::Bypass); + assert!(economizer.is_active()); + assert!(economizer.is_bypass()); + + economizer.set_state(OperationalState::Off); + assert!(!economizer.is_active()); + assert!(!economizer.is_bypass()); + } + + #[test] + fn test_mass_flow_multiplier() { + let mut economizer = Economizer::new(2_000.0); + + assert_eq!(economizer.mass_flow_multiplier(), 1.0); + + economizer.set_state(OperationalState::Bypass); + assert_eq!(economizer.mass_flow_multiplier(), 1.0); + + economizer.set_state(OperationalState::Off); + assert_eq!(economizer.mass_flow_multiplier(), 0.0); + } + + #[test] + fn test_compute_residuals_on() { + let economizer = Economizer::new(2_000.0); + + let state = vec![0.0; 10]; + let mut residuals = vec![0.0; 3]; + + let result = economizer.compute_residuals(&state, &mut residuals); + assert!(result.is_ok()); + } + + #[test] + fn test_compute_residuals_bypass() { + let economizer = Economizer::with_state(2_000.0, OperationalState::Bypass); + + let state = vec![0.0; 10]; + let mut residuals = vec![0.0; 3]; + + let result = economizer.compute_residuals(&state, &mut residuals); + assert!(result.is_ok()); + } + + #[test] + fn test_compute_residuals_off() { + let economizer = Economizer::with_state(2_000.0, OperationalState::Off); + + let state = vec![0.0; 10]; + let mut residuals = vec![0.0; 3]; + + let result = economizer.compute_residuals(&state, &mut residuals); + assert!(result.is_ok()); + } + + 
#[test] + fn test_n_equations() { + let economizer = Economizer::new(2_000.0); + assert_eq!(economizer.n_equations(), 3); + } +} diff --git a/crates/components/src/heat_exchanger/eps_ntu.rs b/crates/components/src/heat_exchanger/eps_ntu.rs new file mode 100644 index 0000000..787cef2 --- /dev/null +++ b/crates/components/src/heat_exchanger/eps_ntu.rs @@ -0,0 +1,344 @@ +//! Effectiveness-NTU (ε-NTU) Model +//! +//! Implements the ε-NTU method for heat exchanger calculations. +//! +//! ## Theory +//! +//! The heat transfer rate is calculated as: +//! +//! $$\dot{Q} = \varepsilon \cdot \dot{Q}_{max} = \varepsilon \cdot C_{min} \cdot (T_{hot,in} - T_{cold,in})$$ +//! +//! Where: +//! - $\varepsilon$: Effectiveness (0 to 1) +//! - $C_{min} = \min(\dot{m}_{hot} \cdot c_{p,hot}, \dot{m}_{cold} \cdot c_{p,cold})$: Minimum heat capacity rate +//! - $NTU = UA / C_{min}$: Number of Transfer Units +//! - $C_r = C_{min} / C_{max}$: Heat capacity ratio +//! +//! ## Zero-flow regularization (Story 3.5) +//! +//! When $C_{min} < 10^{-10}$ (e.g. zero mass flow on one side), heat transfer is set to zero +//! and divisions by $C_{min}$ or $C_r$ are avoided to prevent NaN/Inf. +//! +//! Note: This module uses `1e-10` kW/K for capacity rate regularization, which is appropriate +//! for the kW/K scale. For mass flow regularization at the kg/s scale, see +//! [`MIN_MASS_FLOW_REGULARIZATION_KG_S`](entropyk_core::MIN_MASS_FLOW_REGULARIZATION_KG_S). +//! +//! For counter-flow: +//! $$\varepsilon = \frac{1 - \exp(-NTU \cdot (1 - C_r))}{1 - C_r \cdot \exp(-NTU \cdot (1 - C_r))}$$ + +use super::model::{FluidState, HeatTransferModel}; +use crate::ResidualVector; +use entropyk_core::Power; + +/// Heat exchanger type for ε-NTU calculations. 
#[derive(Debug, Clone, Copy, PartialEq, Default)]
pub enum ExchangerType {
    /// Counter-flow (most efficient)
    #[default]
    CounterFlow,
    /// Parallel-flow (co-current)
    ParallelFlow,
    /// Cross-flow, both fluids unmixed
    CrossFlowUnmixed,
    /// Cross-flow, one fluid mixed (C_max mixed)
    CrossFlowMixedMax,
    /// Cross-flow, one fluid mixed (C_min mixed)
    CrossFlowMixedMin,
    /// Shell-and-tube with specified number of shell passes
    ShellAndTube {
        /// Number of shell passes
        passes: usize,
    },
}

/// ε-NTU (Effectiveness-NTU) heat transfer model.
///
/// Uses the effectiveness-NTU method for heat exchanger rating.
///
/// # Example
///
/// ```
/// use entropyk_components::heat_exchanger::{EpsNtuModel, ExchangerType, HeatTransferModel};
///
/// let model = EpsNtuModel::new(5000.0, ExchangerType::CounterFlow);
/// assert_eq!(model.ua(), 5000.0);
/// ```
#[derive(Debug, Clone)]
pub struct EpsNtuModel {
    /// Overall heat transfer coefficient × Area (W/K), nominal
    ua: f64,
    /// UA calibration scale: UA_eff = ua_scale × ua (default 1.0)
    ua_scale: f64,
    /// Heat exchanger type
    exchanger_type: ExchangerType,
}

impl EpsNtuModel {
    /// Creates a new ε-NTU model.
    ///
    /// # Arguments
    ///
    /// * `ua` - Overall heat transfer coefficient × Area (W/K). Must be non-negative.
    /// * `exchanger_type` - Type of heat exchanger
    ///
    /// # Panics
    ///
    /// Panics if `ua` is negative or NaN.
    pub fn new(ua: f64, exchanger_type: ExchangerType) -> Self {
        assert!(
            ua.is_finite() && ua >= 0.0,
            "UA must be non-negative and finite, got {}",
            ua
        );
        Self {
            ua,
            ua_scale: 1.0,
            exchanger_type,
        }
    }

    /// Creates a counter-flow ε-NTU model.
    pub fn counter_flow(ua: f64) -> Self {
        Self::new(ua, ExchangerType::CounterFlow)
    }

    /// Creates a parallel-flow ε-NTU model.
    pub fn parallel_flow(ua: f64) -> Self {
        Self::new(ua, ExchangerType::ParallelFlow)
    }

    /// Creates a cross-flow (unmixed) ε-NTU model.
    pub fn cross_flow_unmixed(ua: f64) -> Self {
        Self::new(ua, ExchangerType::CrossFlowUnmixed)
    }

    /// Calculates the effectiveness ε.
    ///
    /// Relations follow the standard ε-NTU tables (Incropera & DeWitt,
    /// *Fundamentals of Heat and Mass Transfer*, Table 11.4).
    ///
    /// # Arguments
    ///
    /// * `ntu` - Number of Transfer Units (UA / C_min)
    /// * `c_r` - Heat capacity ratio (C_min / C_max)
    ///
    /// # Returns
    ///
    /// The effectiveness ε (0 to 1)
    pub fn effectiveness(&self, ntu: f64, c_r: f64) -> f64 {
        if ntu <= 0.0 {
            return 0.0;
        }

        // C_r → 0 is the phase-change (isothermal side) limit: every
        // configuration collapses to ε = 1 − exp(−NTU). Returning early here
        // also guards all the divisions by c_r below.
        if c_r < 1e-10 {
            return 1.0 - (-ntu).exp();
        }

        match self.exchanger_type {
            ExchangerType::CounterFlow => {
                if (1.0 - c_r).abs() < 1e-10 {
                    // Balanced exchanger (C_r = 1): the general expression is
                    // 0/0; use its analytic limit ε = NTU / (1 + NTU) instead
                    // of producing NaN.
                    ntu / (1.0 + ntu)
                } else {
                    let exp_term = (-ntu * (1.0 - c_r)).exp();
                    (1.0 - exp_term) / (1.0 - c_r * exp_term)
                }
            }
            ExchangerType::ParallelFlow => (1.0 - (-ntu * (1.0 + c_r)).exp()) / (1.0 + c_r),
            ExchangerType::CrossFlowUnmixed => {
                // Standard approximation for both fluids unmixed:
                // ε = 1 − exp{ (NTU^0.22 / C_r) · [exp(−C_r · NTU^0.78) − 1] }
                let ntu022 = ntu.powf(0.22);
                1.0 - ((ntu022 / c_r) * ((-c_r * ntu.powf(0.78)).exp() - 1.0)).exp()
            }
            ExchangerType::CrossFlowMixedMax => {
                // C_max mixed, C_min unmixed:
                // ε = (1/C_r) · [1 − exp(−C_r · (1 − exp(−NTU)))]
                (1.0 / c_r) * (1.0 - (-c_r * (1.0 - (-ntu).exp())).exp())
            }
            ExchangerType::CrossFlowMixedMin => {
                // C_min mixed, C_max unmixed:
                // ε = 1 − exp{ −(1/C_r) · [1 − exp(−C_r · NTU)] }
                1.0 - (-(1.0 / c_r) * (1.0 - (-c_r * ntu).exp())).exp()
            }
            ExchangerType::ShellAndTube { passes } => {
                // Single shell pass effectiveness ε₁ with the total NTU split
                // evenly over the passes; then the multipass combination rule.
                let n = passes.max(1) as f64;
                let ntu_1 = ntu / n;
                let root = (1.0 + c_r * c_r).sqrt();
                let e = (-ntu_1 * root).exp();
                let eps_1 = 2.0 / ((1.0 + c_r) + root * (1.0 + e) / (1.0 - e));
                if passes <= 1 {
                    eps_1
                } else if (1.0 - c_r).abs() < 1e-10 {
                    // C_r = 1 limit of the multipass combination (avoids 0/0).
                    n * eps_1 / (1.0 + (n - 1.0) * eps_1)
                } else {
                    let ratio = ((1.0 - eps_1 * c_r) / (1.0 - eps_1)).powf(n);
                    (ratio - 1.0) / (ratio - c_r)
                }
            }
        }
    }

    /// Calculates the maximum possible heat transfer rate.
+ /// + /// Q̇_max = C_min × (T_hot,in - T_cold,in) + pub fn q_max(&self, c_min: f64, t_hot_in: f64, t_cold_in: f64) -> f64 { + c_min * (t_hot_in - t_cold_in).max(0.0) + } +} + +impl HeatTransferModel for EpsNtuModel { + fn compute_heat_transfer( + &self, + hot_inlet: &FluidState, + _hot_outlet: &FluidState, + cold_inlet: &FluidState, + _cold_outlet: &FluidState, + ) -> Power { + let c_hot = hot_inlet.heat_capacity_rate(); + let c_cold = cold_inlet.heat_capacity_rate(); + + let (c_min, c_max) = if c_hot < c_cold { + (c_hot, c_cold) + } else { + (c_cold, c_hot) + }; + + if c_min < 1e-10 { + return Power::from_watts(0.0); + } + + let c_r = c_min / c_max; + let ntu = self.effective_ua() / c_min; + + let effectiveness = self.effectiveness(ntu, c_r); + + let q_max = self.q_max(c_min, hot_inlet.temperature, cold_inlet.temperature); + + Power::from_watts(effectiveness * q_max) + } + + fn compute_residuals( + &self, + hot_inlet: &FluidState, + hot_outlet: &FluidState, + cold_inlet: &FluidState, + cold_outlet: &FluidState, + residuals: &mut ResidualVector, + ) { + let q = self + .compute_heat_transfer(hot_inlet, hot_outlet, cold_inlet, cold_outlet) + .to_watts(); + + let q_hot = + hot_inlet.mass_flow * hot_inlet.cp * (hot_inlet.temperature - hot_outlet.temperature); + let q_cold = cold_inlet.mass_flow + * cold_inlet.cp + * (cold_outlet.temperature - cold_inlet.temperature); + + residuals[0] = q_hot - q; + residuals[1] = q_cold - q; + residuals[2] = q_hot - q_cold; + } + + fn n_equations(&self) -> usize { + 3 + } + + fn ua(&self) -> f64 { + self.ua + } + + fn ua_scale(&self) -> f64 { + self.ua_scale + } + + fn set_ua_scale(&mut self, s: f64) { + self.ua_scale = s; + } + + fn effective_ua(&self) -> f64 { + self.ua * self.ua_scale + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_eps_ntu_model_creation() { + let model = EpsNtuModel::new(5000.0, ExchangerType::CounterFlow); + assert_eq!(model.ua(), 5000.0); + } + + #[test] + fn 
test_effectiveness_counter_flow() { + let model = EpsNtuModel::counter_flow(5000.0); + + let eps = model.effectiveness(5.0, 0.5); + assert!(eps > 0.0 && eps < 1.0); + + let eps_cr_zero = model.effectiveness(5.0, 0.0); + assert!((eps_cr_zero - (1.0 - (-5.0_f64).exp())).abs() < 1e-10); + } + + #[test] + fn test_effectiveness_parallel_flow() { + let model = EpsNtuModel::parallel_flow(5000.0); + + let eps = model.effectiveness(5.0, 0.5); + assert!(eps > 0.0 && eps < 1.0); + assert!(eps < model.effectiveness(5.0, 0.5) + 0.1); + } + + #[test] + fn test_effectiveness_zero_ntu() { + let model = EpsNtuModel::counter_flow(5000.0); + let eps = model.effectiveness(0.0, 0.5); + assert_eq!(eps, 0.0); + } + + #[test] + fn test_compute_heat_transfer() { + let model = EpsNtuModel::counter_flow(5000.0); + + let hot_inlet = FluidState::new(80.0 + 273.15, 101_325.0, 400_000.0, 0.1, 1000.0); + let hot_outlet = FluidState::new(60.0 + 273.15, 101_325.0, 380_000.0, 0.1, 1000.0); + let cold_inlet = FluidState::new(20.0 + 273.15, 101_325.0, 80_000.0, 0.2, 4180.0); + let cold_outlet = FluidState::new(30.0 + 273.15, 101_325.0, 120_000.0, 0.2, 4180.0); + + let q = model.compute_heat_transfer(&hot_inlet, &hot_outlet, &cold_inlet, &cold_outlet); + + assert!(q.to_watts() > 0.0); + } + + #[test] + fn test_n_equations() { + let model = EpsNtuModel::counter_flow(1000.0); + assert_eq!(model.n_equations(), 3); + } + + #[test] + fn test_q_max() { + let model = EpsNtuModel::counter_flow(5000.0); + + let c_min = 1000.0; + let t_hot_in = 350.0; + let t_cold_in = 300.0; + + let q_max = model.q_max(c_min, t_hot_in, t_cold_in); + assert_eq!(q_max, 50_000.0); + } + + #[test] + #[should_panic(expected = "UA must be non-negative")] + fn test_negative_ua_panics() { + let _model = EpsNtuModel::new(-1000.0, ExchangerType::CounterFlow); + } + + #[test] + fn test_effectiveness_cross_flow_unmixed_cr_zero() { + let model = EpsNtuModel::cross_flow_unmixed(5000.0); + + let eps = model.effectiveness(5.0, 0.0); + let 
expected = 1.0 - (-5.0_f64).exp(); + assert!((eps - expected).abs() < 1e-10); + } +} diff --git a/crates/components/src/heat_exchanger/evaporator.rs b/crates/components/src/heat_exchanger/evaporator.rs new file mode 100644 index 0000000..b9ce947 --- /dev/null +++ b/crates/components/src/heat_exchanger/evaporator.rs @@ -0,0 +1,292 @@ +//! Evaporator Component +//! +//! A heat exchanger configured for refrigerant evaporation. +//! The refrigerant (cold side) evaporates from two-phase mixture to +/// superheated vapor, absorbing heat from the hot side. +use super::eps_ntu::{EpsNtuModel, ExchangerType}; +use super::exchanger::HeatExchanger; +use entropyk_core::Calib; +use crate::{ + Component, ComponentError, ConnectedPort, JacobianBuilder, ResidualVector, SystemState, +}; +use crate::state_machine::{CircuitId, OperationalState, StateManageable}; + +/// Evaporator heat exchanger. +/// +/// Uses the ε-NTU method for heat transfer calculation. +/// The refrigerant evaporates on the cold side, absorbing heat +/// from the hot side (typically water or air). +/// +/// # Configuration +/// +/// - Hot side: Heat source (water, air, etc.) +/// - Cold side: Refrigerant evaporating (phase change) +/// +/// # Example +/// +/// ``` +/// use entropyk_components::heat_exchanger::Evaporator; +/// use entropyk_components::Component; +/// +/// let evaporator = Evaporator::new(8_000.0); // UA = 8 kW/K +/// assert_eq!(evaporator.n_equations(), 3); +/// ``` +#[derive(Debug)] +pub struct Evaporator { + /// Inner heat exchanger with ε-NTU model + inner: HeatExchanger, + /// Saturation temperature for evaporation (K) + saturation_temp: f64, + /// Target superheat (K) + superheat_target: f64, +} + +impl Evaporator { + /// Creates a new evaporator with the given UA value. 
+ /// + /// # Arguments + /// + /// * `ua` - Overall heat transfer coefficient × Area (W/K) + /// + /// # Example + /// + /// ``` + /// use entropyk_components::heat_exchanger::Evaporator; + /// + /// let evaporator = Evaporator::new(8_000.0); + /// ``` + pub fn new(ua: f64) -> Self { + let model = EpsNtuModel::new(ua, ExchangerType::CounterFlow); + Self { + inner: HeatExchanger::new(model, "Evaporator"), + saturation_temp: 278.15, + superheat_target: 5.0, + } + } + + /// Creates an evaporator with specific saturation and superheat. + pub fn with_superheat(ua: f64, saturation_temp: f64, superheat_target: f64) -> Self { + let model = EpsNtuModel::new(ua, ExchangerType::CounterFlow); + Self { + inner: HeatExchanger::new(model, "Evaporator"), + saturation_temp, + superheat_target, + } + } + + /// Returns the name of this evaporator. + pub fn name(&self) -> &str { + self.inner.name() + } + + /// Returns the UA value (effective: f_ua × UA_nominal). + pub fn ua(&self) -> f64 { + self.inner.ua() + } + + /// Returns calibration factors (f_ua for evaporator). + pub fn calib(&self) -> &Calib { + self.inner.calib() + } + + /// Sets calibration factors. + pub fn set_calib(&mut self, calib: Calib) { + self.inner.set_calib(calib); + } + + /// Returns the saturation temperature. + pub fn saturation_temp(&self) -> f64 { + self.saturation_temp + } + + /// Returns the superheat target. + pub fn superheat_target(&self) -> f64 { + self.superheat_target + } + + /// Sets the saturation temperature. + pub fn set_saturation_temp(&mut self, temp: f64) { + self.saturation_temp = temp; + } + + /// Sets the superheat target. + pub fn set_superheat_target(&mut self, superheat: f64) { + self.superheat_target = superheat; + } + + /// Validates that the outlet quality is >= 0 (fully evaporated or superheated). 
+ /// + /// # Arguments + /// + /// * `outlet_enthalpy` - Outlet specific enthalpy (J/kg) + /// * `h_liquid` - Saturated liquid enthalpy at evaporating pressure + /// * `h_vapor` - Saturated vapor enthalpy at evaporating pressure + /// + /// # Returns + /// + /// Returns Ok(superheat) if valid, Err otherwise + pub fn validate_outlet_quality( + &self, + outlet_enthalpy: f64, + h_liquid: f64, + h_vapor: f64, + cp_vapor: f64, + ) -> Result { + if h_vapor <= h_liquid { + return Err(ComponentError::NumericalError( + "Invalid saturation enthalpies".to_string(), + )); + } + + let quality = (outlet_enthalpy - h_liquid) / (h_vapor - h_liquid); + + if quality >= 0.0 - 1e-6 { + if outlet_enthalpy >= h_vapor { + let superheat = (outlet_enthalpy - h_vapor) / cp_vapor; + Ok(superheat) + } else { + Ok(0.0) + } + } else { + Err(ComponentError::InvalidState(format!( + "Evaporator outlet quality {} < 0 (subcooled)", + quality + ))) + } + } + + /// Calculates the superheat residual for inverse control. + /// + /// Returns (actual_superheat - target_superheat) + pub fn superheat_residual(&self, actual_superheat: f64) -> f64 { + actual_superheat - self.superheat_target + } + + /// Computes the full thermodynamic state at the hot inlet. + pub fn hot_inlet_state(&self) -> Result { + self.inner.hot_inlet_state() + } + + /// Computes the full thermodynamic state at the cold inlet. 
+ pub fn cold_inlet_state(&self) -> Result { + self.inner.cold_inlet_state() + } +} + +impl Component for Evaporator { + fn compute_residuals( + &self, + state: &SystemState, + residuals: &mut ResidualVector, + ) -> Result<(), ComponentError> { + self.inner.compute_residuals(state, residuals) + } + + fn jacobian_entries( + &self, + state: &SystemState, + jacobian: &mut JacobianBuilder, + ) -> Result<(), ComponentError> { + self.inner.jacobian_entries(state, jacobian) + } + + fn n_equations(&self) -> usize { + self.inner.n_equations() + } + + fn get_ports(&self) -> &[ConnectedPort] { + self.inner.get_ports() + } +} + +impl StateManageable for Evaporator { + fn state(&self) -> OperationalState { + self.inner.state() + } + + fn set_state(&mut self, state: OperationalState) -> Result<(), ComponentError> { + self.inner.set_state(state) + } + + fn can_transition_to(&self, target: OperationalState) -> bool { + self.inner.can_transition_to(target) + } + + fn circuit_id(&self) -> &CircuitId { + self.inner.circuit_id() + } + + fn set_circuit_id(&mut self, circuit_id: CircuitId) { + self.inner.set_circuit_id(circuit_id); + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_evaporator_creation() { + let evaporator = Evaporator::new(8_000.0); + assert_eq!(evaporator.ua(), 8_000.0); + assert_eq!(evaporator.n_equations(), 3); + } + + #[test] + fn test_evaporator_with_superheat() { + let evaporator = Evaporator::with_superheat(8_000.0, 278.15, 10.0); + assert_eq!(evaporator.saturation_temp(), 278.15); + assert_eq!(evaporator.superheat_target(), 10.0); + } + + #[test] + fn test_validate_outlet_quality_superheated() { + let evaporator = Evaporator::new(8_000.0); + + let h_liquid = 200_000.0; + let h_vapor = 400_000.0; + let outlet_h = 420_000.0; + let cp_vapor = 1000.0; + + let result = evaporator.validate_outlet_quality(outlet_h, h_liquid, h_vapor, cp_vapor); + assert!(result.is_ok()); + + let superheat = result.unwrap(); + assert!((superheat - 20.0).abs() < 
1e-10); + } + + #[test] + fn test_validate_outlet_quality_subcooled() { + let evaporator = Evaporator::new(8_000.0); + + let h_liquid = 200_000.0; + let h_vapor = 400_000.0; + let outlet_h = 150_000.0; + let cp_vapor = 1000.0; + + let result = evaporator.validate_outlet_quality(outlet_h, h_liquid, h_vapor, cp_vapor); + assert!(result.is_err()); + } + + #[test] + fn test_superheat_residual() { + let evaporator = Evaporator::with_superheat(8_000.0, 278.15, 5.0); + + let residual = evaporator.superheat_residual(7.0); + assert!((residual - 2.0).abs() < 1e-10); + + let residual = evaporator.superheat_residual(3.0); + assert!((residual - (-2.0)).abs() < 1e-10); + } + + #[test] + fn test_compute_residuals() { + let evaporator = Evaporator::new(8_000.0); + + let state = vec![0.0; 10]; + let mut residuals = vec![0.0; 3]; + + let result = evaporator.compute_residuals(&state, &mut residuals); + assert!(result.is_ok()); + } +} diff --git a/crates/components/src/heat_exchanger/evaporator_coil.rs b/crates/components/src/heat_exchanger/evaporator_coil.rs new file mode 100644 index 0000000..1ec5b17 --- /dev/null +++ b/crates/components/src/heat_exchanger/evaporator_coil.rs @@ -0,0 +1,208 @@ +//! Evaporator Coil Component +//! +//! An air-side (finned) heat exchanger for refrigerant evaporation. +//! The refrigerant (cold side) evaporates, absorbing heat from air (hot side). +//! Used in split systems and air-source heat pumps. +//! +//! ## Port Convention +//! +//! - **Hot side (air)**: Heat source — connect to Fan outlet/inlet +//! - **Cold side (refrigerant)**: Evaporating +//! +//! ## Integration with Fan +//! +//! Connect Fan outlet → EvaporatorCoil air inlet, EvaporatorCoil air outlet → Fan inlet. +//! Use `FluidId::new("Air")` for air ports. 

use super::evaporator::Evaporator;
use crate::{
    Component, ComponentError, ConnectedPort, JacobianBuilder, ResidualVector, SystemState,
};
use crate::state_machine::{CircuitId, OperationalState, StateManageable};

/// Evaporator coil (air-side finned heat exchanger).
///
/// Explicit component for air-source evaporators. Uses ε-NTU method.
/// Refrigerant evaporates on cold side, air on hot side.
///
/// # Example
///
/// ```
/// use entropyk_components::heat_exchanger::EvaporatorCoil;
/// use entropyk_components::Component;
///
/// let coil = EvaporatorCoil::new(8_000.0); // UA = 8 kW/K
/// assert_eq!(coil.ua(), 8_000.0);
/// assert_eq!(coil.n_equations(), 3);
/// ```
#[derive(Debug)]
pub struct EvaporatorCoil {
    // Wrapped Evaporator providing all physics (ε-NTU counter-flow model);
    // this type is a thin air-side façade over it.
    inner: Evaporator,
}

impl EvaporatorCoil {
    /// Creates a new evaporator coil with the given UA value.
    ///
    /// # Arguments
    ///
    /// * `ua` - Overall heat transfer coefficient × Area (W/K)
    pub fn new(ua: f64) -> Self {
        Self {
            inner: Evaporator::new(ua),
        }
    }

    /// Creates an evaporator coil with specific saturation and superheat.
    ///
    /// `saturation_temp` is in K, `superheat_target` in K above saturation
    /// (same conventions as [`Evaporator::with_superheat`]).
    pub fn with_superheat(ua: f64, saturation_temp: f64, superheat_target: f64) -> Self {
        Self {
            inner: Evaporator::with_superheat(ua, saturation_temp, superheat_target),
        }
    }

    /// Returns the name of this component.
    ///
    /// Fixed label "EvaporatorCoil" — unlike [`Evaporator::name`], which
    /// delegates to the inner heat exchanger's name.
    pub fn name(&self) -> &str {
        "EvaporatorCoil"
    }

    /// Returns the UA value.
    pub fn ua(&self) -> f64 {
        self.inner.ua()
    }

    /// Returns the saturation temperature (K).
    pub fn saturation_temp(&self) -> f64 {
        self.inner.saturation_temp()
    }

    /// Returns the superheat target (K).
    pub fn superheat_target(&self) -> f64 {
        self.inner.superheat_target()
    }

    /// Sets the saturation temperature (K).
    pub fn set_saturation_temp(&mut self, temp: f64) {
        self.inner.set_saturation_temp(temp);
    }

    /// Sets the superheat target.
+ pub fn set_superheat_target(&mut self, superheat: f64) { + self.inner.set_superheat_target(superheat); + } +} + +impl Component for EvaporatorCoil { + fn compute_residuals( + &self, + state: &SystemState, + residuals: &mut ResidualVector, + ) -> Result<(), ComponentError> { + self.inner.compute_residuals(state, residuals) + } + + fn jacobian_entries( + &self, + state: &SystemState, + jacobian: &mut JacobianBuilder, + ) -> Result<(), ComponentError> { + self.inner.jacobian_entries(state, jacobian) + } + + fn n_equations(&self) -> usize { + self.inner.n_equations() + } + + fn get_ports(&self) -> &[ConnectedPort] { + self.inner.get_ports() + } +} + +impl StateManageable for EvaporatorCoil { + fn state(&self) -> OperationalState { + self.inner.state() + } + + fn set_state(&mut self, state: OperationalState) -> Result<(), ComponentError> { + self.inner.set_state(state) + } + + fn can_transition_to(&self, target: OperationalState) -> bool { + self.inner.can_transition_to(target) + } + + fn circuit_id(&self) -> &CircuitId { + self.inner.circuit_id() + } + + fn set_circuit_id(&mut self, circuit_id: CircuitId) { + self.inner.set_circuit_id(circuit_id); + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_evaporator_coil_creation() { + let coil = EvaporatorCoil::new(8_000.0); + assert_eq!(coil.ua(), 8_000.0); + assert_eq!(coil.name(), "EvaporatorCoil"); + } + + #[test] + fn test_evaporator_coil_n_equations() { + let coil = EvaporatorCoil::new(5_000.0); + assert_eq!(coil.n_equations(), 3); + } + + #[test] + fn test_evaporator_coil_with_superheat() { + let coil = EvaporatorCoil::with_superheat(8_000.0, 278.15, 5.0); + assert_eq!(coil.saturation_temp(), 278.15); + assert_eq!(coil.superheat_target(), 5.0); + } + + #[test] + fn test_evaporator_coil_compute_residuals() { + let coil = EvaporatorCoil::new(8_000.0); + let state = vec![0.0; 10]; + let mut residuals = vec![0.0; 3]; + let result = coil.compute_residuals(&state, &mut residuals); + 
assert!(result.is_ok()); + assert!(residuals.iter().all(|r| r.is_finite()), "residuals must be finite"); + } + + #[test] + fn test_evaporator_coil_jacobian_entries() { + let coil = EvaporatorCoil::new(8_000.0); + let state = vec![0.0; 10]; + let mut jacobian = crate::JacobianBuilder::new(); + let result = coil.jacobian_entries(&state, &mut jacobian); + assert!(result.is_ok()); + // HeatExchanger base returns empty jacobian until framework implements it + assert!( + jacobian.is_empty(), + "delegation works; empty jacobian expected until HeatExchanger implements entries" + ); + } + + #[test] + fn test_evaporator_coil_setters() { + let mut coil = EvaporatorCoil::new(8_000.0); + coil.set_saturation_temp(275.0); + coil.set_superheat_target(7.0); + assert!((coil.saturation_temp() - 275.0).abs() < 1e-10); + assert!((coil.superheat_target() - 7.0).abs() < 1e-10); + } + + #[test] + fn test_evaporator_coil_state_manageable() { + use crate::state_machine::{OperationalState, StateManageable}; + + let mut coil = EvaporatorCoil::new(8_000.0); + assert_eq!(coil.state(), OperationalState::On); + assert!(coil.can_transition_to(OperationalState::Off)); + assert!(coil.set_state(OperationalState::Off).is_ok()); + assert_eq!(coil.state(), OperationalState::Off); + } +} diff --git a/crates/components/src/heat_exchanger/exchanger.rs b/crates/components/src/heat_exchanger/exchanger.rs index 6d2617c..0990072 100644 --- a/crates/components/src/heat_exchanger/exchanger.rs +++ b/crates/components/src/heat_exchanger/exchanger.rs @@ -262,10 +262,34 @@ impl HeatExchanger { self.fluid_backend.is_some() } + /// Computes the full thermodynamic state at the hot inlet. 
+ pub fn hot_inlet_state(&self) -> Result { + let backend = self.fluid_backend.as_ref().ok_or_else(|| ComponentError::CalculationFailed("No FluidBackend configured".to_string()))?; + let conditions = self.hot_conditions.as_ref().ok_or_else(|| ComponentError::CalculationFailed("Hot conditions not set".to_string()))?; + let h = self.query_enthalpy(conditions)?; + backend.full_state( + conditions.fluid_id().clone(), + Pressure::from_pascals(conditions.pressure_pa()), + entropyk_core::Enthalpy::from_joules_per_kg(h), + ).map_err(|e| ComponentError::CalculationFailed(format!("Failed to compute hot inlet state: {}", e))) + } + + /// Computes the full thermodynamic state at the cold inlet. + pub fn cold_inlet_state(&self) -> Result { + let backend = self.fluid_backend.as_ref().ok_or_else(|| ComponentError::CalculationFailed("No FluidBackend configured".to_string()))?; + let conditions = self.cold_conditions.as_ref().ok_or_else(|| ComponentError::CalculationFailed("Cold conditions not set".to_string()))?; + let h = self.query_enthalpy(conditions)?; + backend.full_state( + conditions.fluid_id().clone(), + Pressure::from_pascals(conditions.pressure_pa()), + entropyk_core::Enthalpy::from_joules_per_kg(h), + ).map_err(|e| ComponentError::CalculationFailed(format!("Failed to compute cold inlet state: {}", e))) + } + /// Queries Cp (J/(kg·K)) from the backend for a given side. fn query_cp(&self, conditions: &HxSideConditions) -> Result { if let Some(backend) = &self.fluid_backend { - let state = ThermoState::from_pt( + let state = entropyk_fluids::FluidState::from_pt( Pressure::from_pascals(conditions.pressure_pa()), Temperature::from_kelvin(conditions.temperature_k()), ); @@ -279,7 +303,7 @@ impl HeatExchanger { /// Queries specific enthalpy (J/kg) from the backend for a given side at (P, T). 
fn query_enthalpy(&self, conditions: &HxSideConditions) -> Result { if let Some(backend) = &self.fluid_backend { - let state = ThermoState::from_pt( + let state = entropyk_fluids::FluidState::from_pt( Pressure::from_pascals(conditions.pressure_pa()), Temperature::from_kelvin(conditions.temperature_k()), ); diff --git a/crates/components/src/heat_exchanger/lmtd.rs b/crates/components/src/heat_exchanger/lmtd.rs new file mode 100644 index 0000000..0e65ef4 --- /dev/null +++ b/crates/components/src/heat_exchanger/lmtd.rs @@ -0,0 +1,398 @@ +//! Log Mean Temperature Difference (LMTD) Model +//! +//! Implements the LMTD method for heat exchanger calculations. +//! +//! ## Theory +//! +//! The heat transfer rate is calculated as: +//! +//! $$\dot{Q} = U \cdot A \cdot \Delta T_{lm} \cdot F$$ +//! +//! Where: +//! - $\dot{Q}$: Heat transfer rate (W) +//! - $U$: Overall heat transfer coefficient (W/m²·K) +//! - $A$: Heat transfer area (m²) +//! - $\Delta T_{lm}$: Log mean temperature difference (K) +//! - $F$: Correction factor for flow configuration +//! +//! For counter-flow: +//! $$\Delta T_{lm} = \frac{\Delta T_1 - \Delta T_2}{\ln(\Delta T_1 / \Delta T_2)}$$ +//! +//! Where: +//! - $\Delta T_1 = T_{hot,in} - T_{cold,out}$ +//! - $\Delta T_2 = T_{hot,out} - T_{cold,in}$ + +use super::model::{FluidState, HeatTransferModel}; +use crate::ResidualVector; +use entropyk_core::Power; + +/// Flow configuration for the heat exchanger. +#[derive(Debug, Clone, Copy, PartialEq, Default)] +pub enum FlowConfiguration { + /// Counter-flow (most efficient) + #[default] + CounterFlow, + /// Parallel-flow (co-current) + ParallelFlow, + /// Cross-flow with correction factor + CrossFlow { + /// Correction factor F (typically 0.8-1.0) + correction_factor: f64, + }, + /// Shell-and-tube with 1 shell pass, 2 tube passes + ShellAndTube1_2, +} + +impl FlowConfiguration { + /// Returns the correction factor F for this configuration. 
+ pub fn correction_factor(&self) -> f64 { + match self { + FlowConfiguration::CounterFlow => 1.0, + FlowConfiguration::ParallelFlow => 1.0, + FlowConfiguration::CrossFlow { correction_factor } => *correction_factor, + FlowConfiguration::ShellAndTube1_2 => 0.9, + } + } +} + +/// LMTD (Log Mean Temperature Difference) heat transfer model. +/// +/// Uses the classical LMTD method for heat exchanger sizing and rating. +/// +/// # Example +/// +/// ``` +/// use entropyk_components::heat_exchanger::{LmtdModel, FlowConfiguration, HeatTransferModel}; +/// use entropyk_components::heat_exchanger::model::FluidState; +/// +/// let model = LmtdModel::new(5000.0, FlowConfiguration::CounterFlow); +/// assert_eq!(model.ua(), 5000.0); +/// ``` +#[derive(Debug, Clone)] +pub struct LmtdModel { + /// Overall heat transfer coefficient × Area (W/K), nominal + ua: f64, + /// UA calibration scale: UA_eff = ua_scale × ua (default 1.0) + ua_scale: f64, + /// Flow configuration + flow_config: FlowConfiguration, +} + +impl LmtdModel { + /// Creates a new LMTD model. + /// + /// # Arguments + /// + /// * `ua` - Overall heat transfer coefficient × Area (W/K). Must be positive. + /// * `flow_config` - Flow configuration (counter-flow, parallel-flow, etc.) + /// + /// # Panics + /// + /// Panics if `ua` is negative or NaN. + /// + /// # Example + /// + /// ``` + /// use entropyk_components::heat_exchanger::{LmtdModel, FlowConfiguration}; + /// + /// let model = LmtdModel::new(10000.0, FlowConfiguration::CounterFlow); + /// ``` + pub fn new(ua: f64, flow_config: FlowConfiguration) -> Self { + assert!( + ua.is_finite() && ua >= 0.0, + "UA must be non-negative and finite, got {}", + ua + ); + Self { + ua, + ua_scale: 1.0, + flow_config, + } + } + + /// Creates a counter-flow LMTD model. + pub fn counter_flow(ua: f64) -> Self { + Self::new(ua, FlowConfiguration::CounterFlow) + } + + /// Creates a parallel-flow LMTD model. 
    pub fn parallel_flow(ua: f64) -> Self {
        Self::new(ua, FlowConfiguration::ParallelFlow)
    }

    /// Creates a cross-flow LMTD model with correction factor.
    pub fn cross_flow(ua: f64, correction_factor: f64) -> Self {
        Self::new(ua, FlowConfiguration::CrossFlow { correction_factor })
    }

    /// Calculates the Log Mean Temperature Difference.
    ///
    /// For counter-flow:
    /// - ΔT₁ = T_hot,in - T_cold,out
    /// - ΔT₂ = T_hot,out - T_cold,in
    ///
    /// For parallel-flow:
    /// - ΔT₁ = T_hot,in - T_cold,in
    /// - ΔT₂ = T_hot,out - T_cold,out
    ///
    /// Special handling when ΔT₁ ≈ ΔT₂: uses arithmetic mean.
    pub fn lmtd(&self, t_hot_in: f64, t_hot_out: f64, t_cold_in: f64, t_cold_out: f64) -> f64 {
        // Cross-flow and shell-and-tube use the counter-flow ΔT pairing; their
        // geometry is accounted for via the F correction factor instead.
        let (dt1, dt2) = match self.flow_config {
            FlowConfiguration::CounterFlow
            | FlowConfiguration::CrossFlow { .. }
            | FlowConfiguration::ShellAndTube1_2 => (t_hot_in - t_cold_out, t_hot_out - t_cold_in),
            FlowConfiguration::ParallelFlow => (t_hot_in - t_cold_in, t_hot_out - t_cold_out),
        };

        // Zero-flow / zero LMTD regularization: avoid division by zero (Story 3.5)
        if dt1.abs() < 1e-10 && dt2.abs() < 1e-10 {
            return 0.0;
        }

        // Near-equal ΔTs: ln(ΔT₁/ΔT₂) → 0 makes the exact formula 0/0; the
        // arithmetic mean is the correct limit. The .max(1e-10) keeps the
        // relative comparison itself division-safe.
        if (dt1 - dt2).abs() / dt1.max(dt2).max(1e-10) < 1e-6 {
            return (dt1 + dt2) / 2.0;
        }

        // Temperature cross or reversed gradient (a ΔT ≤ 0): the log form is
        // undefined there, so fall back to the arithmetic mean. This keeps the
        // value finite and sign-consistent for intermediate solver iterates.
        if dt1 <= 0.0 || dt2 <= 0.0 {
            return (dt1 + dt2) / 2.0;
        }

        // Exact LMTD (both ΔTs strictly positive and distinct here).
        (dt1 - dt2) / (dt1 / dt2).ln()
    }
}

impl HeatTransferModel for LmtdModel {
    fn compute_heat_transfer(
        &self,
        hot_inlet: &FluidState,
        hot_outlet: &FluidState,
        cold_inlet: &FluidState,
        cold_outlet: &FluidState,
    ) -> Power {
        let lmtd = self.lmtd(
            hot_inlet.temperature,
            hot_outlet.temperature,
            cold_inlet.temperature,
            cold_outlet.temperature,
        );

        // Q̇ = F · UA_eff · ΔT_lm, where UA_eff includes the calibration scale.
        let f = self.flow_config.correction_factor();
        let ua_eff = self.effective_ua();
        let q = ua_eff * lmtd * f;

        Power::from_watts(q)
    }

    fn compute_residuals(
        &self,
        hot_inlet: &FluidState,
        hot_outlet: &FluidState,
        cold_inlet: &FluidState,
        cold_outlet: &FluidState,
        residuals: &mut 
ResidualVector, + ) { + let q = self + .compute_heat_transfer(hot_inlet, hot_outlet, cold_inlet, cold_outlet) + .to_watts(); + + let q_hot = + hot_inlet.mass_flow * hot_inlet.cp * (hot_inlet.temperature - hot_outlet.temperature); + let q_cold = cold_inlet.mass_flow + * cold_inlet.cp + * (cold_outlet.temperature - cold_inlet.temperature); + + residuals[0] = q_hot - q; + residuals[1] = q_cold - q; + residuals[2] = q_hot - q_cold; + } + + fn n_equations(&self) -> usize { + 3 + } + + fn ua(&self) -> f64 { + self.ua + } + + fn ua_scale(&self) -> f64 { + self.ua_scale + } + + fn set_ua_scale(&mut self, s: f64) { + self.ua_scale = s; + } + + fn effective_ua(&self) -> f64 { + self.ua * self.ua_scale + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::heat_exchanger::{EpsNtuModel, HeatTransferModel}; + use approx::assert_relative_eq; + + #[test] + fn test_lmtd_model_creation() { + let model = LmtdModel::new(5000.0, FlowConfiguration::CounterFlow); + assert_eq!(model.ua(), 5000.0); + } + + #[test] + fn test_f_ua_scales_heat_transfer() { + let mut model = LmtdModel::new(5000.0, FlowConfiguration::CounterFlow); + assert_relative_eq!(model.effective_ua(), 5000.0, epsilon = 1e-10); + model.set_ua_scale(1.1); + assert_relative_eq!(model.effective_ua(), 5500.0, epsilon = 1e-10); + } + + #[test] + fn test_lmtd_counter_flow() { + let model = LmtdModel::counter_flow(5000.0); + + let t_hot_in = 80.0; + let t_hot_out = 60.0; + let t_cold_in = 20.0; + let t_cold_out = 50.0; + + let lmtd = model.lmtd(t_hot_in, t_hot_out, t_cold_in, t_cold_out); + + assert!(lmtd > 0.0); + assert!(lmtd < (t_hot_in - t_cold_in)); + } + + #[test] + fn test_lmtd_equal_deltas() { + let model = LmtdModel::counter_flow(5000.0); + + let t_hot_in = 80.0; + let t_hot_out = 60.0; + let t_cold_in = 40.0; + let t_cold_out = 60.0; + + let lmtd = model.lmtd(t_hot_in, t_hot_out, t_cold_in, t_cold_out); + + assert!((lmtd - 20.0).abs() < 0.1); + } + + #[test] + fn test_lmtd_parallel_flow() { + let model = 
LmtdModel::parallel_flow(5000.0); + + let t_hot_in = 80.0; + let t_hot_out = 60.0; + let t_cold_in = 20.0; + let t_cold_out = 40.0; + + let lmtd = model.lmtd(t_hot_in, t_hot_out, t_cold_in, t_cold_out); + + assert!(lmtd > 0.0); + } + + #[test] + fn test_compute_heat_transfer() { + let model = LmtdModel::counter_flow(5000.0); + + let hot_inlet = FluidState::from_temperature(80.0 + 273.15); + let hot_outlet = FluidState::from_temperature(60.0 + 273.15); + let cold_inlet = FluidState::from_temperature(20.0 + 273.15); + let cold_outlet = FluidState::from_temperature(50.0 + 273.15); + + let q = model.compute_heat_transfer(&hot_inlet, &hot_outlet, &cold_inlet, &cold_outlet); + + assert!(q.to_watts() > 0.0); + } + + #[test] + fn test_flow_configuration_correction_factor() { + assert_eq!(FlowConfiguration::CounterFlow.correction_factor(), 1.0); + assert_eq!(FlowConfiguration::ParallelFlow.correction_factor(), 1.0); + assert_eq!(FlowConfiguration::ShellAndTube1_2.correction_factor(), 0.9); + + let cross = FlowConfiguration::CrossFlow { + correction_factor: 0.85, + }; + assert_eq!(cross.correction_factor(), 0.85); + } + + #[test] + fn test_n_equations() { + let model = LmtdModel::counter_flow(1000.0); + assert_eq!(model.n_equations(), 3); + } + + #[test] + fn test_lmtd_negative_deltas() { + let model = LmtdModel::counter_flow(5000.0); + + let t_hot_in = 40.0; + let t_hot_out = 50.0; + let t_cold_in = 60.0; + let t_cold_out = 70.0; + + let lmtd = model.lmtd(t_hot_in, t_hot_out, t_cold_in, t_cold_out); + + assert!(lmtd < 0.0); + } + + #[test] + #[should_panic(expected = "UA must be non-negative")] + fn test_negative_ua_panics() { + let _model = LmtdModel::new(-1000.0, FlowConfiguration::CounterFlow); + } + + #[test] + fn test_zero_ua_allowed() { + let model = LmtdModel::new(0.0, FlowConfiguration::CounterFlow); + assert_eq!(model.ua(), 0.0); + } + + #[test] + fn test_lmtd_vs_eps_ntu_comparison() { + // AC #8: Compare LMTD vs ε-NTU results for same conditions + // Verify both 
methods produce reasonable heat transfer values + + let ua = 5_000.0; + let lmtd_model = LmtdModel::counter_flow(ua); + let eps_ntu_model = EpsNtuModel::counter_flow(ua); + + // Typical HVAC operating conditions + // Hot water cooling from 80°C to 60°C (353K to 333K) + // Cold water heating from 20°C to 40°C (293K to 313K) + let hot_inlet = FluidState::new(353.0, 200_000.0, 335_000.0, 0.5, 4180.0); + let hot_outlet = FluidState::new(333.0, 195_000.0, 250_000.0, 0.5, 4180.0); + let cold_inlet = FluidState::new(293.0, 101_325.0, 85_000.0, 0.3, 4180.0); + let cold_outlet = FluidState::new(313.0, 101_325.0, 170_000.0, 0.3, 4180.0); + + let q_lmtd = lmtd_model + .compute_heat_transfer(&hot_inlet, &hot_outlet, &cold_inlet, &cold_outlet) + .to_watts(); + let q_eps_ntu = eps_ntu_model + .compute_heat_transfer(&hot_inlet, &hot_outlet, &cold_inlet, &cold_outlet) + .to_watts(); + + // Both methods should give positive heat transfer + assert!(q_lmtd > 0.0, "LMTD should give positive Q, got {}", q_lmtd); + assert!( + q_eps_ntu > 0.0, + "ε-NTU should give positive Q, got {}", + q_eps_ntu + ); + + // Verify reasonable magnitude for a 5 kW/K heat exchanger + // LMTD should be around 30-40K, so Q should be 150-200 kW range for these temps + assert!( + q_lmtd > 100_000.0 && q_lmtd < 300_000.0, + "LMTD Q unexpected: {}", + q_lmtd + ); + + // ε-NTU uses inlet temps only, so result differs from LMTD + assert!( + q_eps_ntu < 300_000.0, + "ε-NTU Q should be reasonable, got {}", + q_eps_ntu + ); + } +} diff --git a/crates/components/src/heat_exchanger/mod.rs b/crates/components/src/heat_exchanger/mod.rs new file mode 100644 index 0000000..b93b3e2 --- /dev/null +++ b/crates/components/src/heat_exchanger/mod.rs @@ -0,0 +1,51 @@ +//! Heat Exchanger Framework +//! +//! This module provides a pluggable heat exchanger framework supporting multiple +//! calculation models (LMTD, ε-NTU) for thermodynamic simulations. +//! +//! ## Architecture +//! +//! 
The framework uses the Strategy Pattern for heat transfer calculations: +//! +//! - [`HeatTransferModel`]: Trait for pluggable calculation strategies +//! - [`LmtdModel`]: Log Mean Temperature Difference method +//! - [`EpsNtuModel`]: Effectiveness-NTU method +//! - [`HeatExchanger`]: Generic heat exchanger component +//! +//! ## Components +//! +//! - [`Condenser`]: Refrigerant condensing (phase change) on hot side +//! - [`Evaporator`]: Refrigerant evaporating (phase change) on cold side +//! - [`EvaporatorCoil`]: Air-side evaporator (finned coil) +//! - [`CondenserCoil`]: Air-side condenser (finned coil) +//! - [`Economizer`]: Internal heat exchanger with bypass support +//! +//! ## Example +//! +//! ```rust +//! use entropyk_components::heat_exchanger::{HeatExchanger, LmtdModel, FlowConfiguration}; +//! +//! // Create a heat exchanger with LMTD model +//! let model = LmtdModel::new(5000.0, FlowConfiguration::CounterFlow); +//! // Heat exchanger would be created with connected ports +//! ``` + +pub mod condenser; +pub mod condenser_coil; +pub mod economizer; +pub mod evaporator_coil; +pub mod eps_ntu; +pub mod evaporator; +pub mod exchanger; +pub mod lmtd; +pub mod model; + +pub use condenser::Condenser; +pub use condenser_coil::CondenserCoil; +pub use economizer::Economizer; +pub use evaporator_coil::EvaporatorCoil; +pub use eps_ntu::{EpsNtuModel, ExchangerType}; +pub use evaporator::Evaporator; +pub use exchanger::{HeatExchanger, HeatExchangerBuilder, HxSideConditions}; +pub use lmtd::{FlowConfiguration, LmtdModel}; +pub use model::HeatTransferModel; diff --git a/crates/components/src/heat_exchanger/model.rs b/crates/components/src/heat_exchanger/model.rs new file mode 100644 index 0000000..3c69df9 --- /dev/null +++ b/crates/components/src/heat_exchanger/model.rs @@ -0,0 +1,204 @@ +//! Heat Transfer Model Trait +//! +//! Defines the Strategy Pattern interface for heat transfer calculations. +//! This trait is object-safe for dynamic dispatch. 
+ +use crate::ResidualVector; +use entropyk_core::{Enthalpy, MassFlow, Power, Pressure, Temperature}; + +/// Fluid state for heat transfer calculations. +/// +/// Represents the thermodynamic state at a port (inlet or outlet). +#[derive(Debug, Clone, Copy)] +pub struct FluidState { + /// Temperature in Kelvin + pub temperature: f64, + /// Pressure in Pascals + pub pressure: f64, + /// Specific enthalpy in J/kg + pub enthalpy: f64, + /// Mass flow rate in kg/s + pub mass_flow: f64, + /// Specific heat capacity at constant pressure in J/(kg·K) + pub cp: f64, +} + +impl Default for FluidState { + fn default() -> Self { + Self { + temperature: 300.0, + pressure: 101_325.0, + enthalpy: 0.0, + mass_flow: 0.1, + cp: 1000.0, + } + } +} + +impl FluidState { + /// Creates a new fluid state. + pub fn new(temperature: f64, pressure: f64, enthalpy: f64, mass_flow: f64, cp: f64) -> Self { + Self { + temperature, + pressure, + enthalpy, + mass_flow, + cp, + } + } + + /// Creates a fluid state with default properties for a given temperature. + pub fn from_temperature(temperature: f64) -> Self { + Self { + temperature, + ..Default::default() + } + } + + /// Returns the heat capacity rate C = ṁ × Cp in W/K. + pub fn heat_capacity_rate(&self) -> f64 { + self.mass_flow * self.cp + } + + /// Creates a FluidState from strongly-typed physical quantities. + pub fn from_types( + temperature: Temperature, + pressure: Pressure, + enthalpy: Enthalpy, + mass_flow: MassFlow, + cp: f64, + ) -> Self { + Self { + temperature: temperature.to_kelvin(), + pressure: pressure.to_pascals(), + enthalpy: enthalpy.to_joules_per_kg(), + mass_flow: mass_flow.to_kg_per_s(), + cp, + } + } + + /// Returns temperature as a strongly-typed Temperature. + pub fn temperature(&self) -> Temperature { + Temperature::from_kelvin(self.temperature) + } + + /// Returns pressure as a strongly-typed Pressure. 
+ pub fn pressure(&self) -> Pressure { + Pressure::from_pascals(self.pressure) + } + + /// Returns enthalpy as a strongly-typed Enthalpy. + pub fn enthalpy(&self) -> Enthalpy { + Enthalpy::from_joules_per_kg(self.enthalpy) + } + + /// Returns mass flow as a strongly-typed MassFlow. + pub fn mass_flow(&self) -> MassFlow { + MassFlow::from_kg_per_s(self.mass_flow) + } +} + +/// Trait for heat transfer calculation models. +/// +/// This trait uses the Strategy Pattern to allow different heat transfer +/// calculation methods (LMTD, ε-NTU, etc.) to be used interchangeably. +/// +/// # Object Safety +/// +/// This trait is object-safe and can be used with dynamic dispatch: +/// +/// ``` +/// # use entropyk_components::heat_exchanger::model::{HeatTransferModel, FluidState}; +/// # use entropyk_components::ResidualVector; +/// # use entropyk_core::Power; +/// struct SimpleModel { ua: f64 } +/// impl HeatTransferModel for SimpleModel { +/// fn compute_heat_transfer(&self, _: &FluidState, _: &FluidState, _: &FluidState, _: &FluidState) -> Power { +/// Power::from_watts(0.0) +/// } +/// fn compute_residuals(&self, _: &FluidState, _: &FluidState, _: &FluidState, _: &FluidState, _: &mut ResidualVector) {} +/// fn n_equations(&self) -> usize { 3 } +/// fn ua(&self) -> f64 { self.ua } +/// } +/// let model: Box = Box::new(SimpleModel { ua: 1000.0 }); +/// ``` +pub trait HeatTransferModel: Send + Sync { + /// Computes the heat transfer rate Q̇ (Watts). + /// + /// # Arguments + /// + /// * `hot_inlet` - Hot side inlet state + /// * `hot_outlet` - Hot side outlet state + /// * `cold_inlet` - Cold side inlet state + /// * `cold_outlet` - Cold side outlet state + /// + /// # Returns + /// + /// The heat transfer rate in Watts (positive = heat flows from hot to cold) + fn compute_heat_transfer( + &self, + hot_inlet: &FluidState, + hot_outlet: &FluidState, + cold_inlet: &FluidState, + cold_outlet: &FluidState, + ) -> Power; + + /// Computes residuals for the solver. 
+ /// + /// The residuals represent the error in the heat transfer equations + /// that the solver will attempt to drive to zero. + fn compute_residuals( + &self, + hot_inlet: &FluidState, + hot_outlet: &FluidState, + cold_inlet: &FluidState, + cold_outlet: &FluidState, + residuals: &mut ResidualVector, + ); + + /// Returns the number of equations this model contributes. + fn n_equations(&self) -> usize; + + /// Returns the nominal UA value (overall heat transfer coefficient × area) in W/K. + fn ua(&self) -> f64; + + /// Returns the UA calibration scale (default 1.0). UA_eff = ua_scale × ua_nominal. + fn ua_scale(&self) -> f64 { + 1.0 + } + + /// Sets the UA calibration scale (e.g. from Calib.f_ua). + fn set_ua_scale(&mut self, _s: f64) {} + + /// Returns the effective UA used in heat transfer: ua_scale × ua_nominal. + fn effective_ua(&self) -> f64 { + self.ua() * self.ua_scale() + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_fluid_state_default() { + let state = FluidState::default(); + assert_eq!(state.temperature, 300.0); + assert_eq!(state.pressure, 101_325.0); + assert_eq!(state.cp, 1000.0); + } + + #[test] + fn test_fluid_state_heat_capacity_rate() { + let state = FluidState::new(300.0, 101_325.0, 0.0, 0.5, 2000.0); + let c = state.heat_capacity_rate(); + assert!((c - 1000.0).abs() < 1e-10); + } + + #[test] + fn test_fluid_state_from_temperature() { + let state = FluidState::from_temperature(350.0); + assert_eq!(state.temperature, 350.0); + assert_eq!(state.pressure, 101_325.0); + } +} diff --git a/crates/components/src/pipe.rs b/crates/components/src/pipe.rs new file mode 100644 index 0000000..3192da6 --- /dev/null +++ b/crates/components/src/pipe.rs @@ -0,0 +1,1011 @@ +//! Pipe Component Implementation +//! +//! This module provides a pipe component for fluid transport with +//! pressure drop calculation using the Darcy-Weisbach equation. +//! +//! **Pipe serves for both refrigerant and incompressible fluid circuits** (water, +//! 
seawater, glycol, etc.). Use [`Pipe::for_incompressible`] or [`Pipe::for_refrigerant`] +//! with **explicit ρ and μ** obtained from a fluid backend. +//! +//! ## Fluid Support +//! +//! - **Refrigerant** (compressible): ρ and μ vary with P,T. Use [`Pipe::for_refrigerant`] +//! with design-point values from CoolProp or tabular backend. +//! +//! - **Incompressible** (water, seawater, glycol): ρ and μ from `IncompressibleBackend` +//! (Story 2.7). **Do not hardcode**—obtain properties via fluid backend. +//! +//! ## Darcy-Weisbach Equation +//! +//! ```text +//! ΔP = f × (L/D) × (ρ × v² / 2) +//! ``` +//! +//! Where: +//! - f = Darcy friction factor (dimensionless) +//! - L = Pipe length (m) +//! - D = Pipe inner diameter (m) +//! - ρ = Fluid density (kg/m³) +//! - v = Flow velocity (m/s) +//! +//! ## Haaland Friction Factor +//! +//! For turbulent flow, the Haaland approximation is used: +//! +//! ```text +//! 1/√f = -1.8 × log10[(ε/D/3.7)^1.11 + 6.9/Re] +//! ``` +//! +//! Where: +//! - ε = Pipe roughness (m) +//! - Re = Reynolds number = ρ × v × D / μ + +use crate::port::{Connected, Disconnected, FluidId, Port}; +use crate::state_machine::StateManageable; +use crate::{ + CircuitId, Component, ComponentError, ConnectedPort, JacobianBuilder, OperationalState, + ResidualVector, SystemState, +}; +use entropyk_core::{Calib, MassFlow}; +use std::marker::PhantomData; + +/// Common pipe materials and their typical roughness values (in meters). +pub mod roughness { + /// Smooth drawn tubing (copper, plastic) - 0.0015 mm + pub const SMOOTH: f64 = 1.5e-6; + /// Commercial steel pipe - 0.045 mm + pub const STEEL_COMMERCIAL: f64 = 4.5e-5; + /// Galvanized iron - 0.15 mm + pub const GALVANIZED_IRON: f64 = 1.5e-4; + /// Cast iron - 0.26 mm + pub const CAST_IRON: f64 = 2.6e-4; + /// Concrete - 1.0 mm + pub const CONCRETE: f64 = 1.0e-3; + /// PVC/HDPE plastic - 0.0015 mm + pub const PLASTIC: f64 = 1.5e-6; +} + +/// Pipe geometry specification. 
+#[derive(Debug, Clone, Copy, PartialEq)] +pub struct PipeGeometry { + /// Pipe length in meters + pub length_m: f64, + /// Inner diameter in meters + pub diameter_m: f64, + /// Pipe roughness in meters + pub roughness_m: f64, +} + +impl PipeGeometry { + /// Creates a new pipe geometry specification. + /// + /// # Arguments + /// + /// * `length_m` - Pipe length in meters + /// * `diameter_m` - Inner diameter in meters + /// * `roughness_m` - Pipe roughness in meters (use values from `roughness` module) + /// + /// # Errors + /// + /// Returns an error if any dimension is non-positive. + pub fn new(length_m: f64, diameter_m: f64, roughness_m: f64) -> Result { + if length_m <= 0.0 { + return Err(ComponentError::InvalidState( + "Pipe length must be positive".to_string(), + )); + } + if diameter_m <= 0.0 { + return Err(ComponentError::InvalidState( + "Pipe diameter must be positive".to_string(), + )); + } + if roughness_m < 0.0 { + return Err(ComponentError::InvalidState( + "Pipe roughness cannot be negative".to_string(), + )); + } + + Ok(Self { + length_m, + diameter_m, + roughness_m, + }) + } + + /// Creates a smooth pipe geometry. + pub fn smooth(length_m: f64, diameter_m: f64) -> Result { + Self::new(length_m, diameter_m, roughness::SMOOTH) + } + + /// Creates a commercial steel pipe geometry. + pub fn steel(length_m: f64, diameter_m: f64) -> Result { + Self::new(length_m, diameter_m, roughness::STEEL_COMMERCIAL) + } + + /// Returns the cross-sectional area in m². + pub fn area(&self) -> f64 { + std::f64::consts::PI * self.diameter_m * self.diameter_m / 4.0 + } + + /// Returns the length-to-diameter ratio (L/D). + pub fn ld_ratio(&self) -> f64 { + self.length_m / self.diameter_m + } + + /// Returns the relative roughness (ε/D). + pub fn relative_roughness(&self) -> f64 { + self.roughness_m / self.diameter_m + } +} + +/// Friction factor calculation methods. 
+pub mod friction_factor { + use entropyk_core::MIN_MASS_FLOW_REGULARIZATION_KG_S; + + /// Minimum Reynolds number for zero-flow regularization. + /// + /// Reynolds is dimensionless (Re = ρvD/μ), so MIN_REYNOLDS = 1.0 is physically reasonable + /// for preventing division by zero. This is independent of [`MIN_MASS_FLOW_REGULARIZATION_KG_S`] + /// which applies to mass flow (kg/s). Both serve the same purpose: avoiding NaN/Inf in denominators. + /// + /// [`MIN_MASS_FLOW_REGULARIZATION_KG_S`]: entropyk_core::MIN_MASS_FLOW_REGULARIZATION_KG_S + const MIN_REYNOLDS: f64 = 1.0; + + /// Calculates the Haaland friction factor for turbulent flow. + /// + /// The Haaland equation is an approximation of the Colebrook-White equation + /// that can be solved explicitly without iteration. + /// + /// # Arguments + /// + /// * `relative_roughness` - ε/D (dimensionless) + /// * `reynolds` - Reynolds number (dimensionless) + /// + /// # Returns + /// + /// Darcy friction factor f + /// + /// # Zero-flow regularization + /// + /// Re is clamped to at least `MIN_REYNOLDS` so that divisions (64/Re, 6.9/Re) never cause NaN/Inf. + pub fn haaland(relative_roughness: f64, reynolds: f64) -> f64 { + if reynolds <= 0.0 { + return 0.02; // Default for invalid input + } + let reynolds = reynolds.max(MIN_REYNOLDS); + + // Laminar flow: f = 64/Re + if reynolds < 2300.0 { + return 64.0 / reynolds; + } + + // Haaland equation (turbulent) + // 1/√f = -1.8 × log10[(ε/D/3.7)^1.11 + 6.9/Re] + let term1 = (relative_roughness / 3.7).powf(1.11); + let term2 = 6.9 / reynolds; + let inv_sqrt_f = -1.8 * (term1 + term2).log10(); + + 1.0 / (inv_sqrt_f * inv_sqrt_f) + } + + /// Calculates the Swamee-Jain friction factor (alternative to Haaland). + /// + /// Explicit approximation valid for: + /// - 10^-6 < ε/D < 10^-2 + /// - 5000 < Re < 10^8 + /// + /// # Zero-flow regularization + /// + /// Re is clamped to at least `MIN_REYNOLDS` so that divisions by Re never cause NaN/Inf. 
+ pub fn swamee_jain(relative_roughness: f64, reynolds: f64) -> f64 { + if reynolds <= 0.0 { + return 0.02; + } + let reynolds = reynolds.max(MIN_REYNOLDS); + + if reynolds < 2300.0 { + return 64.0 / reynolds; + } + + let term1 = relative_roughness / 3.7; + let term2 = 5.74 / reynolds.powf(0.9); + let log_term = (term1 + term2).log10(); + + 0.25 / (log_term * log_term) + } + + /// Simple friction factor for quick estimates. + /// + /// Returns f ≈ 0.02 for turbulent flow (typical for commercial pipes). + pub fn simplified(_relative_roughness: f64, reynolds: f64) -> f64 { + if reynolds < 2300.0 { + return 64.0 / reynolds.max(1.0); + } + 0.02 + } +} + +/// A pipe component with pressure drop calculation. +/// +/// Uses the Darcy-Weisbach equation with the Haaland friction factor +/// for accurate pressure drop estimation. +/// +/// **Dual refrigerant/incompressible usage:** Use [`Pipe::for_incompressible`] for water, +/// seawater, glycol (ρ, μ from backend); use [`Pipe::for_refrigerant`] for refrigerant +/// circuits with design-point ρ and μ from a fluid backend. 
+/// +/// # Example +/// +/// ```ignore +/// use entropyk_components::pipe::{Pipe, PipeGeometry, roughness}; +/// use entropyk_components::port::{FluidId, Port}; +/// use entropyk_core::{Pressure, Enthalpy}; +/// +/// // Create a 10m long, 50mm diameter steel pipe +/// let geometry = PipeGeometry::steel(10.0, 0.05).unwrap(); +/// +/// let inlet = Port::new( +/// FluidId::new("Water"), +/// Pressure::from_bar(2.0), +/// Enthalpy::from_joules_per_kg(100000.0), +/// ); +/// let outlet = Port::new( +/// FluidId::new("Water"), +/// Pressure::from_bar(2.0), +/// Enthalpy::from_joules_per_kg(100000.0), +/// ); +/// +/// let pipe = Pipe::new(geometry, inlet, outlet, 1000.0, 0.001).unwrap(); +/// ``` +#[derive(Debug, Clone)] +pub struct Pipe { + /// Pipe geometry + geometry: PipeGeometry, + /// Inlet port + port_inlet: Port, + /// Outlet port + port_outlet: Port, + /// Fluid density in kg/m³ + fluid_density_kg_per_m3: f64, + /// Fluid dynamic viscosity in Pa·s + fluid_viscosity_pa_s: f64, + /// Calibration: ΔP_eff = f_dp × ΔP_nominal + calib: Calib, + /// Circuit identifier + circuit_id: CircuitId, + /// Operational state + operational_state: OperationalState, + /// Phantom data for type state + _state: PhantomData, +} + +impl Pipe { + /// Creates a new disconnected pipe. 
+ /// + /// # Arguments + /// + /// * `geometry` - Pipe geometry specification + /// * `port_inlet` - Inlet port (disconnected) + /// * `port_outlet` - Outlet port (disconnected) + /// * `fluid_density` - Fluid density in kg/m³ + /// * `fluid_viscosity` - Fluid dynamic viscosity in Pa·s + pub fn new( + geometry: PipeGeometry, + port_inlet: Port, + port_outlet: Port, + fluid_density: f64, + fluid_viscosity: f64, + ) -> Result { + if port_inlet.fluid_id() != port_outlet.fluid_id() { + return Err(ComponentError::InvalidState( + "Inlet and outlet ports must have the same fluid type".to_string(), + )); + } + + if fluid_density <= 0.0 { + return Err(ComponentError::InvalidState( + "Fluid density must be positive".to_string(), + )); + } + + if fluid_viscosity <= 0.0 { + return Err(ComponentError::InvalidState( + "Fluid viscosity must be positive".to_string(), + )); + } + + Ok(Self { + geometry, + port_inlet, + port_outlet, + fluid_density_kg_per_m3: fluid_density, + fluid_viscosity_pa_s: fluid_viscosity, + calib: Calib::default(), + circuit_id: CircuitId::default(), + operational_state: OperationalState::default(), + _state: PhantomData, + }) + } + + /// Creates a pipe for incompressible fluid circuits (water, seawater, glycol). + /// + /// **Obtain ρ and μ from a fluid backend** (e.g. `IncompressibleBackend` from Story 2.7). + /// Do not hardcode—water, seawater, and glycol have different properties. 
+ /// + /// # Arguments + /// + /// * `geometry` - Pipe geometry specification + /// * `port_inlet` - Inlet port (disconnected) + /// * `port_outlet` - Outlet port (disconnected) + /// * `density` - Fluid density at design point (kg/m³) + /// * `viscosity` - Fluid dynamic viscosity at design point (Pa·s) + /// + /// # Example + /// + /// ``` + /// use entropyk_components::pipe::{Pipe, PipeGeometry}; + /// use entropyk_components::port::{FluidId, Port}; + /// use entropyk_core::{Pressure, Enthalpy}; + /// + /// let geometry = PipeGeometry::smooth(5.0, 0.025).unwrap(); + /// let inlet = Port::new( + /// FluidId::new("Water"), + /// Pressure::from_bar(2.0), + /// Enthalpy::from_joules_per_kg(100000.0), + /// ); + /// let outlet = Port::new( + /// FluidId::new("Water"), + /// Pressure::from_bar(2.0), + /// Enthalpy::from_joules_per_kg(100000.0), + /// ); + /// // Get ρ, μ from IncompressibleBackend.property() at design temperature + /// let pipe = Pipe::for_incompressible(geometry, inlet, outlet, 998.0, 0.001).unwrap(); + /// assert!((pipe.fluid_density() - 998.0).abs() < 1e-6); + /// ``` + pub fn for_incompressible( + geometry: PipeGeometry, + port_inlet: Port, + port_outlet: Port, + density: f64, + viscosity: f64, + ) -> Result { + Self::new(geometry, port_inlet, port_outlet, density, viscosity) + } + + /// Creates a pipe for refrigerant circuits with explicit design-point properties. + /// + /// Refrigerant ρ and μ vary with pressure and temperature. These values are + /// **design-point typical values** at the operating condition. For accurate + /// simulation, obtain ρ and μ from the fluid properties backend (CoolProp, + /// tabular interpolation, etc.). + /// + /// Typical design-point values (liquid phase): R134a 40°C ~1140 kg/m³, ~0.0002 Pa·s; + /// R410A 40°C ~1050 kg/m³, ~0.00015 Pa·s. 
+ /// + /// # Arguments + /// + /// * `geometry` - Pipe geometry specification + /// * `port_inlet` - Inlet port (disconnected) + /// * `port_outlet` - Outlet port (disconnected) + /// * `density` - Fluid density at design point (kg/m³) + /// * `viscosity` - Fluid dynamic viscosity at design point (Pa·s) + /// + /// # Example + /// + /// ``` + /// use entropyk_components::pipe::{Pipe, PipeGeometry}; + /// use entropyk_components::port::{FluidId, Port}; + /// use entropyk_core::{Pressure, Enthalpy}; + /// + /// let geometry = PipeGeometry::smooth(3.0, 0.012).unwrap(); + /// let inlet = Port::new( + /// FluidId::new("R134a"), + /// Pressure::from_bar(10.0), + /// Enthalpy::from_joules_per_kg(250000.0), + /// ); + /// let outlet = Port::new( + /// FluidId::new("R134a"), + /// Pressure::from_bar(10.0), + /// Enthalpy::from_joules_per_kg(250000.0), + /// ); + /// // R134a liquid at ~40°C: ρ ≈ 1140, μ ≈ 0.0002 + /// let pipe = Pipe::for_refrigerant(geometry, inlet, outlet, 1140.0, 0.0002).unwrap(); + /// assert_eq!(pipe.fluid_id().as_str(), "R134a"); + /// ``` + pub fn for_refrigerant( + geometry: PipeGeometry, + port_inlet: Port, + port_outlet: Port, + density: f64, + viscosity: f64, + ) -> Result { + Self::new(geometry, port_inlet, port_outlet, density, viscosity) + } + + /// Returns the fluid identifier. + pub fn fluid_id(&self) -> &FluidId { + self.port_inlet.fluid_id() + } + + /// Returns the pipe geometry. + pub fn geometry(&self) -> &PipeGeometry { + &self.geometry + } + + /// Returns the fluid density. + pub fn fluid_density(&self) -> f64 { + self.fluid_density_kg_per_m3 + } + + /// Returns the fluid viscosity. + pub fn fluid_viscosity(&self) -> f64 { + self.fluid_viscosity_pa_s + } + + /// Returns calibration factors (f_dp for pressure drop scaling). + pub fn calib(&self) -> &Calib { + &self.calib + } + + /// Sets calibration factors. + pub fn set_calib(&mut self, calib: Calib) { + self.calib = calib; + } +} + +impl Pipe { + /// Returns the inlet port. 
+ pub fn port_inlet(&self) -> &Port { + &self.port_inlet + } + + /// Returns the outlet port. + pub fn port_outlet(&self) -> &Port { + &self.port_outlet + } + + /// Calculates the flow velocity. + /// + /// # Arguments + /// + /// * `flow_m3_per_s` - Volumetric flow rate in m³/s + /// + /// # Returns + /// + /// Velocity in m/s + pub fn velocity(&self, flow_m3_per_s: f64) -> f64 { + let area = self.geometry.area(); + if area > 0.0 { + flow_m3_per_s / area + } else { + 0.0 + } + } + + /// Calculates the Reynolds number. + /// + /// Re = ρ × v × D / μ + pub fn reynolds_number(&self, flow_m3_per_s: f64) -> f64 { + let velocity = self.velocity(flow_m3_per_s); + velocity * self.geometry.diameter_m * self.fluid_density_kg_per_m3 + / self.fluid_viscosity_pa_s + } + + /// Calculates the Darcy friction factor using Haaland equation. + pub fn friction_factor(&self, flow_m3_per_s: f64) -> f64 { + let rel_roughness = self.geometry.relative_roughness(); + let re = self.reynolds_number(flow_m3_per_s); + friction_factor::haaland(rel_roughness, re) + } + + /// Calculates the pressure drop using Darcy-Weisbach equation. + /// + /// ΔP = f × (L/D) × (ρ × v² / 2) + /// + /// # Arguments + /// + /// * `flow_m3_per_s` - Volumetric flow rate in m³/s + /// + /// # Returns + /// + /// Pressure drop in Pascals (positive value) + pub fn pressure_drop(&self, flow_m3_per_s: f64) -> f64 { + if flow_m3_per_s <= 0.0 { + return 0.0; + } + + let velocity = self.velocity(flow_m3_per_s); + let f = self.friction_factor(flow_m3_per_s); + let ld = self.geometry.ld_ratio(); + + // Darcy-Weisbach nominal: ΔP_nominal = f × (L/D) × (ρ × v² / 2); ΔP_eff = f_dp × ΔP_nominal + let dp_nominal = f * ld * self.fluid_density_kg_per_m3 * velocity * velocity / 2.0; + dp_nominal * self.calib.f_dp + } + + /// Calculates mass flow from volumetric flow. 
+ pub fn mass_flow_from_volumetric(&self, flow_m3_per_s: f64) -> MassFlow { + MassFlow::from_kg_per_s(flow_m3_per_s * self.fluid_density_kg_per_m3) + } + + /// Calculates volumetric flow from mass flow. + pub fn volumetric_from_mass_flow(&self, mass_flow: MassFlow) -> f64 { + mass_flow.to_kg_per_s() / self.fluid_density_kg_per_m3 + } + + /// Returns the pipe geometry. + pub fn geometry(&self) -> &PipeGeometry { + &self.geometry + } + + /// Returns the fluid density. + pub fn fluid_density(&self) -> f64 { + self.fluid_density_kg_per_m3 + } + + /// Returns the fluid viscosity. + pub fn fluid_viscosity(&self) -> f64 { + self.fluid_viscosity_pa_s + } + + /// Returns calibration factors (f_dp for pressure drop scaling). + pub fn calib(&self) -> &Calib { + &self.calib + } + + /// Sets calibration factors. + pub fn set_calib(&mut self, calib: Calib) { + self.calib = calib; + } + + /// Returns both ports as a slice. + pub fn get_ports_slice(&self) -> [&Port; 2] { + [&self.port_inlet, &self.port_outlet] + } +} + +impl Component for Pipe { + fn compute_residuals( + &self, + state: &SystemState, + residuals: &mut ResidualVector, + ) -> Result<(), ComponentError> { + if residuals.len() != self.n_equations() { + return Err(ComponentError::InvalidResidualDimensions { + expected: self.n_equations(), + actual: residuals.len(), + }); + } + + match self.operational_state { + OperationalState::Off => { + // Blocked pipe: no flow + residuals[0] = state[0]; + return Ok(()); + } + OperationalState::Bypass => { + // No pressure drop (perfect pipe) + residuals[0] = 0.0; + return Ok(()); + } + OperationalState::On => {} + } + + if state.is_empty() { + return Err(ComponentError::InvalidStateDimensions { + expected: 1, + actual: 0, + }); + } + + let mass_flow_kg_s = state[0]; + let flow_m3_s = mass_flow_kg_s / self.fluid_density_kg_per_m3; + + // Calculate pressure drop + let dp_calc = self.pressure_drop(flow_m3_s); + + // Get actual pressure difference + let p_in = 
self.port_inlet.pressure().to_pascals(); + let p_out = self.port_outlet.pressure().to_pascals(); + let dp_actual = p_in - p_out; + + // Residual: calculated drop - actual drop = 0 + residuals[0] = dp_calc - dp_actual; + + Ok(()) + } + + fn jacobian_entries( + &self, + state: &SystemState, + jacobian: &mut JacobianBuilder, + ) -> Result<(), ComponentError> { + if state.is_empty() { + return Err(ComponentError::InvalidStateDimensions { + expected: 1, + actual: 0, + }); + } + + let mass_flow_kg_s = state[0]; + let flow_m3_s = mass_flow_kg_s / self.fluid_density_kg_per_m3; + + // Numerical derivative of pressure drop with respect to mass flow + let h = 0.001; + let dp_plus = self.pressure_drop(flow_m3_s + h / self.fluid_density_kg_per_m3); + let dp_minus = self.pressure_drop((flow_m3_s - h / self.fluid_density_kg_per_m3).max(0.0)); + let dp_dm = (dp_plus - dp_minus) / (2.0 * h); + + jacobian.add_entry(0, 0, dp_dm); + + Ok(()) + } + + fn n_equations(&self) -> usize { + 1 + } + + fn get_ports(&self) -> &[ConnectedPort] { + &[] + } +} + +impl StateManageable for Pipe { + fn state(&self) -> OperationalState { + self.operational_state + } + + fn set_state(&mut self, state: OperationalState) -> Result<(), ComponentError> { + if self.operational_state.can_transition_to(state) { + let from = self.operational_state; + self.operational_state = state; + self.on_state_change(from, state); + Ok(()) + } else { + Err(ComponentError::InvalidStateTransition { + from: self.operational_state, + to: state, + reason: "Transition not allowed".to_string(), + }) + } + } + + fn can_transition_to(&self, target: OperationalState) -> bool { + self.operational_state.can_transition_to(target) + } + + fn circuit_id(&self) -> &CircuitId { + &self.circuit_id + } + + fn set_circuit_id(&mut self, circuit_id: CircuitId) { + self.circuit_id = circuit_id; + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::port::FluidId; + use approx::assert_relative_eq; + use entropyk_core::{Enthalpy, 
Pressure}; + + fn create_test_geometry() -> PipeGeometry { + PipeGeometry::steel(10.0, 0.05).unwrap() + } + + fn create_test_pipe_connected() -> Pipe { + let geometry = create_test_geometry(); + let inlet = Port::new( + FluidId::new("Water"), + Pressure::from_bar(2.0), + Enthalpy::from_joules_per_kg(100000.0), + ); + let outlet = Port::new( + FluidId::new("Water"), + Pressure::from_bar(2.0), + Enthalpy::from_joules_per_kg(100000.0), + ); + let (inlet_conn, outlet_conn) = inlet.connect(outlet).unwrap(); + + // Water at 20°C: ρ ≈ 998 kg/m³, μ ≈ 0.001 Pa·s + Pipe { + geometry, + port_inlet: inlet_conn, + port_outlet: outlet_conn, + fluid_density_kg_per_m3: 998.0, + fluid_viscosity_pa_s: 0.001, + calib: Calib::default(), + circuit_id: CircuitId::default(), + operational_state: OperationalState::default(), + _state: PhantomData, + } + } + + #[test] + fn test_pipe_geometry_creation() { + let geo = create_test_geometry(); + assert_eq!(geo.length_m, 10.0); + assert_eq!(geo.diameter_m, 0.05); + assert_eq!(geo.ld_ratio(), 200.0); + } + + #[test] + fn test_pipe_geometry_area() { + let geo = create_test_geometry(); + let area = geo.area(); + let expected = std::f64::consts::PI * 0.05 * 0.05 / 4.0; + assert_relative_eq!(area, expected, epsilon = 1e-10); + } + + #[test] + fn test_pipe_geometry_invalid() { + assert!(PipeGeometry::new(-1.0, 0.05, 0.001).is_err()); + assert!(PipeGeometry::new(10.0, -0.05, 0.001).is_err()); + assert!(PipeGeometry::new(10.0, 0.05, -0.001).is_err()); + } + + #[test] + fn test_friction_factor_laminar() { + // For Re = 1000 (laminar), f = 64/1000 = 0.064 + let f = friction_factor::haaland(0.001, 1000.0); + assert_relative_eq!(f, 0.064, epsilon = 1e-10); + } + + #[test] + fn test_friction_factor_turbulent() { + // For smooth pipe at Re = 100000 + let f = friction_factor::haaland(0.0, 100000.0); + // Should be around 0.018 for this Reynolds number + assert!(f > 0.01); + assert!(f < 0.03); + } + + #[test] + fn test_friction_factor_transition() { + // Near 
transition region (Re ≈ 2300) + let f_lam = friction_factor::haaland(0.001, 2000.0); + let f_turb = friction_factor::haaland(0.001, 3000.0); + + // Both should be positive and reasonable + assert!(f_lam > 0.0); + assert!(f_turb > 0.0); + } + + #[test] + fn test_friction_factor_zero_flow_regularization() { + // Re = 0 or very small must not cause division by zero (Story 3.5) + let f0_haaland = friction_factor::haaland(0.001, 0.0); + let f0_sj = friction_factor::swamee_jain(0.001, 0.0); + assert!(f0_haaland.is_finite()); + assert!(f0_sj.is_finite()); + assert_relative_eq!(f0_haaland, 0.02, epsilon = 1e-10); + assert_relative_eq!(f0_sj, 0.02, epsilon = 1e-10); + + let f_small_haaland = friction_factor::haaland(0.001, 0.5); + let f_small_sj = friction_factor::swamee_jain(0.001, 0.5); + assert!(f_small_haaland.is_finite()); + assert!(f_small_sj.is_finite()); + } + + #[test] + fn test_pipe_pressure_drop_zero_and_small_flow() { + let pipe = create_test_pipe_connected(); + let dp_zero = pipe.pressure_drop(0.0); + assert_eq!(dp_zero, 0.0); + let dp_tiny = pipe.pressure_drop(1e-15); + assert!(dp_tiny.is_finite()); + assert!(dp_tiny >= 0.0); + } + + #[test] + fn test_pipe_small_nonzero_flow_continuity() { + use entropyk_core::MIN_MASS_FLOW_REGULARIZATION_KG_S; + let pipe = create_test_pipe_connected(); + + let dp_zero = pipe.pressure_drop(0.0); + let dp_epsilon = + pipe.pressure_drop(MIN_MASS_FLOW_REGULARIZATION_KG_S / pipe.fluid_density()); + let dp_normal = pipe.pressure_drop(0.01); + + assert!(dp_zero.is_finite()); + assert!(dp_epsilon.is_finite()); + assert!(dp_normal.is_finite()); + + assert!( + dp_epsilon < dp_normal, + "tiny flow should have smaller dp than normal" + ); + assert!( + dp_epsilon >= 0.0, + "dp should be non-negative at epsilon flow" + ); + } + + #[test] + fn test_pipe_velocity() { + let pipe = create_test_pipe_connected(); + let flow = 0.01; // 10 L/s + let velocity = pipe.velocity(flow); + + // v = Q / A = 0.01 / (π × 0.05² / 4) ≈ 5.09 m/s + let expected 
= 0.01 / pipe.geometry().area(); + assert_relative_eq!(velocity, expected, epsilon = 0.01); + } + + #[test] + fn test_pipe_reynolds() { + let pipe = create_test_pipe_connected(); + let flow = 0.01; + let re = pipe.reynolds_number(flow); + + // Re = ρ × v × D / μ + // With water at typical flow, should be turbulent (> 4000) + assert!(re > 4000.0); + assert!(re < 1_000_000.0); + } + + #[test] + fn test_pipe_pressure_drop() { + let pipe = create_test_pipe_connected(); + let flow = 0.005; // 5 L/s + + let dp = pipe.pressure_drop(flow); + + // Should be positive and reasonable (typically < 100 kPa for this pipe) + assert!(dp > 0.0); + assert!(dp < 200_000.0); + } + + #[test] + fn test_pipe_pressure_drop_scales() { + let pipe = create_test_pipe_connected(); + + let dp1 = pipe.pressure_drop(0.005); + let dp2 = pipe.pressure_drop(0.010); // Double the flow + + // Pressure drop should increase (roughly quadruple for turbulent) + assert!(dp2 > dp1); + } + + #[test] + fn test_f_dp_scales_pressure_drop() { + let mut pipe = create_test_pipe_connected(); + let flow = 0.005; + let dp_default = pipe.pressure_drop(flow); + pipe.set_calib(Calib { + f_dp: 1.1, + ..Calib::default() + }); + let dp_calib = pipe.pressure_drop(flow); + assert_relative_eq!(dp_calib / dp_default, 1.1, epsilon = 1e-10); + } + + #[test] + fn test_pipe_component_n_equations() { + let pipe = create_test_pipe_connected(); + assert_eq!(pipe.n_equations(), 1); + } + + #[test] + fn test_pipe_component_compute_residuals() { + let pipe = create_test_pipe_connected(); + let state = vec![5.0]; // 5 kg/s + let mut residuals = vec![0.0; 1]; + + let result = pipe.compute_residuals(&state, &mut residuals); + assert!(result.is_ok()); + } + + #[test] + fn test_pipe_state_manageable() { + let pipe = create_test_pipe_connected(); + assert_eq!(pipe.state(), OperationalState::On); + assert!(pipe.can_transition_to(OperationalState::Off)); + assert!(pipe.can_transition_to(OperationalState::Bypass)); + } + + #[test] + fn 
test_roughness_constants() { + assert!(roughness::SMOOTH < roughness::STEEL_COMMERCIAL); + assert!(roughness::STEEL_COMMERCIAL < roughness::CAST_IRON); + assert!(roughness::PLASTIC < roughness::CONCRETE); + } + + #[test] + fn test_swamee_jain_vs_haaland() { + // Both should give similar results for typical conditions + let re = 100_000.0; + let rr = 0.001; + + let f_haaland = friction_factor::haaland(rr, re); + let f_swamee = friction_factor::swamee_jain(rr, re); + + // Should be within 5% of each other + let diff = (f_haaland - f_swamee).abs() / f_haaland; + assert!(diff < 0.05); + } + + #[test] + fn test_pipe_for_incompressible_creation() { + let geometry = PipeGeometry::smooth(5.0, 0.025).unwrap(); + let inlet = Port::new( + FluidId::new("Water"), + Pressure::from_bar(2.0), + Enthalpy::from_joules_per_kg(100000.0), + ); + let outlet = Port::new( + FluidId::new("Water"), + Pressure::from_bar(2.0), + Enthalpy::from_joules_per_kg(100000.0), + ); + + // ρ, μ from IncompressibleBackend at design point (e.g. 
water 20°C) + let pipe = Pipe::for_incompressible(geometry, inlet, outlet, 998.0, 0.001).unwrap(); + + assert_relative_eq!(pipe.fluid_density(), 998.0, epsilon = 1e-6); + assert_relative_eq!(pipe.fluid_viscosity(), 0.001, epsilon = 1e-9); + assert_eq!(pipe.fluid_id().as_str(), "Water"); + } + + #[test] + fn test_pipe_for_incompressible_glycol() { + // Glycol has different ρ, μ than water - user provides from backend + let geometry = PipeGeometry::smooth(5.0, 0.025).unwrap(); + let inlet = Port::new( + FluidId::new("EthyleneGlycol30"), + Pressure::from_bar(2.0), + Enthalpy::from_joules_per_kg(100000.0), + ); + let outlet = Port::new( + FluidId::new("EthyleneGlycol30"), + Pressure::from_bar(2.0), + Enthalpy::from_joules_per_kg(100000.0), + ); + + let pipe = Pipe::for_incompressible(geometry, inlet, outlet, 1055.0, 0.0022).unwrap(); + assert_relative_eq!(pipe.fluid_density(), 1055.0, epsilon = 1e-6); + assert_eq!(pipe.fluid_id().as_str(), "EthyleneGlycol30"); + } + + #[test] + fn test_pipe_for_refrigerant_creation() { + let geometry = PipeGeometry::smooth(3.0, 0.012).unwrap(); + let inlet = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(10.0), + Enthalpy::from_joules_per_kg(250000.0), + ); + let outlet = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(10.0), + Enthalpy::from_joules_per_kg(250000.0), + ); + + let pipe = Pipe::for_refrigerant(geometry, inlet, outlet, 1140.0, 0.0002).unwrap(); + + assert_eq!(pipe.fluid_id().as_str(), "R134a"); + assert_relative_eq!(pipe.fluid_density(), 1140.0, epsilon = 1e-6); + assert_relative_eq!(pipe.fluid_viscosity(), 0.0002, epsilon = 1e-9); + } + + #[test] + fn test_pipe_inlet_outlet_same_fluid() { + // Pipe::new and helpers require inlet/outlet same FluidId + let geometry = PipeGeometry::smooth(5.0, 0.025).unwrap(); + let water_inlet = Port::new( + FluidId::new("Water"), + Pressure::from_bar(2.0), + Enthalpy::from_joules_per_kg(100000.0), + ); + let r134a_outlet = Port::new( + FluidId::new("R134a"), + 
Pressure::from_bar(2.0), + Enthalpy::from_joules_per_kg(100000.0), + ); + + let result = Pipe::for_incompressible(geometry, water_inlet, r134a_outlet, 998.0, 0.001); + assert!(result.is_err()); + assert!(result.unwrap_err().to_string().contains("same fluid")); + } +} diff --git a/crates/components/src/polynomials.rs b/crates/components/src/polynomials.rs new file mode 100644 index 0000000..c9683a7 --- /dev/null +++ b/crates/components/src/polynomials.rs @@ -0,0 +1,702 @@ +//! Polynomial curve models for component performance characterization. +//! +//! This module provides polynomial curve implementations for: +//! - 1D polynomials: Pump curves (Q-H, efficiency), Fan curves +//! - 2D polynomials: Compressor maps based on SST/SDT +//! +//! ## 1D Polynomial (Pump/Fan Curves) +//! +//! ```text +//! y = c0 + c1*x + c2*x² + c3*x³ + ... +//! ``` +//! +//! ## 2D Polynomial (Compressor Maps) +//! +//! ```text +//! z = Σ a_ij * x^i * y^j +//! ``` +//! +//! Where x = SST (Saturated Suction Temperature) +//! and y = SDT (Saturated Discharge Temperature) + +use crate::ComponentError; +use serde::{Deserialize, Serialize}; + +/// 1D Polynomial curve for component performance modeling. +/// +/// Used for pump head curves, fan static pressure curves, and efficiency curves. +/// +/// # Example +/// +/// ``` +/// use entropyk_components::polynomials::Polynomial1D; +/// +/// // Pump curve: H = 50 - 0.1*Q - 0.001*Q² +/// let curve = Polynomial1D::new(vec![50.0, -0.1, -0.001]); +/// +/// // Evaluate at Q = 100 m³/h +/// let head = curve.evaluate(100.0); +/// assert!(head > 0.0); +/// ``` +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct Polynomial1D { + /// Polynomial coefficients [c0, c1, c2, ...] for y = c0 + c1*x + c2*x² + ... + coefficients: Vec, +} + +impl Polynomial1D { + /// Creates a new 1D polynomial from coefficients. + /// + /// # Arguments + /// + /// * `coefficients` - Coefficients [c0, c1, c2, ...] where y = c0 + c1*x + c2*x² + ... 
+ /// + /// # Example + /// + /// ``` + /// use entropyk_components::polynomials::Polynomial1D; + /// + /// // Linear: y = 2x + 3 + /// let linear = Polynomial1D::new(vec![3.0, 2.0]); + /// + /// // Quadratic: y = 1 + 2x + 3x² + /// let quadratic = Polynomial1D::new(vec![1.0, 2.0, 3.0]); + /// ``` + pub fn new(coefficients: Vec) -> Self { + Self { coefficients } + } + + /// Creates a constant polynomial (degree 0). + pub fn constant(value: f64) -> Self { + Self::new(vec![value]) + } + + /// Creates a linear polynomial: y = a + b*x. + pub fn linear(a: f64, b: f64) -> Self { + Self::new(vec![a, b]) + } + + /// Creates a quadratic polynomial: y = a + b*x + c*x². + pub fn quadratic(a: f64, b: f64, c: f64) -> Self { + Self::new(vec![a, b, c]) + } + + /// Creates a cubic polynomial: y = a + b*x + c*x² + d*x³. + pub fn cubic(a: f64, b: f64, c: f64, d: f64) -> Self { + Self::new(vec![a, b, c, d]) + } + + /// Evaluates the polynomial at point x using Horner's method. + /// + /// # Arguments + /// + /// * `x` - The point at which to evaluate + /// + /// # Returns + /// + /// The polynomial value y = P(x) + /// + /// # Example + /// + /// ``` + /// use entropyk_components::polynomials::Polynomial1D; + /// + /// let p = Polynomial1D::quadratic(1.0, 2.0, 3.0); + /// // y = 1 + 2*2 + 3*4 = 1 + 4 + 12 = 17 + /// assert!((p.evaluate(2.0) - 17.0).abs() < 1e-10); + /// ``` + pub fn evaluate(&self, x: f64) -> f64 { + if self.coefficients.is_empty() { + return 0.0; + } + + // Horner's method: efficient polynomial evaluation + self.coefficients + .iter() + .rev() + .fold(0.0, |acc, &c| acc * x + c) + } + + /// Computes the derivative of the polynomial at point x. 
+ /// + /// # Arguments + /// + /// * `x` - The point at which to evaluate the derivative + /// + /// # Example + /// + /// ``` + /// use entropyk_components::polynomials::Polynomial1D; + /// + /// let p = Polynomial1D::quadratic(1.0, 2.0, 3.0); + /// // dy/dx = 2 + 6x, at x=2: dy/dx = 2 + 12 = 14 + /// assert!((p.derivative(2.0) - 14.0).abs() < 1e-10); + /// ``` + pub fn derivative(&self, x: f64) -> f64 { + if self.coefficients.len() <= 1 { + return 0.0; + } + + // Derivative coefficients: [c1, 2*c2, 3*c3, ...] + let deriv_coeffs: Vec = self.coefficients[1..] + .iter() + .enumerate() + .map(|(i, &c)| c * (i + 1) as f64) + .collect(); + + Polynomial1D::new(deriv_coeffs).evaluate(x) + } + + /// Returns the degree of the polynomial. + pub fn degree(&self) -> usize { + if self.coefficients.is_empty() { + 0 + } else { + self.coefficients.len() - 1 + } + } + + /// Returns a reference to the coefficients. + pub fn coefficients(&self) -> &[f64] { + &self.coefficients + } + + /// Validates that all coefficients are finite (not NaN or infinite). + pub fn validate(&self) -> Result<(), ComponentError> { + for (i, &c) in self.coefficients.iter().enumerate() { + if c.is_nan() { + return Err(ComponentError::InvalidState(format!( + "Coefficient {} is NaN", + i + ))); + } + if c.is_infinite() { + return Err(ComponentError::InvalidState(format!( + "Coefficient {} is infinite", + i + ))); + } + } + Ok(()) + } +} + +impl Default for Polynomial1D { + fn default() -> Self { + Self::constant(0.0) + } +} + +/// 2D Polynomial for compressor maps based on SST/SDT. 
+/// +/// Models performance as a function of two variables (e.g., SST and SDT): +/// +/// ```text +/// z = Σ a_ij * x^i * y^j for i=0..nx, j=0..ny +/// ``` +/// +/// # Example +/// +/// ``` +/// use entropyk_components::polynomials::Polynomial2D; +/// +/// // Simple bilinear: z = a00 + a10*x + a01*y + a11*x*y +/// let coeffs = vec![ +/// vec![1.0, 0.5], // a00, a01 +/// vec![2.0, 0.1], // a10, a11 +/// ]; +/// let poly = Polynomial2D::new(coeffs); +/// +/// let z = poly.evaluate(10.0, 20.0); +/// ``` +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct Polynomial2D { + /// Coefficient matrix where coeffs[i][j] is the coefficient for x^i * y^j + /// coeffs[i] contains coefficients for all j values at degree i in x + coefficients: Vec>, +} + +impl Polynomial2D { + /// Creates a new 2D polynomial from coefficient matrix. + /// + /// # Arguments + /// + /// * `coefficients` - Matrix where coefficients[i][j] is coefficient for x^i * y^j + /// + /// # Example + /// + /// ``` + /// use entropyk_components::polynomials::Polynomial2D; + /// + /// // z = 5 + 2*x + 3*y + 0.5*x*y + /// let coeffs = vec![ + /// vec![5.0, 3.0], // Constant and y term + /// vec![2.0, 0.5], // x term and x*y term + /// ]; + /// let poly = Polynomial2D::new(coeffs); + /// ``` + pub fn new(coefficients: Vec>) -> Self { + Self { coefficients } + } + + /// Creates a constant 2D polynomial. + pub fn constant(value: f64) -> Self { + Self::new(vec![vec![value]]) + } + + /// Creates a bilinear polynomial: z = a00 + a10*x + a01*y + a11*x*y. + pub fn bilinear(a00: f64, a10: f64, a01: f64, a11: f64) -> Self { + Self::new(vec![vec![a00, a01], vec![a10, a11]]) + } + + /// Creates a biquadratic polynomial (degree 2 in both variables). 
+ /// + /// z = a00 + a10*x + a01*y + a20*x² + a11*x*y + a02*y² + pub fn biquadratic(a00: f64, a10: f64, a01: f64, a20: f64, a11: f64, a02: f64) -> Self { + Self::new(vec![vec![a00, a01, a02], vec![a10, a11], vec![a20]]) + } + + /// Evaluates the polynomial at (x, y). + /// + /// # Arguments + /// + /// * `x` - First variable (e.g., SST) + /// * `y` - Second variable (e.g., SDT) + /// + /// # Returns + /// + /// The polynomial value z = P(x, y) + pub fn evaluate(&self, x: f64, y: f64) -> f64 { + let mut result = 0.0; + + for (i, row) in self.coefficients.iter().enumerate() { + let x_pow = x.powi(i as i32); + for (j, &coeff) in row.iter().enumerate() { + result += coeff * x_pow * y.powi(j as i32); + } + } + + result + } + + /// Computes partial derivative with respect to x. + pub fn partial_x(&self, x: f64, y: f64) -> f64 { + let mut result = 0.0; + + for (i, row) in self.coefficients.iter().enumerate() { + if i == 0 { + continue; // ∂/∂x of constant is 0 + } + let x_pow = (i as f64) * x.powi((i - 1) as i32); + for (j, &coeff) in row.iter().enumerate() { + result += coeff * x_pow * y.powi(j as i32); + } + } + + result + } + + /// Computes partial derivative with respect to y. + pub fn partial_y(&self, x: f64, y: f64) -> f64 { + let mut result = 0.0; + + for (i, row) in self.coefficients.iter().enumerate() { + let x_pow = x.powi(i as i32); + for (j, &coeff) in row.iter().enumerate() { + if j == 0 { + continue; // ∂/∂y of constant is 0 + } + result += coeff * x_pow * (j as f64) * y.powi((j - 1) as i32); + } + } + + result + } + + /// Returns the degree in x. + pub fn degree_x(&self) -> usize { + if self.coefficients.is_empty() { + 0 + } else { + self.coefficients.len() - 1 + } + } + + /// Returns the degree in y. + pub fn degree_y(&self) -> usize { + self.coefficients + .iter() + .map(|row| if row.is_empty() { 0 } else { row.len() - 1 }) + .max() + .unwrap_or(0) + } + + /// Returns a reference to the coefficient matrix. 
+ pub fn coefficients(&self) -> &[Vec] { + &self.coefficients + } + + /// Validates that all coefficients are finite. + pub fn validate(&self) -> Result<(), ComponentError> { + for (i, row) in self.coefficients.iter().enumerate() { + for (j, &c) in row.iter().enumerate() { + if c.is_nan() { + return Err(ComponentError::InvalidState(format!( + "Coefficient [{},{}] is NaN", + i, j + ))); + } + if c.is_infinite() { + return Err(ComponentError::InvalidState(format!( + "Coefficient [{},{}] is infinite", + i, j + ))); + } + } + } + Ok(()) + } +} + +impl Default for Polynomial2D { + fn default() -> Self { + Self::constant(0.0) + } +} + +/// Pump/Fan curve set containing head, efficiency, and power polynomials. +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct PerformanceCurves { + /// Head/Pressure curve: H = f(Q) where Q is volumetric flow + pub head_curve: Polynomial1D, + /// Efficiency curve: η = f(Q) + pub efficiency_curve: Polynomial1D, + /// Optional power curve: P = f(Q) - if not provided, calculated from head and efficiency + pub power_curve: Option, +} + +impl PerformanceCurves { + /// Creates a new performance curve set. + pub fn new( + head_curve: Polynomial1D, + efficiency_curve: Polynomial1D, + power_curve: Option, + ) -> Self { + Self { + head_curve, + efficiency_curve, + power_curve, + } + } + + /// Creates a simple pump curve with head and efficiency only. + pub fn simple(head_curve: Polynomial1D, efficiency_curve: Polynomial1D) -> Self { + Self::new(head_curve, efficiency_curve, None) + } + + /// Validates all curves. + pub fn validate(&self) -> Result<(), ComponentError> { + self.head_curve.validate()?; + self.efficiency_curve.validate()?; + if let Some(ref pc) = self.power_curve { + pc.validate()?; + } + Ok(()) + } +} + +impl Default for PerformanceCurves { + fn default() -> Self { + Self::simple(Polynomial1D::default(), Polynomial1D::default()) + } +} + +/// Affinity laws for variable speed operation. 
+/// +/// When speed changes from N1 to N2: +/// - Q2/Q1 = N2/N1 (flow proportional to speed) +/// - H2/H1 = (N2/N1)² (head proportional to speed squared) +/// - P2/P1 = (N2/N1)³ (power proportional to speed cubed) +pub struct AffinityLaws; + +impl AffinityLaws { + /// Applies affinity laws to scale flow rate. + /// + /// # Arguments + /// + /// * `flow` - Original flow at speed_ratio = 1.0 + /// * `speed_ratio` - New speed / rated speed (0.0 to 1.0) + /// + /// # Returns + /// + /// Scaled flow rate + pub fn scale_flow(flow: f64, speed_ratio: f64) -> f64 { + flow * speed_ratio + } + + /// Applies affinity laws to scale head/pressure. + /// + /// # Arguments + /// + /// * `head` - Original head at speed_ratio = 1.0 + /// * `speed_ratio` - New speed / rated speed (0.0 to 1.0) + /// + /// # Returns + /// + /// Scaled head + pub fn scale_head(head: f64, speed_ratio: f64) -> f64 { + head * speed_ratio * speed_ratio + } + + /// Applies affinity laws to scale power. + /// + /// # Arguments + /// + /// * `power` - Original power at speed_ratio = 1.0 + /// * `speed_ratio` - New speed / rated speed (0.0 to 1.0) + /// + /// # Returns + /// + /// Scaled power + pub fn scale_power(power: f64, speed_ratio: f64) -> f64 { + power * speed_ratio * speed_ratio * speed_ratio + } + + /// Reverse affinity law: find original flow from scaled flow. + pub fn unscale_flow(scaled_flow: f64, speed_ratio: f64) -> f64 { + if speed_ratio <= 0.0 { + return f64::INFINITY; + } + scaled_flow / speed_ratio + } + + /// Reverse affinity law: find original head from scaled head. 
+ pub fn unscale_head(scaled_head: f64, speed_ratio: f64) -> f64 { + if speed_ratio <= 0.0 { + return f64::INFINITY; + } + scaled_head / (speed_ratio * speed_ratio) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use approx::assert_relative_eq; + + #[test] + fn test_polynomial_1d_constant() { + let p = Polynomial1D::constant(5.0); + assert_relative_eq!(p.evaluate(0.0), 5.0); + assert_relative_eq!(p.evaluate(100.0), 5.0); + assert_relative_eq!(p.derivative(0.0), 0.0); + } + + #[test] + fn test_polynomial_1d_linear() { + // y = 3 + 2x + let p = Polynomial1D::linear(3.0, 2.0); + assert_relative_eq!(p.evaluate(0.0), 3.0); + assert_relative_eq!(p.evaluate(1.0), 5.0); + assert_relative_eq!(p.evaluate(2.0), 7.0); + assert_relative_eq!(p.derivative(5.0), 2.0); + } + + #[test] + fn test_polynomial_1d_quadratic() { + // y = 1 + 2x + 3x² + let p = Polynomial1D::quadratic(1.0, 2.0, 3.0); + assert_relative_eq!(p.evaluate(0.0), 1.0); + assert_relative_eq!(p.evaluate(1.0), 6.0); // 1 + 2 + 3 + assert_relative_eq!(p.evaluate(2.0), 17.0); // 1 + 4 + 12 + assert_relative_eq!(p.derivative(2.0), 14.0); // 2 + 6*2 = 14 + } + + #[test] + fn test_polynomial_1d_cubic() { + // y = 1 + x + x² + x³ + let p = Polynomial1D::cubic(1.0, 1.0, 1.0, 1.0); + assert_relative_eq!(p.evaluate(0.0), 1.0); + assert_relative_eq!(p.evaluate(1.0), 4.0); + assert_relative_eq!(p.evaluate(2.0), 15.0); // 1 + 2 + 4 + 8 + } + + #[test] + fn test_polynomial_1d_empty() { + let p = Polynomial1D::new(vec![]); + assert_relative_eq!(p.evaluate(5.0), 0.0); + } + + #[test] + fn test_polynomial_1d_degree() { + assert_eq!(Polynomial1D::constant(1.0).degree(), 0); + assert_eq!(Polynomial1D::linear(1.0, 2.0).degree(), 1); + assert_eq!(Polynomial1D::quadratic(1.0, 2.0, 3.0).degree(), 2); + assert_eq!(Polynomial1D::cubic(1.0, 2.0, 3.0, 4.0).degree(), 3); + } + + #[test] + fn test_polynomial_1d_validate() { + let valid = Polynomial1D::quadratic(1.0, 2.0, 3.0); + assert!(valid.validate().is_ok()); + + let nan_coeff = 
Polynomial1D::new(vec![1.0, f64::NAN]); + assert!(nan_coeff.validate().is_err()); + + let inf_coeff = Polynomial1D::new(vec![f64::INFINITY]); + assert!(inf_coeff.validate().is_err()); + } + + #[test] + fn test_polynomial_2d_constant() { + let p = Polynomial2D::constant(5.0); + assert_relative_eq!(p.evaluate(0.0, 0.0), 5.0); + assert_relative_eq!(p.evaluate(10.0, 20.0), 5.0); + } + + #[test] + fn test_polynomial_2d_bilinear() { + // z = 1 + 2*x + 3*y + 0.5*x*y + let p = Polynomial2D::bilinear(1.0, 2.0, 3.0, 0.5); + + // At (0,0): z = 1 + assert_relative_eq!(p.evaluate(0.0, 0.0), 1.0); + + // At (1,0): z = 1 + 2 = 3 + assert_relative_eq!(p.evaluate(1.0, 0.0), 3.0); + + // At (0,1): z = 1 + 3 = 4 + assert_relative_eq!(p.evaluate(0.0, 1.0), 4.0); + + // At (2,3): z = 1 + 4 + 9 + 3 = 17 + assert_relative_eq!(p.evaluate(2.0, 3.0), 17.0); + } + + #[test] + fn test_polynomial_2d_biquadratic() { + let p = Polynomial2D::biquadratic(1.0, 2.0, 3.0, 4.0, 5.0, 6.0); + + // At (0,0): z = 1 + assert_relative_eq!(p.evaluate(0.0, 0.0), 1.0); + + // At (1,1): z = 1 + 2 + 3 + 4 + 5 + 6 = 21 + assert_relative_eq!(p.evaluate(1.0, 1.0), 21.0); + } + + #[test] + fn test_polynomial_2d_partial_derivatives() { + // z = 1 + 2*x + 3*y + 4*x*y + let p = Polynomial2D::bilinear(1.0, 2.0, 3.0, 4.0); + + // ∂z/∂x = 2 + 4*y + assert_relative_eq!(p.partial_x(0.0, 0.0), 2.0); + assert_relative_eq!(p.partial_x(0.0, 1.0), 6.0); + + // ∂z/∂y = 3 + 4*x + assert_relative_eq!(p.partial_y(0.0, 0.0), 3.0); + assert_relative_eq!(p.partial_y(1.0, 0.0), 7.0); + } + + #[test] + fn test_polynomial_2d_degrees() { + let p = Polynomial2D::bilinear(1.0, 2.0, 3.0, 4.0); + assert_eq!(p.degree_x(), 1); + assert_eq!(p.degree_y(), 1); + + let biq = Polynomial2D::biquadratic(1.0, 2.0, 3.0, 4.0, 5.0, 6.0); + assert_eq!(biq.degree_x(), 2); + assert_eq!(biq.degree_y(), 2); + } + + #[test] + fn test_polynomial_2d_validate() { + let valid = Polynomial2D::bilinear(1.0, 2.0, 3.0, 4.0); + assert!(valid.validate().is_ok()); + + let 
nan_coeff = Polynomial2D::new(vec![vec![1.0, f64::NAN]]); + assert!(nan_coeff.validate().is_err()); + } + + #[test] + fn test_affinity_laws_flow() { + let flow = 100.0; + + // At full speed: no change + assert_relative_eq!(AffinityLaws::scale_flow(flow, 1.0), 100.0); + + // At half speed: half flow + assert_relative_eq!(AffinityLaws::scale_flow(flow, 0.5), 50.0); + + // At 80% speed: 80% flow + assert_relative_eq!(AffinityLaws::scale_flow(flow, 0.8), 80.0); + } + + #[test] + fn test_affinity_laws_head() { + let head = 100.0; + + // At full speed: no change + assert_relative_eq!(AffinityLaws::scale_head(head, 1.0), 100.0); + + // At half speed: 25% head + assert_relative_eq!(AffinityLaws::scale_head(head, 0.5), 25.0); + + // At 80% speed: 64% head + assert_relative_eq!(AffinityLaws::scale_head(head, 0.8), 64.0); + } + + #[test] + fn test_affinity_laws_power() { + let power = 1000.0; + + // At full speed: no change + assert_relative_eq!(AffinityLaws::scale_power(power, 1.0), 1000.0); + + // At half speed: 12.5% power + assert_relative_eq!(AffinityLaws::scale_power(power, 0.5), 125.0); + + // At 80% speed: 51.2% power + assert_relative_eq!(AffinityLaws::scale_power(power, 0.8), 512.0); + } + + #[test] + fn test_affinity_laws_reverse() { + let flow = 50.0; + let head = 25.0; + let speed = 0.5; + + // Reverse scaling + assert_relative_eq!(AffinityLaws::unscale_flow(flow, speed), 100.0); + assert_relative_eq!(AffinityLaws::unscale_head(head, speed), 100.0); + } + + #[test] + fn test_performance_curves() { + // Typical pump curve: H = 50 - 0.1*Q - 0.001*Q² + let head = Polynomial1D::quadratic(50.0, -0.1, -0.001); + // Efficiency: η = 0.4 + 0.02*Q - 0.0001*Q² + let eff = Polynomial1D::quadratic(0.4, 0.02, -0.0001); + + let curves = PerformanceCurves::simple(head.clone(), eff.clone()); + + assert!(curves.validate().is_ok()); + assert_relative_eq!(curves.head_curve.evaluate(0.0), 50.0); + assert_relative_eq!(curves.efficiency_curve.evaluate(0.0), 0.4); + } + + #[test] + fn 
test_pump_curve_realistic() { + // Realistic pump curve for a small centrifugal pump + // H (m) = 30 - 0.05*Q - 0.0005*Q², where Q is in m³/h + let head_curve = Polynomial1D::quadratic(30.0, -0.05, -0.0005); + + // At Q=0 (shut-off): H = 30 m + assert_relative_eq!(head_curve.evaluate(0.0), 30.0); + + // At Q=100 m³/h: H = 30 - 5 - 5 = 20 m + assert_relative_eq!(head_curve.evaluate(100.0), 20.0); + + // At Q=200 m³/h: H = 30 - 10 - 20 = 0 m (run-out) + assert_relative_eq!(head_curve.evaluate(200.0), 0.0, epsilon = 1e-10); + } +} diff --git a/crates/components/src/port.rs b/crates/components/src/port.rs new file mode 100644 index 0000000..6dab4f3 --- /dev/null +++ b/crates/components/src/port.rs @@ -0,0 +1,753 @@ +//! Port and Connection System +//! +//! This module provides the foundation for connecting thermodynamic components +//! using the Type-State pattern for compile-time connection safety. +//! +//! ## Type-State Pattern +//! +//! Ports have two states: +//! - `Disconnected`: Initial state, cannot be used in solver +//! - `Connected`: Linked to another port, ready for simulation +//! +//! State transitions are enforced at compile time: +//! ```text +//! Port --connect()--> Port +//! ↑ │ +//! └───────── (no way back) ────────────┘ +//! ``` +//! +//! ## Connection Semantics +//! +//! Connected ports validate continuity (pressure/enthalpy match) at connection time, +//! but track values independently afterward. This allows the solver to update port +//! states during iteration without requiring synchronization. +//! +//! ## Example +//! +//! ```rust +//! use entropyk_components::port::{Port, Disconnected, Connected, FluidId, ConnectionError}; +//! use entropyk_core::{Pressure, Enthalpy}; +//! +//! // Create two disconnected ports +//! let port1 = Port::new(FluidId::new("R134a"), Pressure::from_bar(1.0), Enthalpy::from_joules_per_kg(400000.0)); +//! let port2 = Port::new(FluidId::new("R134a"), Pressure::from_bar(1.0), Enthalpy::from_joules_per_kg(400000.0)); +//! 
+//! // Connect them +//! let (connected1, connected2) = port1.connect(port2)?; +//! +//! // Ports track values independently for solver flexibility +//! assert_eq!(connected1.pressure().to_bar(), 1.0); +//! # Ok::<(), ConnectionError>(()) +//! ``` + +use entropyk_core::{Enthalpy, Pressure}; +use std::fmt; +use std::marker::PhantomData; +use thiserror::Error; + +/// Default relative tolerance for pressure matching (0.01% = 100 ppm). +/// For 1 bar = 100,000 Pa, this allows 10 Pa difference. +const PRESSURE_TOLERANCE_FRACTION: f64 = 1e-4; + +/// Default absolute tolerance for enthalpy matching (100 J/kg). +/// This is approximately 0.024 kJ/kg, reasonable for HVAC calculations. +const ENTHALPY_TOLERANCE_J_KG: f64 = 100.0; + +/// Minimum absolute pressure tolerance (1 Pa) to avoid issues near zero. +const MIN_PRESSURE_TOLERANCE_PA: f64 = 1.0; + +/// Errors that can occur during port operations. +#[derive(Error, Debug, Clone, PartialEq)] +pub enum ConnectionError { + /// Attempted to connect ports with incompatible fluids. + #[error("Incompatible fluids: cannot connect {from} to {to}")] + IncompatibleFluid { + /// Source fluid identifier + from: String, + /// Target fluid identifier + to: String, + }, + + /// Pressure mismatch at connection point. + #[error( + "Pressure mismatch: {from_pressure} Pa vs {to_pressure} Pa (tolerance: {tolerance} Pa)" + )] + PressureMismatch { + /// Pressure at source port (Pa) + from_pressure: f64, + /// Pressure at target port (Pa) + to_pressure: f64, + /// Tolerance used for comparison (Pa) + tolerance: f64, + }, + + /// Enthalpy mismatch at connection point. + #[error("Enthalpy mismatch: {from_enthalpy} J/kg vs {to_enthalpy} J/kg (tolerance: {tolerance} J/kg)")] + EnthalpyMismatch { + /// Enthalpy at source port (J/kg) + from_enthalpy: f64, + /// Enthalpy at target port (J/kg) + to_enthalpy: f64, + /// Tolerance used for comparison (J/kg) + tolerance: f64, + }, + + /// Attempted to connect a port that is already connected. 
+ #[error("Port is already connected and cannot be reconnected")] + AlreadyConnected, + + /// Detected a cycle in the connection graph. + #[error("Connection would create a cycle in the system topology")] + CycleDetected, + + /// Invalid port index. + #[error( + "Invalid port index {index}: component has {port_count} ports (valid: 0..{max_index})" + )] + InvalidPortIndex { + /// The invalid port index that was requested + index: usize, + /// Number of ports on the component + port_count: usize, + /// Maximum valid index (port_count - 1, or 0 if no ports) + max_index: usize, + }, + + /// Invalid node index. + #[error("Invalid node index: {0}")] + InvalidNodeIndex(usize), +} + +/// Type-state marker for disconnected ports. +/// +/// Ports in this state cannot be used in the solver until connected. +#[derive(Debug, Clone, Copy, PartialEq)] +pub struct Disconnected; + +/// Type-state marker for connected ports. +/// +/// Ports in this state are linked to another port and ready for simulation. +#[derive(Debug, Clone, Copy, PartialEq)] +pub struct Connected; + +/// Identifier for thermodynamic fluids. +/// +/// Used to ensure only compatible fluids are connected. +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct FluidId(String); + +impl FluidId { + /// Creates a new fluid identifier. + /// + /// # Arguments + /// + /// * `id` - Unique identifier for the fluid (e.g., "R134a", "Water") + /// + /// # Examples + /// + /// ``` + /// use entropyk_components::port::FluidId; + /// + /// let fluid = FluidId::new("R134a"); + /// ``` + pub fn new(id: impl Into) -> Self { + FluidId(id.into()) + } + + /// Returns the fluid identifier as a string slice. + pub fn as_str(&self) -> &str { + &self.0 + } +} + +impl fmt::Display for FluidId { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.0) + } +} + +/// A thermodynamic port for connecting components. +/// +/// Ports use the Type-State pattern to enforce connection safety at compile time. 
+/// A `Port` must be connected before it can be used in simulations. +/// +/// # Type Parameters +/// +/// * `State` - Either `Disconnected` or `Connected`, tracking the port's state +/// +/// # Examples +/// +/// ``` +/// use entropyk_components::port::{Port, Disconnected, FluidId}; +/// use entropyk_core::{Pressure, Enthalpy}; +/// +/// // Create a disconnected port +/// let port: Port = Port::new( +/// FluidId::new("R134a"), +/// Pressure::from_bar(1.0), +/// Enthalpy::from_joules_per_kg(400000.0) +/// ); +/// ``` +#[derive(Debug, Clone, PartialEq)] +pub struct Port { + fluid_id: FluidId, + pressure: Pressure, + enthalpy: Enthalpy, + _state: PhantomData, +} + +/// Helper to validate connection parameters. +fn validate_connection_params( + from_fluid: &FluidId, + from_p: Pressure, + from_h: Enthalpy, + to_fluid: &FluidId, + to_p: Pressure, + to_h: Enthalpy, +) -> Result<(), ConnectionError> { + if from_fluid != to_fluid { + return Err(ConnectionError::IncompatibleFluid { + from: from_fluid.to_string(), + to: to_fluid.to_string(), + }); + } + + let pressure_tol = + (from_p.to_pascals().abs() * PRESSURE_TOLERANCE_FRACTION).max(MIN_PRESSURE_TOLERANCE_PA); + let pressure_diff = (from_p.to_pascals() - to_p.to_pascals()).abs(); + if pressure_diff > pressure_tol { + return Err(ConnectionError::PressureMismatch { + from_pressure: from_p.to_pascals(), + to_pressure: to_p.to_pascals(), + tolerance: pressure_tol, + }); + } + + let enthalpy_diff = (from_h.to_joules_per_kg() - to_h.to_joules_per_kg()).abs(); + if enthalpy_diff > ENTHALPY_TOLERANCE_J_KG { + return Err(ConnectionError::EnthalpyMismatch { + from_enthalpy: from_h.to_joules_per_kg(), + to_enthalpy: to_h.to_joules_per_kg(), + tolerance: ENTHALPY_TOLERANCE_J_KG, + }); + } + + Ok(()) +} + +impl Port { + /// Creates a new disconnected port. 
+ /// + /// # Arguments + /// + /// * `fluid_id` - Identifier for the fluid flowing through this port + /// * `pressure` - Initial pressure at the port + /// * `enthalpy` - Initial specific enthalpy at the port + /// + /// # Examples + /// + /// ``` + /// use entropyk_components::port::{Port, FluidId}; + /// use entropyk_core::{Pressure, Enthalpy}; + /// + /// let port = Port::new( + /// FluidId::new("R134a"), + /// Pressure::from_bar(1.0), + /// Enthalpy::from_joules_per_kg(400000.0) + /// ); + /// ``` + pub fn new(fluid_id: FluidId, pressure: Pressure, enthalpy: Enthalpy) -> Self { + Self { + fluid_id, + pressure, + enthalpy, + _state: PhantomData, + } + } + + /// Returns the fluid identifier. + pub fn fluid_id(&self) -> &FluidId { + &self.fluid_id + } + + /// Returns the current pressure. + pub fn pressure(&self) -> Pressure { + self.pressure + } + + /// Returns the current enthalpy. + pub fn enthalpy(&self) -> Enthalpy { + self.enthalpy + } + + /// Connects two disconnected ports. + /// + /// Validates that: + /// - Both ports have the same fluid type + /// - Pressures match within relative tolerance + /// - Enthalpies match within absolute tolerance + /// + /// After connection, ports track values independently, allowing the solver + /// to update states during iteration. + /// + /// # Arguments + /// + /// * `other` - The port to connect to + /// + /// # Returns + /// + /// Returns a tuple of `(Port, Port)` on success, + /// or a `ConnectionError` if validation fails. 
+ /// + /// # Examples + /// + /// ``` + /// use entropyk_components::port::{Port, FluidId, ConnectionError}; + /// use entropyk_core::{Pressure, Enthalpy}; + /// + /// let port1 = Port::new( + /// FluidId::new("R134a"), + /// Pressure::from_pascals(100000.0), + /// Enthalpy::from_joules_per_kg(400000.0) + /// ); + /// let port2 = Port::new( + /// FluidId::new("R134a"), + /// Pressure::from_pascals(100000.0), + /// Enthalpy::from_joules_per_kg(400000.0) + /// ); + /// + /// let (connected1, connected2) = port1.connect(port2)?; + /// # Ok::<(), ConnectionError>(()) + /// ``` + pub fn connect( + self, + other: Port, + ) -> Result<(Port, Port), ConnectionError> { + validate_connection_params( + &self.fluid_id, + self.pressure, + self.enthalpy, + &other.fluid_id, + other.pressure, + other.enthalpy, + )?; + + let avg_pressure = Pressure::from_pascals( + (self.pressure.to_pascals() + other.pressure.to_pascals()) / 2.0, + ); + let avg_enthalpy = Enthalpy::from_joules_per_kg( + (self.enthalpy.to_joules_per_kg() + other.enthalpy.to_joules_per_kg()) / 2.0, + ); + + let connected1 = Port { + fluid_id: self.fluid_id, + pressure: avg_pressure, + enthalpy: avg_enthalpy, + _state: PhantomData, + }; + + let connected2 = Port { + fluid_id: other.fluid_id, + pressure: avg_pressure, + enthalpy: avg_enthalpy, + _state: PhantomData, + }; + + Ok((connected1, connected2)) + } +} + +impl Port { + /// Returns the fluid identifier. + pub fn fluid_id(&self) -> &FluidId { + &self.fluid_id + } + + /// Returns the current pressure. + pub fn pressure(&self) -> Pressure { + self.pressure + } + + /// Returns the current enthalpy. + pub fn enthalpy(&self) -> Enthalpy { + self.enthalpy + } + + /// Updates the pressure at this port. + /// + /// # Arguments + /// + /// * `pressure` - The new pressure value + pub fn set_pressure(&mut self, pressure: Pressure) { + self.pressure = pressure; + } + + /// Updates the enthalpy at this port. 
+ /// + /// # Arguments + /// + /// * `enthalpy` - The new enthalpy value + pub fn set_enthalpy(&mut self, enthalpy: Enthalpy) { + self.enthalpy = enthalpy; + } +} + +/// A connected port reference that can be stored in components. +/// +/// This type is object-safe and can be used in trait objects. +pub type ConnectedPort = Port; + +/// Validates that two connected ports are compatible for a flow connection. +/// +/// Uses the same tolerance constants as [`Port::connect`](Port::connect): +/// - Pressure: `max(P * 1e-4, 1 Pa)` +/// - Enthalpy: 100 J/kg +/// +/// # Arguments +/// +/// * `outlet` - Source port (flow direction: outlet → inlet) +/// * `inlet` - Target port +/// +/// # Returns +/// +/// `Ok(())` if ports are compatible, `Err(ConnectionError)` otherwise. +pub fn validate_port_continuity( + outlet: &ConnectedPort, + inlet: &ConnectedPort, +) -> Result<(), ConnectionError> { + validate_connection_params( + &outlet.fluid_id, + outlet.pressure, + outlet.enthalpy, + &inlet.fluid_id, + inlet.pressure, + inlet.enthalpy, + ) +} + +#[cfg(test)] +mod tests { + use super::*; + use approx::assert_relative_eq; + + #[test] + fn test_port_creation() { + let port = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(1.0), + Enthalpy::from_joules_per_kg(400000.0), + ); + + assert_eq!(port.fluid_id().as_str(), "R134a"); + assert_relative_eq!(port.pressure().to_bar(), 1.0, epsilon = 1e-10); + assert_relative_eq!( + port.enthalpy().to_joules_per_kg(), + 400000.0, + epsilon = 1e-10 + ); + } + + #[test] + fn test_successful_connection() { + let port1 = Port::new( + FluidId::new("R134a"), + Pressure::from_pascals(100000.0), + Enthalpy::from_joules_per_kg(400000.0), + ); + let port2 = Port::new( + FluidId::new("R134a"), + Pressure::from_pascals(100000.0), + Enthalpy::from_joules_per_kg(400000.0), + ); + + let (connected1, connected2) = port1.connect(port2).unwrap(); + + assert_eq!(connected1.fluid_id().as_str(), "R134a"); + assert_eq!(connected2.fluid_id().as_str(), 
"R134a"); + assert_relative_eq!( + connected1.pressure().to_pascals(), + 100000.0, + epsilon = 1e-10 + ); + assert_relative_eq!( + connected2.pressure().to_pascals(), + 100000.0, + epsilon = 1e-10 + ); + } + + #[test] + fn test_incompatible_fluid_error() { + let port1 = Port::new( + FluidId::new("R134a"), + Pressure::from_pascals(100000.0), + Enthalpy::from_joules_per_kg(400000.0), + ); + let port2 = Port::new( + FluidId::new("Water"), + Pressure::from_pascals(100000.0), + Enthalpy::from_joules_per_kg(400000.0), + ); + + let result = port1.connect(port2); + + assert!(matches!( + result, + Err(ConnectionError::IncompatibleFluid { .. }) + )); + } + + #[test] + fn test_pressure_mismatch_error() { + let port1 = Port::new( + FluidId::new("R134a"), + Pressure::from_pascals(100000.0), + Enthalpy::from_joules_per_kg(400000.0), + ); + let port2 = Port::new( + FluidId::new("R134a"), + Pressure::from_pascals(200000.0), + Enthalpy::from_joules_per_kg(400000.0), + ); + + let result = port1.connect(port2); + + assert!(matches!( + result, + Err(ConnectionError::PressureMismatch { .. }) + )); + } + + #[test] + fn test_enthalpy_mismatch_error() { + let port1 = Port::new( + FluidId::new("R134a"), + Pressure::from_pascals(100000.0), + Enthalpy::from_joules_per_kg(400000.0), + ); + let port2 = Port::new( + FluidId::new("R134a"), + Pressure::from_pascals(100000.0), + Enthalpy::from_joules_per_kg(500000.0), + ); + + let result = port1.connect(port2); + + assert!(matches!( + result, + Err(ConnectionError::EnthalpyMismatch { .. 
}) + )); + } + + #[test] + fn test_connected_port_setters() { + let port1 = Port::new( + FluidId::new("R134a"), + Pressure::from_pascals(100000.0), + Enthalpy::from_joules_per_kg(400000.0), + ); + let port2 = Port::new( + FluidId::new("R134a"), + Pressure::from_pascals(100000.0), + Enthalpy::from_joules_per_kg(400000.0), + ); + + let (mut connected1, _) = port1.connect(port2).unwrap(); + + connected1.set_pressure(Pressure::from_pascals(150000.0)); + connected1.set_enthalpy(Enthalpy::from_joules_per_kg(450000.0)); + + assert_relative_eq!( + connected1.pressure().to_pascals(), + 150000.0, + epsilon = 1e-10 + ); + assert_relative_eq!( + connected1.enthalpy().to_joules_per_kg(), + 450000.0, + epsilon = 1e-10 + ); + } + + #[test] + fn test_ports_track_independently() { + let port1 = Port::new( + FluidId::new("R134a"), + Pressure::from_pascals(100000.0), + Enthalpy::from_joules_per_kg(400000.0), + ); + let port2 = Port::new( + FluidId::new("R134a"), + Pressure::from_pascals(100000.0), + Enthalpy::from_joules_per_kg(400000.0), + ); + + let (mut connected1, connected2) = port1.connect(port2).unwrap(); + + connected1.set_pressure(Pressure::from_pascals(150000.0)); + + // connected2 should NOT see the change - ports are independent + assert_relative_eq!( + connected2.pressure().to_pascals(), + 100000.0, + epsilon = 1e-10 + ); + } + + #[test] + fn test_fluid_id_creation() { + let fluid1 = FluidId::new("R134a"); + let fluid2 = FluidId::new(String::from("Water")); + + assert_eq!(fluid1.as_str(), "R134a"); + assert_eq!(fluid2.as_str(), "Water"); + } + + #[test] + fn test_connection_error_display() { + let err = ConnectionError::IncompatibleFluid { + from: "R134a".to_string(), + to: "Water".to_string(), + }; + let msg = format!("{}", err); + assert!(msg.contains("Incompatible fluids")); + assert!(msg.contains("R134a")); + assert!(msg.contains("Water")); + + let err = ConnectionError::PressureMismatch { + from_pressure: 100000.0, + to_pressure: 200000.0, + tolerance: 10.0, + }; + 
let msg = format!("{}", err); + assert!(msg.contains("100000 Pa")); + assert!(msg.contains("200000 Pa")); + assert!(msg.contains("tolerance")); + } + + #[test] + fn test_pressure_averaging_on_connection() { + let port1 = Port::new( + FluidId::new("R134a"), + Pressure::from_pascals(100000.0), + Enthalpy::from_joules_per_kg(400000.0), + ); + let port2 = Port::new( + FluidId::new("R134a"), + Pressure::from_pascals(100000.0), + Enthalpy::from_joules_per_kg(400000.0), + ); + + let (connected1, connected2) = port1.connect(port2).unwrap(); + + assert_relative_eq!( + connected1.pressure().to_pascals(), + connected2.pressure().to_pascals(), + epsilon = 1e-10 + ); + } + + #[test] + fn test_pressure_tolerance_with_small_difference() { + let port1 = Port::new( + FluidId::new("R134a"), + Pressure::from_pascals(100000.0), + Enthalpy::from_joules_per_kg(400000.0), + ); + let port2 = Port::new( + FluidId::new("R134a"), + Pressure::from_pascals(100005.0), + Enthalpy::from_joules_per_kg(400000.0), + ); + + let result = port1.connect(port2); + + assert!( + result.is_ok(), + "5 Pa difference should be within tolerance for 100 kPa pressure" + ); + } + + #[test] + fn test_clone_disconnected_port() { + let port1 = Port::new( + FluidId::new("R134a"), + Pressure::from_pascals(100000.0), + Enthalpy::from_joules_per_kg(400000.0), + ); + let port2 = port1.clone(); + + assert_eq!(port1, port2); + } + + #[test] + fn test_fluid_id_equality() { + let f1 = FluidId::new("R134a"); + let f2 = FluidId::new("R134a"); + let f3 = FluidId::new("Water"); + + assert_eq!(f1, f2); + assert_ne!(f1, f3); + } + + #[test] + fn test_already_connected_error() { + let err = ConnectionError::AlreadyConnected; + let msg = format!("{}", err); + assert!(msg.contains("already connected")); + } + + #[test] + fn test_cycle_detected_error() { + let err = ConnectionError::CycleDetected; + let msg = format!("{}", err); + assert!(msg.contains("cycle")); + } + + #[test] + fn test_validate_port_continuity_ok() { + let p1 = 
Port::new( + FluidId::new("R134a"), + Pressure::from_pascals(100_000.0), + Enthalpy::from_joules_per_kg(400_000.0), + ); + let p2 = Port::new( + FluidId::new("R134a"), + Pressure::from_pascals(100_000.0), + Enthalpy::from_joules_per_kg(400_000.0), + ); + let (c1, c2) = p1.connect(p2).unwrap(); + assert!(validate_port_continuity(&c1, &c2).is_ok()); + assert!(validate_port_continuity(&c2, &c1).is_ok()); + } + + #[test] + fn test_validate_port_continuity_incompatible_fluid() { + let (r134a, _) = Port::new( + FluidId::new("R134a"), + Pressure::from_pascals(100_000.0), + Enthalpy::from_joules_per_kg(400_000.0), + ) + .connect(Port::new( + FluidId::new("R134a"), + Pressure::from_pascals(100_000.0), + Enthalpy::from_joules_per_kg(400_000.0), + )) + .unwrap(); + let (water, _) = Port::new( + FluidId::new("Water"), + Pressure::from_pascals(100_000.0), + Enthalpy::from_joules_per_kg(400_000.0), + ) + .connect(Port::new( + FluidId::new("Water"), + Pressure::from_pascals(100_000.0), + Enthalpy::from_joules_per_kg(400_000.0), + )) + .unwrap(); + assert!(matches!( + validate_port_continuity(&r134a, &water), + Err(ConnectionError::IncompatibleFluid { .. }) + )); + } +} diff --git a/crates/components/src/pump.rs b/crates/components/src/pump.rs new file mode 100644 index 0000000..3919e29 --- /dev/null +++ b/crates/components/src/pump.rs @@ -0,0 +1,780 @@ +//! Pump Component Implementation +//! +//! This module provides a pump component for hydraulic systems using +//! polynomial performance curves and affinity laws for variable speed operation. +//! +//! ## Performance Curves +//! +//! **Head Curve:** H = a₀ + a₁Q + a₂Q² + a₃Q³ +//! +//! **Efficiency Curve:** η = b₀ + b₁Q + b₂Q² +//! +//! **Hydraulic Power:** P_hydraulic = ρ × g × Q × H / η +//! +//! ## Affinity Laws (Variable Speed) +//! +//! When operating at reduced speed (VFD): +//! - Q₂/Q₁ = N₂/N₁ +//! - H₂/H₁ = (N₂/N₁)² +//! 
- P₂/P₁ = (N₂/N₁)³ + +use crate::polynomials::{AffinityLaws, PerformanceCurves, Polynomial1D}; +use crate::port::{Connected, Disconnected, FluidId, Port}; +use crate::state_machine::StateManageable; +use crate::{ + CircuitId, Component, ComponentError, ConnectedPort, JacobianBuilder, OperationalState, + ResidualVector, SystemState, +}; +use entropyk_core::{MassFlow, Power}; +use serde::{Deserialize, Serialize}; +use std::marker::PhantomData; + +/// Pump performance curve coefficients. +/// +/// Defines the polynomial coefficients for the pump's head-flow curve +/// and efficiency curve. +#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] +pub struct PumpCurves { + /// Performance curves (head, efficiency, optional power) + curves: PerformanceCurves, +} + +impl PumpCurves { + /// Creates pump curves from performance curves. + pub fn new(curves: PerformanceCurves) -> Result { + curves.validate()?; + Ok(Self { curves }) + } + + /// Creates pump curves from polynomial coefficients. + /// + /// # Arguments + /// + /// * `head_coeffs` - Head curve coefficients [a0, a1, a2, ...] for H = a0 + a1*Q + a2*Q² + /// * `eff_coeffs` - Efficiency coefficients [b0, b1, b2, ...] for η = b0 + b1*Q + b2*Q² + /// + /// # Units + /// + /// * Q (flow) in m³/s + /// * H (head) in meters + /// * η (efficiency) as decimal (0.0 to 1.0) + pub fn from_coefficients( + head_coeffs: Vec, + eff_coeffs: Vec, + ) -> Result { + let head_curve = Polynomial1D::new(head_coeffs); + let eff_curve = Polynomial1D::new(eff_coeffs); + let curves = PerformanceCurves::simple(head_curve, eff_curve); + Self::new(curves) + } + + /// Creates a quadratic pump curve. + /// + /// H = a0 + a1*Q + a2*Q² + /// η = b0 + b1*Q + b2*Q² + pub fn quadratic( + h0: f64, + h1: f64, + h2: f64, + e0: f64, + e1: f64, + e2: f64, + ) -> Result { + Self::from_coefficients(vec![h0, h1, h2], vec![e0, e1, e2]) + } + + /// Creates a cubic pump curve (3rd-order polynomial for head). 
+ /// + /// H = a0 + a1*Q + a2*Q² + a3*Q³ + /// η = b0 + b1*Q + b2*Q² + pub fn cubic( + h0: f64, + h1: f64, + h2: f64, + h3: f64, + e0: f64, + e1: f64, + e2: f64, + ) -> Result { + Self::from_coefficients(vec![h0, h1, h2, h3], vec![e0, e1, e2]) + } + + /// Returns the head at the given flow rate (at full speed). + /// + /// # Arguments + /// + /// * `flow_m3_per_s` - Volumetric flow rate in m³/s + /// + /// # Returns + /// + /// Head in meters + pub fn head_at_flow(&self, flow_m3_per_s: f64) -> f64 { + self.curves.head_curve.evaluate(flow_m3_per_s) + } + + /// Returns the efficiency at the given flow rate (at full speed). + /// + /// # Arguments + /// + /// * `flow_m3_per_s` - Volumetric flow rate in m³/s + /// + /// # Returns + /// + /// Efficiency as decimal (0.0 to 1.0) + pub fn efficiency_at_flow(&self, flow_m3_per_s: f64) -> f64 { + let eta = self.curves.efficiency_curve.evaluate(flow_m3_per_s); + // Clamp efficiency to valid range + eta.clamp(0.0, 1.0) + } + + /// Returns reference to the performance curves. + pub fn curves(&self) -> &PerformanceCurves { + &self.curves + } +} + +impl Default for PumpCurves { + fn default() -> Self { + Self::quadratic(30.0, 0.0, 0.0, 0.7, 0.0, 0.0).unwrap() + } +} + +/// A pump component with polynomial performance curves. +/// +/// The pump uses the Type-State pattern to ensure ports are connected +/// before use in simulations. 
+/// +/// # Example +/// +/// ```ignore +/// use entropyk_components::pump::{Pump, PumpCurves}; +/// use entropyk_components::port::{FluidId, Port}; +/// use entropyk_core::{Pressure, Enthalpy}; +/// +/// // Create pump curves: H = 30 - 10*Q - 50*Q² (in m and m³/s) +/// let curves = PumpCurves::quadratic(30.0, -10.0, -50.0, 0.5, 0.3, -0.5).unwrap(); +/// +/// let inlet = Port::new( +/// FluidId::new("Water"), +/// Pressure::from_bar(1.0), +/// Enthalpy::from_joules_per_kg(100000.0), +/// ); +/// let outlet = Port::new( +/// FluidId::new("Water"), +/// Pressure::from_bar(1.0), +/// Enthalpy::from_joules_per_kg(100000.0), +/// ); +/// +/// let pump = Pump::new(curves, inlet, outlet, 1000.0).unwrap(); +/// ``` +#[derive(Debug, Clone)] +pub struct Pump { + /// Performance curves + curves: PumpCurves, + /// Inlet port + port_inlet: Port, + /// Outlet port + port_outlet: Port, + /// Fluid density in kg/m³ + fluid_density_kg_per_m3: f64, + /// Speed ratio (0.0 to 1.0), default 1.0 (full speed) + speed_ratio: f64, + /// Circuit identifier + circuit_id: CircuitId, + /// Operational state + operational_state: OperationalState, + /// Phantom data for type state + _state: PhantomData, +} + +impl Pump { + /// Creates a new disconnected pump. 
+ /// + /// # Arguments + /// + /// * `curves` - Pump performance curves + /// * `port_inlet` - Inlet port (disconnected) + /// * `port_outlet` - Outlet port (disconnected) + /// * `fluid_density` - Fluid density in kg/m³ + /// + /// # Errors + /// + /// Returns an error if: + /// - Ports have different fluid types + /// - Fluid density is not positive + pub fn new( + curves: PumpCurves, + port_inlet: Port, + port_outlet: Port, + fluid_density: f64, + ) -> Result { + if port_inlet.fluid_id() != port_outlet.fluid_id() { + return Err(ComponentError::InvalidState( + "Inlet and outlet ports must have the same fluid type".to_string(), + )); + } + + if fluid_density <= 0.0 { + return Err(ComponentError::InvalidState( + "Fluid density must be positive".to_string(), + )); + } + + Ok(Self { + curves, + port_inlet, + port_outlet, + fluid_density_kg_per_m3: fluid_density, + speed_ratio: 1.0, + circuit_id: CircuitId::default(), + operational_state: OperationalState::default(), + _state: PhantomData, + }) + } + + /// Returns the fluid identifier. + pub fn fluid_id(&self) -> &FluidId { + self.port_inlet.fluid_id() + } + + /// Returns the inlet port. + pub fn port_inlet(&self) -> &Port { + &self.port_inlet + } + + /// Returns the outlet port. + pub fn port_outlet(&self) -> &Port { + &self.port_outlet + } + + /// Returns the fluid density. + pub fn fluid_density(&self) -> f64 { + self.fluid_density_kg_per_m3 + } + + /// Returns the performance curves. + pub fn curves(&self) -> &PumpCurves { + &self.curves + } + + /// Returns the speed ratio. + pub fn speed_ratio(&self) -> f64 { + self.speed_ratio + } + + /// Sets the speed ratio (0.0 to 1.0). + pub fn set_speed_ratio(&mut self, ratio: f64) -> Result<(), ComponentError> { + if !(0.0..=1.0).contains(&ratio) { + return Err(ComponentError::InvalidState( + "Speed ratio must be between 0.0 and 1.0".to_string(), + )); + } + self.speed_ratio = ratio; + Ok(()) + } +} + +impl Pump { + /// Returns the inlet port. 
+ pub fn port_inlet(&self) -> &Port { + &self.port_inlet + } + + /// Returns the outlet port. + pub fn port_outlet(&self) -> &Port { + &self.port_outlet + } + + /// Calculates the pressure rise across the pump. + /// + /// Uses the head curve and converts to pressure: + /// ΔP = ρ × g × H + /// + /// Applies affinity laws for variable speed operation. + /// + /// # Arguments + /// + /// * `flow_m3_per_s` - Volumetric flow rate in m³/s + /// + /// # Returns + /// + /// Pressure rise in Pascals + pub fn pressure_rise(&self, flow_m3_per_s: f64) -> f64 { + // Handle zero speed - pump produces no pressure + if self.speed_ratio <= 0.0 { + return 0.0; + } + + // Handle zero flow + if flow_m3_per_s <= 0.0 { + // At zero flow, use the shut-off head scaled by speed + let head_m = self.curves.head_at_flow(0.0); + let actual_head = AffinityLaws::scale_head(head_m, self.speed_ratio); + const G: f64 = 9.80665; // m/s² + return self.fluid_density_kg_per_m3 * G * actual_head; + } + + // Apply affinity law to get equivalent flow at full speed + let equivalent_flow = AffinityLaws::unscale_flow(flow_m3_per_s, self.speed_ratio); + + // Get head at equivalent flow + let head_m = self.curves.head_at_flow(equivalent_flow); + + // Apply affinity law to scale head back to actual speed + let actual_head = AffinityLaws::scale_head(head_m, self.speed_ratio); + + // Convert head to pressure: P = ρ × g × H + const G: f64 = 9.80665; // m/s² + self.fluid_density_kg_per_m3 * G * actual_head + } + + /// Calculates the efficiency at the given flow rate. + /// + /// Applies affinity laws to find the equivalent operating point. 
+ pub fn efficiency(&self, flow_m3_per_s: f64) -> f64 { + // Handle zero speed - pump is not running + if self.speed_ratio <= 0.0 { + return 0.0; + } + + // Handle zero flow + if flow_m3_per_s <= 0.0 { + return self.curves.efficiency_at_flow(0.0); + } + + let equivalent_flow = AffinityLaws::unscale_flow(flow_m3_per_s, self.speed_ratio); + self.curves.efficiency_at_flow(equivalent_flow) + } + + /// Calculates the hydraulic power consumption. + /// + /// P_hydraulic = Q × ΔP / η + /// + /// # Arguments + /// + /// * `flow_m3_per_s` - Volumetric flow rate in m³/s + /// + /// # Returns + /// + /// Power in Watts + pub fn hydraulic_power(&self, flow_m3_per_s: f64) -> Power { + if flow_m3_per_s <= 0.0 || self.speed_ratio <= 0.0 { + return Power::from_watts(0.0); + } + + let delta_p = self.pressure_rise(flow_m3_per_s); + let eta = self.efficiency(flow_m3_per_s); + + if eta <= 0.0 { + return Power::from_watts(0.0); + } + + // P = Q × ΔP / η + let power_w = flow_m3_per_s * delta_p / eta; + Power::from_watts(power_w) + } + + /// Calculates mass flow rate from volumetric flow. + pub fn mass_flow_from_volumetric(&self, flow_m3_per_s: f64) -> MassFlow { + MassFlow::from_kg_per_s(flow_m3_per_s * self.fluid_density_kg_per_m3) + } + + /// Calculates volumetric flow rate from mass flow. + pub fn volumetric_from_mass_flow(&self, mass_flow: MassFlow) -> f64 { + mass_flow.to_kg_per_s() / self.fluid_density_kg_per_m3 + } + + /// Returns the fluid density. + pub fn fluid_density(&self) -> f64 { + self.fluid_density_kg_per_m3 + } + + /// Returns the performance curves. + pub fn curves(&self) -> &PumpCurves { + &self.curves + } + + /// Returns the speed ratio. + pub fn speed_ratio(&self) -> f64 { + self.speed_ratio + } + + /// Sets the speed ratio (0.0 to 1.0). 
+ pub fn set_speed_ratio(&mut self, ratio: f64) -> Result<(), ComponentError> { + if !(0.0..=1.0).contains(&ratio) { + return Err(ComponentError::InvalidState( + "Speed ratio must be between 0.0 and 1.0".to_string(), + )); + } + self.speed_ratio = ratio; + Ok(()) + } + + /// Returns both ports as a slice for solver topology. + pub fn get_ports_slice(&self) -> [&Port; 2] { + [&self.port_inlet, &self.port_outlet] + } +} + +impl Component for Pump { + fn compute_residuals( + &self, + state: &SystemState, + residuals: &mut ResidualVector, + ) -> Result<(), ComponentError> { + if residuals.len() != self.n_equations() { + return Err(ComponentError::InvalidResidualDimensions { + expected: self.n_equations(), + actual: residuals.len(), + }); + } + + // Handle operational states + match self.operational_state { + OperationalState::Off => { + residuals[0] = state[0]; // Mass flow = 0 + residuals[1] = 0.0; // No energy transfer + return Ok(()); + } + OperationalState::Bypass => { + // Behaves as a pipe: no pressure rise, no energy change + let p_in = self.port_inlet.pressure().to_pascals(); + let p_out = self.port_outlet.pressure().to_pascals(); + let h_in = self.port_inlet.enthalpy().to_joules_per_kg(); + let h_out = self.port_outlet.enthalpy().to_joules_per_kg(); + + residuals[0] = p_in - p_out; + residuals[1] = h_in - h_out; + return Ok(()); + } + OperationalState::On => {} + } + + if state.len() < 2 { + return Err(ComponentError::InvalidStateDimensions { + expected: 2, + actual: state.len(), + }); + } + + // State: [mass_flow_kg_s, power_w] + let mass_flow_kg_s = state[0]; + let _power_w = state[1]; + + // Convert to volumetric flow + let flow_m3_s = mass_flow_kg_s / self.fluid_density_kg_per_m3; + + // Calculate pressure rise from curves + let delta_p_calc = self.pressure_rise(flow_m3_s); + + // Get port pressures + let p_in = self.port_inlet.pressure().to_pascals(); + let p_out = self.port_outlet.pressure().to_pascals(); + let delta_p_actual = p_out - p_in; + + // 
Residual 0: Pressure balance + residuals[0] = delta_p_calc - delta_p_actual; + + // Residual 1: Power balance + let power_calc = self.hydraulic_power(flow_m3_s).to_watts(); + residuals[1] = power_calc - _power_w; + + Ok(()) + } + + fn jacobian_entries( + &self, + state: &SystemState, + jacobian: &mut JacobianBuilder, + ) -> Result<(), ComponentError> { + if state.len() < 2 { + return Err(ComponentError::InvalidStateDimensions { + expected: 2, + actual: state.len(), + }); + } + + let mass_flow_kg_s = state[0]; + let flow_m3_s = mass_flow_kg_s / self.fluid_density_kg_per_m3; + + // Numerical derivative of pressure with respect to mass flow + let h = 0.001; + let p_plus = self.pressure_rise(flow_m3_s + h / self.fluid_density_kg_per_m3); + let p_minus = self.pressure_rise(flow_m3_s - h / self.fluid_density_kg_per_m3); + let dp_dm = (p_plus - p_minus) / (2.0 * h); + + // ∂r₀/∂ṁ = dΔP/dṁ + jacobian.add_entry(0, 0, dp_dm); + + // ∂r₀/∂P = -1 (constant) + jacobian.add_entry(0, 1, 0.0); + + // Numerical derivative of power with respect to mass flow + let pow_plus = self + .hydraulic_power(flow_m3_s + h / self.fluid_density_kg_per_m3) + .to_watts(); + let pow_minus = self + .hydraulic_power(flow_m3_s - h / self.fluid_density_kg_per_m3) + .to_watts(); + let dpow_dm = (pow_plus - pow_minus) / (2.0 * h); + + // ∂r₁/∂ṁ + jacobian.add_entry(1, 0, dpow_dm); + + // ∂r₁/∂P = -1 + jacobian.add_entry(1, 1, -1.0); + + Ok(()) + } + + fn n_equations(&self) -> usize { + 2 + } + + fn get_ports(&self) -> &[ConnectedPort] { + &[] + } +} + +impl StateManageable for Pump { + fn state(&self) -> OperationalState { + self.operational_state + } + + fn set_state(&mut self, state: OperationalState) -> Result<(), ComponentError> { + if self.operational_state.can_transition_to(state) { + let from = self.operational_state; + self.operational_state = state; + self.on_state_change(from, state); + Ok(()) + } else { + Err(ComponentError::InvalidStateTransition { + from: self.operational_state, + to: state, 
+ reason: "Transition not allowed".to_string(), + }) + } + } + + fn can_transition_to(&self, target: OperationalState) -> bool { + self.operational_state.can_transition_to(target) + } + + fn circuit_id(&self) -> &CircuitId { + &self.circuit_id + } + + fn set_circuit_id(&mut self, circuit_id: CircuitId) { + self.circuit_id = circuit_id; + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::port::FluidId; + use approx::assert_relative_eq; + use entropyk_core::{Enthalpy, Pressure}; + + fn create_test_curves() -> PumpCurves { + // Typical small pump: + // H = 30 - 10*Q - 50*Q² (m, Q in m³/s) + // η = 0.6 + 1.0*Q - 2.0*Q² + PumpCurves::quadratic(30.0, -10.0, -50.0, 0.6, 1.0, -2.0).unwrap() + } + + fn create_test_pump_disconnected() -> Pump { + let curves = create_test_curves(); + let inlet = Port::new( + FluidId::new("Water"), + Pressure::from_bar(1.0), + Enthalpy::from_joules_per_kg(100000.0), + ); + let outlet = Port::new( + FluidId::new("Water"), + Pressure::from_bar(1.0), + Enthalpy::from_joules_per_kg(100000.0), + ); + Pump::new(curves, inlet, outlet, 1000.0).unwrap() + } + + fn create_test_pump_connected() -> Pump { + let curves = create_test_curves(); + let inlet = Port::new( + FluidId::new("Water"), + Pressure::from_bar(1.0), + Enthalpy::from_joules_per_kg(100000.0), + ); + let outlet = Port::new( + FluidId::new("Water"), + Pressure::from_bar(1.0), + Enthalpy::from_joules_per_kg(100000.0), + ); + let (inlet_conn, outlet_conn) = inlet.connect(outlet).unwrap(); + + Pump { + curves, + port_inlet: inlet_conn, + port_outlet: outlet_conn, + fluid_density_kg_per_m3: 1000.0, + speed_ratio: 1.0, + circuit_id: CircuitId::default(), + operational_state: OperationalState::default(), + _state: PhantomData, + } + } + + #[test] + fn test_pump_curves_creation() { + let curves = create_test_curves(); + assert_eq!(curves.head_at_flow(0.0), 30.0); + assert_relative_eq!(curves.efficiency_at_flow(0.0), 0.6); + } + + #[test] + fn test_pump_curves_head() { + let curves = 
create_test_curves(); + // H = 30 - 10*0.5 - 50*0.25 = 30 - 5 - 12.5 = 12.5 m + let head = curves.head_at_flow(0.5); + assert_relative_eq!(head, 12.5, epsilon = 1e-10); + } + + #[test] + fn test_pump_curves_efficiency_clamped() { + let curves = create_test_curves(); + // At very high flow, efficiency might go negative + // Should be clamped to 0 + let eff = curves.efficiency_at_flow(10.0); + assert!(eff >= 0.0); + } + + #[test] + fn test_pump_creation() { + let pump = create_test_pump_disconnected(); + assert_eq!(pump.fluid_density(), 1000.0); + assert_eq!(pump.speed_ratio(), 1.0); + } + + #[test] + fn test_pump_invalid_density() { + let curves = create_test_curves(); + let inlet = Port::new( + FluidId::new("Water"), + Pressure::from_bar(1.0), + Enthalpy::from_joules_per_kg(100000.0), + ); + let outlet = Port::new( + FluidId::new("Water"), + Pressure::from_bar(1.0), + Enthalpy::from_joules_per_kg(100000.0), + ); + + let result = Pump::new(curves, inlet, outlet, -1.0); + assert!(result.is_err()); + } + + #[test] + fn test_pump_different_fluids() { + let curves = create_test_curves(); + let inlet = Port::new( + FluidId::new("Water"), + Pressure::from_bar(1.0), + Enthalpy::from_joules_per_kg(100000.0), + ); + let outlet = Port::new( + FluidId::new("Glycol"), + Pressure::from_bar(1.0), + Enthalpy::from_joules_per_kg(100000.0), + ); + + let result = Pump::new(curves, inlet, outlet, 1000.0); + assert!(result.is_err()); + } + + #[test] + fn test_pump_set_speed_ratio() { + let mut pump = create_test_pump_connected(); + assert!(pump.set_speed_ratio(0.8).is_ok()); + assert_eq!(pump.speed_ratio(), 0.8); + } + + #[test] + fn test_pump_set_speed_ratio_invalid() { + let mut pump = create_test_pump_connected(); + assert!(pump.set_speed_ratio(1.5).is_err()); + assert!(pump.set_speed_ratio(-0.1).is_err()); + } + + #[test] + fn test_pump_pressure_rise_full_speed() { + let pump = create_test_pump_connected(); + // At Q=0: H=30m, P = 1000 * 9.8 * 30 ≈ 294200 Pa + let delta_p = 
pump.pressure_rise(0.0); + let expected = 1000.0 * 9.80665 * 30.0; + assert_relative_eq!(delta_p, expected, epsilon = 100.0); + } + + #[test] + fn test_pump_pressure_rise_reduced_speed() { + let mut pump = create_test_pump_connected(); + pump.set_speed_ratio(0.5).unwrap(); + + // At 50% speed, shut-off head is 25% of full speed + // H = 0.25 * 30 = 7.5 m + let delta_p = pump.pressure_rise(0.0); + let expected = 1000.0 * 9.80665 * 7.5; + assert_relative_eq!(delta_p, expected, epsilon = 100.0); + } + + #[test] + fn test_pump_hydraulic_power() { + let pump = create_test_pump_connected(); + + // At Q=0.1 m³/s: H ≈ 30 - 1 - 0.5 = 28.5 m + // η ≈ 0.6 + 0.1 - 0.02 = 0.68 + // P = 1000 * 9.8 * 0.1 * 28.5 / 0.68 ≈ 4110 W + let power = pump.hydraulic_power(0.1); + assert!(power.to_watts() > 0.0); + assert!(power.to_watts() < 50000.0); + } + + #[test] + fn test_pump_affinity_laws_power() { + let pump_full = create_test_pump_connected(); + + let mut pump_half = create_test_pump_connected(); + pump_half.set_speed_ratio(0.5).unwrap(); + + // Power at half speed should be ~12.5% of full speed (cube law) + // At the same equivalent flow point + let power_full = pump_full.hydraulic_power(0.1); + let power_half = pump_half.hydraulic_power(0.05); // Half the flow + + // P_half / P_full ≈ 0.5³ = 0.125 + let ratio = power_half.to_watts() / power_full.to_watts(); + assert_relative_eq!(ratio, 0.125, epsilon = 0.05); + } + + #[test] + fn test_pump_component_n_equations() { + let pump = create_test_pump_connected(); + assert_eq!(pump.n_equations(), 2); + } + + #[test] + fn test_pump_component_compute_residuals() { + let pump = create_test_pump_connected(); + let state = vec![50.0, 2000.0]; // mass flow, power + let mut residuals = vec![0.0; 2]; + + let result = pump.compute_residuals(&state, &mut residuals); + assert!(result.is_ok()); + } + + #[test] + fn test_pump_state_manageable() { + let pump = create_test_pump_connected(); + assert_eq!(pump.state(), OperationalState::On); + 
assert!(pump.can_transition_to(OperationalState::Off)); + } +} diff --git a/crates/components/src/state_machine.rs b/crates/components/src/state_machine.rs new file mode 100644 index 0000000..93c232b --- /dev/null +++ b/crates/components/src/state_machine.rs @@ -0,0 +1,940 @@ +//! Component State Machine and Circuit Management +//! +//! This module provides types for managing component operational states (ON/OFF/BYPASS) +//! and circuit identification, as required by FR6-FR9 of the Entropyk specification. +//! +//! ## Operational States +//! +//! Components can be in one of three operational states: +//! - **On**: Normal operation with full thermodynamic behavior +//! - **Off**: Component contributes zero mass flow (FR7) +//! - **Bypass**: Component behaves as an adiabatic pipe (FR8) +//! +//! ## Circuit Identification +//! +//! Each component belongs to a specific circuit identified by a `CircuitId`. +//! Multi-circuit machines allow simulation of complex systems like dual-circuit +//! heat pumps (FR9). +//! +//! ## State Management +//! +//! The [`StateManageable`] trait provides a common interface for components +//! that support operational state management. All major components (Compressor, +//! ExpansionValve, HeatExchanger) implement this trait. +//! +//! ## State History +//! +//! For debugging purposes, the [`StateHistory`] type can track state transitions +//! with timestamps. +//! +//! ## Example +//! +//! ```rust +//! use entropyk_components::state_machine::{OperationalState, CircuitId, StateManageable}; +//! +//! // Create a circuit identifier +//! let circuit = CircuitId::new("primary"); +//! +//! // Set component state +//! let state = OperationalState::On; +//! +//! // Check transitions +//! assert!(state.can_transition_to(OperationalState::Off)); +//! ``` + +use std::collections::VecDeque; +use std::time::Instant; + +use crate::ComponentError; + +/// Error type for invalid state transitions. 
+///
+/// This error is returned when attempting an invalid state transition.
+/// Currently, all transitions between states are allowed, but this type
+/// is provided for future extensibility and custom transition rules.
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub struct StateTransitionError {
+    /// The state we're transitioning from
+    pub from: OperationalState,
+    /// The state we're attempting to transition to
+    pub to: OperationalState,
+    /// Human-readable reason for the failure
+    pub reason: &'static str,
+}
+
+impl std::fmt::Display for StateTransitionError {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(
+            f,
+            "Invalid state transition from {:?} to {:?}: {}",
+            self.from, self.to, self.reason
+        )
+    }
+}
+
+impl std::error::Error for StateTransitionError {}
+
+impl From<StateTransitionError> for crate::ComponentError {
+    fn from(err: StateTransitionError) -> Self {
+        crate::ComponentError::InvalidStateTransition {
+            from: err.from,
+            to: err.to,
+            reason: err.reason.to_string(),
+        }
+    }
+}
+
+/// Operational state of a component.
+/// +/// This enum represents the three possible operational states of a component +/// as defined in FR6-FR8: +/// +/// - **On**: Normal operation with full thermodynamic calculations +/// - **Off**: Component contributes zero mass flow to the system +/// - **Bypass**: Component behaves as an adiabatic pipe (P_in = P_out, h_in = h_out) +/// +/// # State Behavior +/// +/// | State | Mass Flow | Energy Transfer | Pressure Drop | +/// |-------|-----------|-----------------|---------------| +/// | On | Normal | Full | Normal | +/// | Off | Zero | None | Infinite | +/// | Bypass| Continuity| None (adiabatic)| Zero | +/// +/// # Examples +/// +/// ``` +/// use entropyk_components::state_machine::OperationalState; +/// +/// let state = OperationalState::On; +/// assert!(state.is_active()); +/// +/// let state = OperationalState::Off; +/// assert!(!state.is_active()); +/// ``` +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum OperationalState { + /// Normal operation with full thermodynamic behavior. + /// + /// In this state, the component performs its full thermodynamic calculations, + /// including heat transfer, pressure changes, and work transfer. + On, + + /// Component is turned off, contributing zero mass flow. + /// + /// When a component is in the Off state (FR7): + /// - Mass flow through the component is forced to zero + /// - The component acts as a blockage in the circuit + /// - No heat transfer or work transfer occurs + /// - This state is used for simulating component failures or seasonal operation + Off, + + /// Bypass mode - component behaves as an adiabatic pipe. 
+ /// + /// When a component is in the Bypass state (FR8): + /// - Pressure at inlet equals pressure at outlet (P_in = P_out) + /// - Enthalpy at inlet equals enthalpy at outlet (h_in = h_out) + /// - No heat transfer occurs (adiabatic) + /// - Mass flow continues through the bypass path + /// - This state is useful for economizers in summer mode or valve bypasses + Bypass, +} + +impl OperationalState { + /// Returns true if the component is active (On or Bypass). + /// + /// An active component allows mass flow through it, though Bypass + /// mode has different thermodynamic behavior than On mode. + /// + /// # Examples + /// + /// ``` + /// use entropyk_components::state_machine::OperationalState; + /// + /// assert!(OperationalState::On.is_active()); + /// assert!(OperationalState::Bypass.is_active()); + /// assert!(!OperationalState::Off.is_active()); + /// ``` + pub fn is_active(&self) -> bool { + matches!(self, OperationalState::On | OperationalState::Bypass) + } + + /// Returns true if the component is in normal operation mode. + /// + /// # Examples + /// + /// ``` + /// use entropyk_components::state_machine::OperationalState; + /// + /// assert!(OperationalState::On.is_on()); + /// assert!(!OperationalState::Off.is_on()); + /// assert!(!OperationalState::Bypass.is_on()); + /// ``` + pub fn is_on(&self) -> bool { + matches!(self, OperationalState::On) + } + + /// Returns true if the component is off. + /// + /// # Examples + /// + /// ``` + /// use entropyk_components::state_machine::OperationalState; + /// + /// assert!(OperationalState::Off.is_off()); + /// assert!(!OperationalState::On.is_off()); + /// ``` + pub fn is_off(&self) -> bool { + matches!(self, OperationalState::Off) + } + + /// Returns true if the component is in bypass mode. 
+ /// + /// # Examples + /// + /// ``` + /// use entropyk_components::state_machine::OperationalState; + /// + /// assert!(OperationalState::Bypass.is_bypass()); + /// assert!(!OperationalState::On.is_bypass()); + /// ``` + pub fn is_bypass(&self) -> bool { + matches!(self, OperationalState::Bypass) + } + + /// Returns the mass flow multiplier for this state. + /// + /// This multiplier is used in residual calculations: + /// - On: 1.0 (full mass flow) + /// - Bypass: 1.0 (mass flow continues through bypass) + /// - Off: 0.0 (no mass flow) + /// + /// # Examples + /// + /// ``` + /// use entropyk_components::state_machine::OperationalState; + /// + /// assert_eq!(OperationalState::On.mass_flow_multiplier(), 1.0); + /// assert_eq!(OperationalState::Bypass.mass_flow_multiplier(), 1.0); + /// assert_eq!(OperationalState::Off.mass_flow_multiplier(), 0.0); + /// ``` + pub fn mass_flow_multiplier(&self) -> f64 { + match self { + OperationalState::On => 1.0, + OperationalState::Off => 0.0, + OperationalState::Bypass => 1.0, + } + } + + /// Checks if a transition to the target state is valid. + /// + /// Currently, all state transitions are allowed. This method is provided + /// for components that may have custom transition rules. + /// + /// # Arguments + /// + /// * `target` - The target operational state + /// + /// # Returns + /// + /// Returns `true` if the transition is valid. 
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use entropyk_components::state_machine::OperationalState;
+    ///
+    /// assert!(OperationalState::On.can_transition_to(OperationalState::Off));
+    /// assert!(OperationalState::Off.can_transition_to(OperationalState::Bypass));
+    /// assert!(OperationalState::Bypass.can_transition_to(OperationalState::On));
+    /// ```
+    pub fn can_transition_to(&self, target: OperationalState) -> bool {
+        matches!(
+            (self, target),
+            (OperationalState::On, OperationalState::Off)
+                | (OperationalState::On, OperationalState::Bypass)
+                | (OperationalState::Off, OperationalState::On)
+                | (OperationalState::Off, OperationalState::Bypass)
+                | (OperationalState::Bypass, OperationalState::On)
+                | (OperationalState::Bypass, OperationalState::Off)
+                | (OperationalState::On, OperationalState::On)
+                | (OperationalState::Off, OperationalState::Off)
+                | (OperationalState::Bypass, OperationalState::Bypass)
+        )
+    }
+
+    /// Attempts to transition to the target state.
+    ///
+    /// # Arguments
+    ///
+    /// * `target` - The target operational state
+    ///
+    /// # Returns
+    ///
+    /// Returns `Ok(target)` if the transition is valid, or a [`StateTransitionError`]
+    /// if the transition is not allowed.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use entropyk_components::state_machine::OperationalState;
+    ///
+    /// let state = OperationalState::On;
+    /// let new_state = state.transition_to(OperationalState::Off).unwrap();
+    /// assert_eq!(new_state, OperationalState::Off);
+    /// ```
+    pub fn transition_to(
+        &self,
+        target: OperationalState,
+    ) -> Result<OperationalState, StateTransitionError> {
+        if self.can_transition_to(target) {
+            Ok(target)
+        } else {
+            Err(StateTransitionError {
+                from: *self,
+                to: target,
+                reason: "Transition not allowed",
+            })
+        }
+    }
+}
+
+impl Default for OperationalState {
+    /// Default operational state is On.
+    fn default() -> Self {
+        OperationalState::On
+    }
+}
+
+/// Unique identifier for a thermodynamic circuit.
+///
+/// A `CircuitId` identifies a complete fluid circuit within a machine.
+/// Multi-circuit machines (e.g., dual-circuit heat pumps) require distinct
+/// identifiers for each independent fluid loop (FR9).
+///
+/// # Use Cases
+///
+/// - Single-circuit machines: Use "default" or "main"
+/// - Dual-circuit heat pumps: Use "circuit_1" and "circuit_2"
+/// - Complex systems: Use descriptive names like "primary", "secondary", "economizer"
+///
+/// # Examples
+///
+/// ```
+/// use entropyk_components::state_machine::CircuitId;
+///
+/// let main_circuit = CircuitId::new("main");
+/// let secondary = CircuitId::new("secondary");
+///
+/// assert_ne!(main_circuit, secondary);
+/// ```
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct CircuitId(String);
+
+impl CircuitId {
+    /// Creates a new circuit identifier from a string.
+    ///
+    /// # Arguments
+    ///
+    /// * `id` - A unique string identifier for the circuit
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use entropyk_components::state_machine::CircuitId;
+    ///
+    /// let circuit = CircuitId::new("primary");
+    /// ```
+    pub fn new(id: impl Into<String>) -> Self {
+        Self(id.into())
+    }
+
+    /// Returns the circuit identifier as a string slice.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use entropyk_components::state_machine::CircuitId;
+    ///
+    /// let circuit = CircuitId::new("main");
+    /// assert_eq!(circuit.as_str(), "main");
+    /// ```
+    pub fn as_str(&self) -> &str {
+        &self.0
+    }
+
+    /// Creates a default circuit identifier.
+    ///
+    /// Returns a CircuitId with value "default".
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use entropyk_components::state_machine::CircuitId;
+    ///
+    /// let default = CircuitId::default_circuit();
+    /// assert_eq!(default.as_str(), "default");
+    /// ```
+    pub fn default_circuit() -> Self {
+        Self("default".to_string())
+    }
+}
+
+impl Default for CircuitId {
+    /// Default circuit identifier is "default".
+    fn default() -> Self {
+        Self("default".to_string())
+    }
+}
+
+impl AsRef<str> for CircuitId {
+    fn as_ref(&self) -> &str {
+        &self.0
+    }
+}
+
+impl std::fmt::Display for CircuitId {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(f, "{}", self.0)
+    }
+}
+
+/// Record of a state transition for debugging purposes.
+///
+/// Tracks when a component changed states, what the previous state was,
+/// and what the new state is.
+#[derive(Debug, Clone)]
+pub struct StateTransitionRecord {
+    /// Timestamp when the transition occurred
+    pub timestamp: Instant,
+    /// State before the transition
+    pub from_state: OperationalState,
+    /// State after the transition
+    pub to_state: OperationalState,
+}
+
+impl StateTransitionRecord {
+    /// Creates a new state transition record.
+    ///
+    /// # Arguments
+    ///
+    /// * `from_state` - The state before transition
+    /// * `to_state` - The state after transition
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use entropyk_components::state_machine::{StateTransitionRecord, OperationalState};
+    ///
+    /// let record = StateTransitionRecord::new(OperationalState::On, OperationalState::Off);
+    /// assert_eq!(record.from_state, OperationalState::On);
+    /// assert_eq!(record.to_state, OperationalState::Off);
+    /// ```
+    pub fn new(from_state: OperationalState, to_state: OperationalState) -> Self {
+        Self {
+            timestamp: Instant::now(),
+            from_state,
+            to_state,
+        }
+    }
+
+    /// Returns the elapsed time since this transition occurred.
+    pub fn elapsed(&self) -> std::time::Duration {
+        self.timestamp.elapsed()
+    }
+}
+
+/// History buffer for tracking state transitions.
+///
+/// Maintains a configurable-size buffer of recent state transitions
+/// for debugging and analysis purposes.
+///
+/// # Examples
+///
+/// ```
+/// use entropyk_components::state_machine::{StateHistory, OperationalState};
+///
+/// let mut history = StateHistory::new(10);
+/// history.record(OperationalState::On, OperationalState::Off);
+///
+/// assert_eq!(history.len(), 1);
+/// assert_eq!(history.records()[0].from_state, OperationalState::On);
+/// ```
+#[derive(Debug, Clone)]
+pub struct StateHistory {
+    records: VecDeque<StateTransitionRecord>,
+    max_depth: usize,
+}
+
+impl StateHistory {
+    /// Default maximum history depth.
+    pub const DEFAULT_MAX_DEPTH: usize = 10;
+
+    /// Creates a new state history with the specified maximum depth.
+    ///
+    /// # Arguments
+    ///
+    /// * `max_depth` - Maximum number of records to keep
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use entropyk_components::state_machine::StateHistory;
+    ///
+    /// let history = StateHistory::new(20);
+    /// assert_eq!(history.max_depth(), 20);
+    /// ```
+    pub fn new(max_depth: usize) -> Self {
+        Self {
+            records: VecDeque::with_capacity(max_depth),
+            max_depth,
+        }
+    }
+
+    /// Creates a new state history with default depth (10 records).
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use entropyk_components::state_machine::StateHistory;
+    ///
+    /// let history = StateHistory::default();
+    /// assert_eq!(history.max_depth(), 10);
+    /// ```
+    pub fn with_default_depth() -> Self {
+        Self::new(Self::DEFAULT_MAX_DEPTH)
+    }
+
+    /// Returns the maximum number of records this history can hold.
+    pub fn max_depth(&self) -> usize {
+        self.max_depth
+    }
+
+    /// Records a state transition.
+    ///
+    /// If the history is full, the oldest record is removed.
+ /// + /// # Arguments + /// + /// * `from_state` - The state before transition + /// * `to_state` - The state after transition + /// + /// # Examples + /// + /// ``` + /// use entropyk_components::state_machine::{StateHistory, OperationalState}; + /// + /// let mut history = StateHistory::default(); + /// history.record(OperationalState::On, OperationalState::Off); + /// + /// assert_eq!(history.len(), 1); + /// ``` + pub fn record(&mut self, from_state: OperationalState, to_state: OperationalState) { + if self.records.len() >= self.max_depth { + self.records.pop_front(); + } + self.records + .push_back(StateTransitionRecord::new(from_state, to_state)); + } + + /// Returns the number of records in the history. + pub fn len(&self) -> usize { + self.records.len() + } + + /// Returns true if there are no records in the history. + pub fn is_empty(&self) -> bool { + self.records.is_empty() + } + + /// Returns a slice of all records, oldest first. + pub fn records(&self) -> &[StateTransitionRecord] { + self.records.as_slices().0 + } + + /// Returns the most recent transition record, if any. + pub fn last(&self) -> Option<&StateTransitionRecord> { + self.records.back() + } + + /// Clears all records from the history. + pub fn clear(&mut self) { + self.records.clear(); + } +} + +impl Default for StateHistory { + fn default() -> Self { + Self::with_default_depth() + } +} + +/// Trait for components that support operational state management. +/// +/// This trait provides a common interface for managing the operational +/// state of thermodynamic components. All major components (Compressor, +/// ExpansionValve, HeatExchanger) implement this trait. +/// +/// # Object Safety +/// +/// This trait is object-safe and can be used with dynamic dispatch. +/// +/// # Callback Hooks +/// +/// The trait provides optional callback hooks via `on_state_change()` which +/// can be overridden to perform actions when state transitions occur. 
+/// +/// # Examples +/// +/// ```rust,ignore +/// use entropyk_components::state_machine::{StateManageable, OperationalState, CircuitId}; +/// use entropyk_components::ComponentError; +/// +/// fn check_component_state(component: &dyn StateManageable) { +/// println!("Component state: {:?}", component.state()); +/// println!("Circuit: {}", component.circuit_id().as_str()); +/// } +/// ``` +pub trait StateManageable { + /// Returns the current operational state. + fn state(&self) -> OperationalState; + + /// Sets the operational state with validation. + /// + /// # Arguments + /// + /// * `state` - The new operational state + /// + /// # Returns + /// + /// Returns `Ok(())` if the transition is valid, or a [`ComponentError`] + /// if the transition is not allowed. + /// + /// # Errors + /// + /// Returns [`ComponentError::InvalidStateTransition`] if the transition + /// is not valid for this component. + fn set_state(&mut self, state: OperationalState) -> Result<(), ComponentError>; + + /// Checks if a transition to the target state is valid. + /// + /// # Arguments + /// + /// * `target` - The target operational state + /// + /// # Returns + /// + /// Returns `true` if the transition is valid. + fn can_transition_to(&self, target: OperationalState) -> bool; + + /// Returns the circuit identifier. + fn circuit_id(&self) -> &CircuitId; + + /// Sets the circuit identifier. + /// + /// # Arguments + /// + /// * `circuit_id` - The new circuit identifier + fn set_circuit_id(&mut self, circuit_id: CircuitId); + + /// Optional callback invoked after a state change. + /// + /// Override this method to perform actions when the component's state changes, + /// such as logging, updating internal state, or triggering side effects. + /// + /// # Arguments + /// + /// * `from` - The previous operational state + /// * `to` - The new operational state + /// + /// # Default Implementation + /// + /// The default implementation does nothing. Override to add custom behavior. 
+ fn on_state_change(&mut self, _from: OperationalState, _to: OperationalState) { + // Default: no-op. Override to add callback behavior. + } + + /// Returns the state transition history, if tracking is enabled. + /// + /// Components can optionally track state transition history for debugging. + /// By default, this returns `None`. Override to return a reference to + /// the component's state history. + /// + /// # Returns + /// + /// Returns `Some(&StateHistory)` if history tracking is enabled, or `None` otherwise. + fn state_history(&self) -> Option<&StateHistory> { + None + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_operational_state_on() { + let state = OperationalState::On; + assert!(state.is_on()); + assert!(!state.is_off()); + assert!(!state.is_bypass()); + assert!(state.is_active()); + assert_eq!(state.mass_flow_multiplier(), 1.0); + } + + #[test] + fn test_operational_state_off() { + let state = OperationalState::Off; + assert!(!state.is_on()); + assert!(state.is_off()); + assert!(!state.is_bypass()); + assert!(!state.is_active()); + assert_eq!(state.mass_flow_multiplier(), 0.0); + } + + #[test] + fn test_operational_state_bypass() { + let state = OperationalState::Bypass; + assert!(!state.is_on()); + assert!(!state.is_off()); + assert!(state.is_bypass()); + assert!(state.is_active()); + assert_eq!(state.mass_flow_multiplier(), 1.0); + } + + #[test] + fn test_operational_state_default() { + let state: OperationalState = Default::default(); + assert!(state.is_on()); + } + + #[test] + fn test_operational_state_equality() { + assert_eq!(OperationalState::On, OperationalState::On); + assert_eq!(OperationalState::Off, OperationalState::Off); + assert_eq!(OperationalState::Bypass, OperationalState::Bypass); + assert_ne!(OperationalState::On, OperationalState::Off); + assert_ne!(OperationalState::On, OperationalState::Bypass); + } + + #[test] + fn test_can_transition_to_all_combinations() { + let states = [ + OperationalState::On, + 
OperationalState::Off, + OperationalState::Bypass, + ]; + + for from in states { + for to in states { + assert!( + from.can_transition_to(to), + "Transition from {:?} to {:?} should be allowed", + from, + to + ); + } + } + } + + #[test] + fn test_transition_to_success() { + let state = OperationalState::On; + let result = state.transition_to(OperationalState::Off); + assert!(result.is_ok()); + assert_eq!(result.unwrap(), OperationalState::Off); + } + + #[test] + fn test_transition_to_same_state() { + let state = OperationalState::On; + let result = state.transition_to(OperationalState::On); + assert!(result.is_ok()); + assert_eq!(result.unwrap(), OperationalState::On); + } + + #[test] + fn test_state_transition_error_display() { + let err = StateTransitionError { + from: OperationalState::On, + to: OperationalState::Off, + reason: "Test reason", + }; + let msg = format!("{}", err); + assert!(msg.contains("Invalid state transition")); + assert!(msg.contains("On")); + assert!(msg.contains("Off")); + assert!(msg.contains("Test reason")); + } + + #[test] + fn test_circuit_id_creation() { + let circuit = CircuitId::new("main"); + assert_eq!(circuit.as_str(), "main"); + } + + #[test] + fn test_circuit_id_from_string() { + let name = String::from("secondary"); + let circuit = CircuitId::new(name); + assert_eq!(circuit.as_str(), "secondary"); + } + + #[test] + fn test_circuit_id_default() { + let circuit = CircuitId::default(); + assert_eq!(circuit.as_str(), "default"); + } + + #[test] + fn test_circuit_id_default_circuit() { + let circuit = CircuitId::default_circuit(); + assert_eq!(circuit.as_str(), "default"); + } + + #[test] + fn test_circuit_id_equality() { + let c1 = CircuitId::new("circuit_1"); + let c2 = CircuitId::new("circuit_1"); + let c3 = CircuitId::new("circuit_2"); + + assert_eq!(c1, c2); + assert_ne!(c1, c3); + } + + #[test] + fn test_circuit_id_as_ref() { + let circuit = CircuitId::new("test"); + let s: &str = circuit.as_ref(); + assert_eq!(s, "test"); + } 
+ + #[test] + fn test_circuit_id_display() { + let circuit = CircuitId::new("main_circuit"); + assert_eq!(format!("{}", circuit), "main_circuit"); + } + + #[test] + fn test_circuit_id_hash() { + use std::collections::HashMap; + + let mut map = HashMap::new(); + map.insert(CircuitId::new("c1"), 1); + map.insert(CircuitId::new("c2"), 2); + + assert_eq!(map.get(&CircuitId::new("c1")), Some(&1)); + assert_eq!(map.get(&CircuitId::new("c2")), Some(&2)); + } + + #[test] + fn test_state_transition_record_creation() { + let record = StateTransitionRecord::new(OperationalState::On, OperationalState::Off); + assert_eq!(record.from_state, OperationalState::On); + assert_eq!(record.to_state, OperationalState::Off); + } + + #[test] + fn test_state_transition_record_elapsed() { + let record = StateTransitionRecord::new(OperationalState::On, OperationalState::Off); + let elapsed = record.elapsed(); + assert!(elapsed.as_nanos() >= 0); + } + + #[test] + fn test_state_history_creation() { + let history = StateHistory::new(5); + assert_eq!(history.max_depth(), 5); + assert!(history.is_empty()); + assert_eq!(history.len(), 0); + } + + #[test] + fn test_state_history_default() { + let history = StateHistory::default(); + assert_eq!(history.max_depth(), StateHistory::DEFAULT_MAX_DEPTH); + } + + #[test] + fn test_state_history_with_default_depth() { + let history = StateHistory::with_default_depth(); + assert_eq!(history.max_depth(), 10); + } + + #[test] + fn test_state_history_record() { + let mut history = StateHistory::new(10); + history.record(OperationalState::On, OperationalState::Off); + + assert_eq!(history.len(), 1); + assert!(!history.is_empty()); + + let records = history.records(); + assert_eq!(records.len(), 1); + assert_eq!(records[0].from_state, OperationalState::On); + assert_eq!(records[0].to_state, OperationalState::Off); + } + + #[test] + fn test_state_history_last() { + let mut history = StateHistory::new(10); + assert!(history.last().is_none()); + + 
history.record(OperationalState::On, OperationalState::Off); + let last = history.last().unwrap(); + assert_eq!(last.from_state, OperationalState::On); + assert_eq!(last.to_state, OperationalState::Off); + + history.record(OperationalState::Off, OperationalState::Bypass); + let last = history.last().unwrap(); + assert_eq!(last.from_state, OperationalState::Off); + assert_eq!(last.to_state, OperationalState::Bypass); + } + + #[test] + fn test_state_history_max_depth() { + let mut history = StateHistory::new(3); + + history.record(OperationalState::On, OperationalState::Off); + history.record(OperationalState::Off, OperationalState::Bypass); + history.record(OperationalState::Bypass, OperationalState::On); + assert_eq!(history.len(), 3); + + history.record(OperationalState::On, OperationalState::Off); + assert_eq!(history.len(), 3); + + let records = history.records(); + assert_eq!(records[0].from_state, OperationalState::Off); + assert_eq!(records[0].to_state, OperationalState::Bypass); + } + + #[test] + fn test_state_history_clear() { + let mut history = StateHistory::new(10); + history.record(OperationalState::On, OperationalState::Off); + history.record(OperationalState::Off, OperationalState::Bypass); + + assert_eq!(history.len(), 2); + + history.clear(); + + assert!(history.is_empty()); + assert_eq!(history.len(), 0); + } + + #[test] + fn test_state_history_multiple_transitions() { + let mut history = StateHistory::new(10); + + history.record(OperationalState::On, OperationalState::Off); + history.record(OperationalState::Off, OperationalState::Bypass); + history.record(OperationalState::Bypass, OperationalState::On); + + assert_eq!(history.len(), 3); + + let records = history.records(); + assert_eq!(records[0].from_state, OperationalState::On); + assert_eq!(records[1].from_state, OperationalState::Off); + assert_eq!(records[2].from_state, OperationalState::Bypass); + } +} diff --git a/crates/core/Cargo.toml b/crates/core/Cargo.toml index a299244..fd779e5 
100644 --- a/crates/core/Cargo.toml +++ b/crates/core/Cargo.toml @@ -13,3 +13,4 @@ serde.workspace = true [dev-dependencies] approx = "0.5" +serde_json = "1.0" diff --git a/crates/core/src/calib.rs b/crates/core/src/calib.rs new file mode 100644 index 0000000..3cbefa6 --- /dev/null +++ b/crates/core/src/calib.rs @@ -0,0 +1,175 @@ +//! Calibration factors (Calib) for matching simulation to real machine test data. +//! +//! Short name: Calib. Default 1.0 = no correction. Typical range [0.8, 1.2]. +//! Refs: Buildings Modelica, EnergyPlus, TRNSYS, TIL Suite, alphaXiv. +//! +//! ## Recommended calibration order +//! +//! To avoid parameter fighting, calibrate in this order: +//! 1. **f_m** — mass flow (compressor power + ṁ measurements) +//! 2. **f_dp** — pressure drops (inlet/outlet pressures) +//! 3. **f_ua** — heat transfer (superheat, subcooling, capacity) +//! 4. **f_power** — compressor power (if f_m insufficient) + +use serde::{Deserialize, Serialize}; + +fn one() -> f64 { + 1.0 +} + +/// Calibration factors for matching simulation to real machine test data. +/// +/// Default 1.0 = no correction. Typical range [0.8, 1.2]. All factors are validated to lie in [0.5, 2.0]. 
+/// +/// | Field | Full name | Effect | Components | +/// |-----------|------------------------|---------------------------------|-----------------------------| +/// | `f_m` | mass flow factor | ṁ_eff = f_m × ṁ_nominal | Compressor, Expansion Valve | +/// | `f_dp` | pressure drop factor | ΔP_eff = f_dp × ΔP_nominal | Pipe, Heat Exchanger | +/// | `f_ua` | UA factor | UA_eff = f_ua × UA_nominal | Evaporator, Condenser | +/// | `f_power` | power factor | Ẇ_eff = f_power × Ẇ_nominal | Compressor | +/// | `f_etav` | volumetric efficiency | η_v,eff = f_etav × η_v,nominal | Compressor (displacement) | +#[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize)] +pub struct Calib { + /// f_m: ṁ_eff = f_m × ṁ_nominal (Compressor, Valve) + #[serde(default = "one", alias = "calib_flow")] + pub f_m: f64, + /// f_dp: ΔP_eff = f_dp × ΔP_nominal (Pipe, HX) + #[serde(default = "one", alias = "calib_dpr")] + pub f_dp: f64, + /// f_ua: UA_eff = f_ua × UA_nominal (Evaporator, Condenser) + #[serde(default = "one", alias = "calib_ua")] + pub f_ua: f64, + /// f_power: Ẇ_eff = f_power × Ẇ_nominal (Compressor) + #[serde(default = "one")] + pub f_power: f64, + /// f_etav: η_v,eff = f_etav × η_v,nominal (Compressor displacement) + #[serde(default = "one")] + pub f_etav: f64, +} + +impl Default for Calib { + fn default() -> Self { + Self { + f_m: 1.0, + f_dp: 1.0, + f_ua: 1.0, + f_power: 1.0, + f_etav: 1.0, + } + } +} + +/// Error returned when a calibration factor is outside the allowed range [0.5, 2.0]. +#[derive(Debug, Clone, PartialEq)] +pub struct CalibValidationError { + /// Factor name (e.g. 
"f_m") + pub factor: &'static str, + /// Value that failed validation + pub value: f64, +} + +impl std::fmt::Display for CalibValidationError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!( + f, + "calib {} = {} is outside allowed range [0.5, 2.0]", + self.factor, self.value + ) + } +} + +impl std::error::Error for CalibValidationError {} + +const MIN_F: f64 = 0.5; +const MAX_F: f64 = 2.0; + +impl Calib { + /// Validates that all factors lie in [0.5, 2.0]. Returns `Ok(())` or the first invalid factor. + pub fn validate(&self) -> Result<(), CalibValidationError> { + let check = |name: &'static str, value: f64| { + if !(MIN_F..=MAX_F).contains(&value) { + Err(CalibValidationError { + factor: name, + value, + }) + } else { + Ok(()) + } + }; + check("f_m", self.f_m)?; + check("f_dp", self.f_dp)?; + check("f_ua", self.f_ua)?; + check("f_power", self.f_power)?; + check("f_etav", self.f_etav)?; + Ok(()) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_calib_default_all_one() { + let c = Calib::default(); + assert_eq!(c.f_m, 1.0); + assert_eq!(c.f_dp, 1.0); + assert_eq!(c.f_ua, 1.0); + assert_eq!(c.f_power, 1.0); + assert_eq!(c.f_etav, 1.0); + assert!(c.validate().is_ok()); + } + + #[test] + fn test_calib_validation_bounds() { + let ok = Calib { + f_m: 0.5, + f_dp: 1.0, + f_ua: 2.0, + f_power: 1.0, + f_etav: 1.0, + }; + assert!(ok.validate().is_ok()); + + let bad_m = Calib { + f_m: 0.4, + ..Default::default() + }; + let err = bad_m.validate().unwrap_err(); + assert_eq!(err.factor, "f_m"); + assert!((err.value - 0.4).abs() < 1e-9); + + let bad_high = Calib { + f_ua: 2.1, + ..Default::default() + }; + let err2 = bad_high.validate().unwrap_err(); + assert_eq!(err2.factor, "f_ua"); + } + + #[test] + fn test_calib_json_roundtrip() { + let c = Calib { + f_m: 1.1, + f_dp: 0.9, + f_ua: 1.0, + f_power: 1.05, + f_etav: 1.0, + }; + let json = serde_json::to_string(&c).unwrap(); + let c2: Calib = 
serde_json::from_str(&json).unwrap(); + assert_eq!(c, c2); + } + + #[test] + fn test_calib_aliases_backward_compat() { + // calib_flow → f_m + let json = r#"{"calib_flow": 1.2}"#; + let c: Calib = serde_json::from_str(json).unwrap(); + assert_eq!(c.f_m, 1.2); + assert_eq!(c.f_dp, 1.0); + assert_eq!(c.f_ua, 1.0); + assert_eq!(c.f_power, 1.0); + assert_eq!(c.f_etav, 1.0); + } +} diff --git a/crates/core/src/lib.rs b/crates/core/src/lib.rs index 26f233a..6bc8ee0 100644 --- a/crates/core/src/lib.rs +++ b/crates/core/src/lib.rs @@ -37,7 +37,14 @@ #![deny(warnings)] #![warn(missing_docs)] +pub mod calib; pub mod types; // Re-export all physical types for convenience -pub use types::{Enthalpy, MassFlow, Pressure, Temperature}; +pub use types::{ + Enthalpy, MassFlow, MIN_MASS_FLOW_REGULARIZATION_KG_S, Power, Pressure, Temperature, + ThermalConductance, +}; + +// Re-export calibration types +pub use calib::{Calib, CalibValidationError}; diff --git a/crates/core/src/types.rs b/crates/core/src/types.rs index 6603178..b976a84 100644 --- a/crates/core/src/types.rs +++ b/crates/core/src/types.rs @@ -303,10 +303,24 @@ impl Div for Enthalpy { } } +/// Minimum mass flow used in denominators to avoid division by zero (zero-flow regularization). +/// +/// When mass flow is zero or below this value, use [`MassFlow::regularized`] in any expression +/// that divides by mass flow (e.g. Q/ṁ, ΔP/ṁ²) or by quantities derived from it (e.g. Reynolds, +/// capacity rate C = ṁ·Cp). This prevents NaN/Inf while preserving solver convergence. +/// +/// Value: 1e-12 kg/s (small enough to not affect physical results when ṁ >> ε). +pub const MIN_MASS_FLOW_REGULARIZATION_KG_S: f64 = 1e-12; + /// Mass flow rate in kilograms per second (kg/s). /// /// Internally stores the value in kilograms per second (SI base unit). 
/// +/// # Zero-flow regularization +/// +/// When dividing by mass flow (or using it in denominators), use [`MassFlow::regularized`] so that +/// zero-flow branches do not cause division by zero. See [`MIN_MASS_FLOW_REGULARIZATION_KG_S`]. +/// /// # Example /// /// ``` @@ -338,6 +352,15 @@ impl MassFlow { pub fn to_grams_per_s(&self) -> f64 { self.0 * 1_000.0 } + + /// Returns mass flow clamped to at least [`MIN_MASS_FLOW_REGULARIZATION_KG_S`] for use in denominators. + /// + /// Use this whenever dividing by mass flow (e.g. Q/ṁ) or by a quantity derived from it (e.g. Re ∝ ṁ) + /// to avoid division by zero when the branch has zero flow (e.g. component in Off state). + #[must_use] + pub fn regularized(self) -> Self { + MassFlow(self.0.max(MIN_MASS_FLOW_REGULARIZATION_KG_S)) + } } impl fmt::Display for MassFlow { @@ -392,6 +415,148 @@ impl Div for MassFlow { } } +/// Power in Watts (W). +/// +/// Internally stores the value in Watts (SI base unit). +/// Provides conversions to/from common units like kilowatts. +/// +/// # Example +/// +/// ``` +/// use entropyk_core::Power; +/// +/// let p = Power::from_kilowatts(1.0); +/// assert_eq!(p.to_watts(), 1000.0); +/// assert_eq!(p.to_kilowatts(), 1.0); +/// ``` +#[derive(Debug, Clone, Copy, PartialEq, PartialOrd)] +pub struct Power(pub f64); + +impl Power { + /// Creates a Power from a value in Watts. + pub fn from_watts(value: f64) -> Self { + Power(value) + } + + /// Creates a Power from a value in kilowatts. + pub fn from_kilowatts(value: f64) -> Self { + Power(value * 1_000.0) + } + + /// Creates a Power from a value in megawatts. + pub fn from_megawatts(value: f64) -> Self { + Power(value * 1_000_000.0) + } + + /// Returns the power in Watts. + pub fn to_watts(&self) -> f64 { + self.0 + } + + /// Returns the power in kilowatts. + pub fn to_kilowatts(&self) -> f64 { + self.0 / 1_000.0 + } + + /// Returns the power in megawatts. 
+ pub fn to_megawatts(&self) -> f64 { + self.0 / 1_000_000.0 + } +} + +impl fmt::Display for Power { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{} W", self.0) + } +} + +impl From for Power { + fn from(value: f64) -> Self { + Power(value) + } +} + +impl Add for Power { + type Output = Power; + + fn add(self, other: Power) -> Power { + Power(self.0 + other.0) + } +} + +impl Sub for Power { + type Output = Power; + + fn sub(self, other: Power) -> Power { + Power(self.0 - other.0) + } +} + +impl Mul for Power { + type Output = Power; + + fn mul(self, scalar: f64) -> Power { + Power(self.0 * scalar) + } +} + +impl Mul for f64 { + type Output = Power; + + fn mul(self, p: Power) -> Power { + Power(self * p.0) + } +} + +impl Div for Power { + type Output = Power; + + fn div(self, scalar: f64) -> Power { + Power(self.0 / scalar) + } +} + +/// Thermal conductance in Watts per Kelvin (W/K). +/// +/// Represents the heat transfer coefficient (UA value) for thermal coupling +/// between circuits or components. +#[derive(Debug, Clone, Copy, PartialEq, PartialOrd)] +pub struct ThermalConductance(pub f64); + +impl ThermalConductance { + /// Creates a ThermalConductance from a value in Watts per Kelvin (W/K). + pub fn from_watts_per_kelvin(value: f64) -> Self { + ThermalConductance(value) + } + + /// Creates a ThermalConductance from a value in kilowatts per Kelvin (kW/K). + pub fn from_kilowatts_per_kelvin(value: f64) -> Self { + ThermalConductance(value * 1_000.0) + } + + /// Returns the thermal conductance in Watts per Kelvin. + pub fn to_watts_per_kelvin(&self) -> f64 { + self.0 + } + + /// Returns the thermal conductance in kilowatts per Kelvin. 
+ pub fn to_kilowatts_per_kelvin(&self) -> f64 { + self.0 / 1_000.0 + } +} + +impl fmt::Display for ThermalConductance { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{} W/K", self.0) + } +} + +impl From for ThermalConductance { + fn from(value: f64) -> Self { + ThermalConductance(value) + } +} + #[cfg(test)] mod tests { use super::*; @@ -656,6 +821,20 @@ mod tests { assert_relative_eq!(m1.to_grams_per_s(), 500.0, epsilon = 1e-6); } + #[test] + fn test_mass_flow_regularized() { + use super::MIN_MASS_FLOW_REGULARIZATION_KG_S; + let zero = MassFlow::from_kg_per_s(0.0); + let r = zero.regularized(); + assert_relative_eq!(r.to_kg_per_s(), MIN_MASS_FLOW_REGULARIZATION_KG_S, epsilon = 1e-15); + let small = MassFlow::from_kg_per_s(1e-14); + let r2 = small.regularized(); + assert_relative_eq!(r2.to_kg_per_s(), MIN_MASS_FLOW_REGULARIZATION_KG_S, epsilon = 1e-15); + let normal = MassFlow::from_kg_per_s(0.5); + let r3 = normal.regularized(); + assert_relative_eq!(r3.to_kg_per_s(), 0.5, epsilon = 1e-10); + } + // ==================== TYPE SAFETY TESTS ==================== #[test] @@ -748,4 +927,53 @@ mod tests { let m = MassFlow::from_kg_per_s(1e-12); assert_relative_eq!(m.to_kg_per_s(), 1e-12, epsilon = 1e-17); } + + // ==================== POWER TESTS ==================== + + #[test] + fn test_power_from_watts() { + let p = Power::from_watts(1000.0); + assert_relative_eq!(p.0, 1000.0, epsilon = 1e-10); + assert_relative_eq!(p.to_watts(), 1000.0, epsilon = 1e-10); + } + + #[test] + fn test_power_from_kilowatts() { + let p = Power::from_kilowatts(1.0); + assert_relative_eq!(p.to_watts(), 1000.0, epsilon = 1e-6); + assert_relative_eq!(p.to_kilowatts(), 1.0, epsilon = 1e-6); + } + + #[test] + fn test_power_from_megawatts() { + let p = Power::from_megawatts(1.0); + assert_relative_eq!(p.to_watts(), 1_000_000.0, epsilon = 1e-6); + assert_relative_eq!(p.to_megawatts(), 1.0, epsilon = 1e-6); + } + + #[test] + fn test_power_display() { + let p = 
Power::from_watts(5000.0); + assert_eq!(format!("{}", p), "5000 W"); + } + + #[test] + fn test_power_arithmetic() { + let p1 = Power::from_watts(1000.0); + let p2 = Power::from_watts(500.0); + let p3 = p1 + p2; + assert_relative_eq!(p3.to_watts(), 1500.0, epsilon = 1e-10); + + let p4 = p1 - p2; + assert_relative_eq!(p4.to_watts(), 500.0, epsilon = 1e-10); + + let p5 = p1 * 2.0; + assert_relative_eq!(p5.to_watts(), 2000.0, epsilon = 1e-10); + + let p6 = p1 / 2.0; + assert_relative_eq!(p6.to_watts(), 500.0, epsilon = 1e-10); + + let p7 = 2.0 * p1; + assert_relative_eq!(p7.to_watts(), 2000.0, epsilon = 1e-10); + } } diff --git a/crates/fluids/Cargo.toml b/crates/fluids/Cargo.toml new file mode 100644 index 0000000..bcd298d --- /dev/null +++ b/crates/fluids/Cargo.toml @@ -0,0 +1,28 @@ +[package] +name = "entropyk-fluids" +version.workspace = true +authors.workspace = true +edition.workspace = true +license.workspace = true +repository.workspace = true +description = "Fluid properties backend for Entropyk thermodynamic simulation library" + +[dependencies] +entropyk-core = { path = "../core" } +thiserror.workspace = true +serde.workspace = true +serde_json = "1.0" +lru = "0.12" +entropyk-coolprop-sys = { path = "coolprop-sys", optional = true } + +[features] +default = [] +coolprop = ["entropyk-coolprop-sys"] + +[dev-dependencies] +approx = "0.5" +criterion = "0.5" + +[[bench]] +name = "cache_10k" +harness = false diff --git a/crates/fluids/benches/cache_10k.rs b/crates/fluids/benches/cache_10k.rs new file mode 100644 index 0000000..f0718d1 --- /dev/null +++ b/crates/fluids/benches/cache_10k.rs @@ -0,0 +1,54 @@ +//! Benchmark: 10k repeated (P,T) queries — cached vs uncached (Story 2.4 AC#4). +//! +//! Compares throughput of CachedBackend vs raw backend for repeated same-state queries. +//! Cached path should show significant speedup when the backend is expensive (e.g. CoolProp). 
+ +use criterion::{black_box, criterion_group, criterion_main, Criterion}; +use entropyk_fluids::{ + CachedBackend, FluidBackend, FluidId, Property, ThermoState, TestBackend, +}; +use entropyk_core::{Pressure, Temperature}; + +const N_QUERIES: u32 = 10_000; + +fn bench_uncached_10k(c: &mut Criterion) { + let backend = TestBackend::new(); + let state = ThermoState::from_pt( + Pressure::from_bar(1.0), + Temperature::from_celsius(25.0), + ); + let fluid = FluidId::new("R134a"); + + c.bench_function("uncached_10k_same_state", |b| { + b.iter(|| { + for _ in 0..N_QUERIES { + black_box( + backend.property(fluid.clone(), Property::Density, state.clone()).unwrap(), + ); + } + }); + }); +} + +fn bench_cached_10k(c: &mut Criterion) { + let inner = TestBackend::new(); + let cached = CachedBackend::new(inner); + let state = ThermoState::from_pt( + Pressure::from_bar(1.0), + Temperature::from_celsius(25.0), + ); + let fluid = FluidId::new("R134a"); + + c.bench_function("cached_10k_same_state", |b| { + b.iter(|| { + for _ in 0..N_QUERIES { + black_box( + cached.property(fluid.clone(), Property::Density, state.clone()).unwrap(), + ); + } + }); + }); +} + +criterion_group!(benches, bench_uncached_10k, bench_cached_10k); +criterion_main!(benches); diff --git a/crates/fluids/build.rs b/crates/fluids/build.rs new file mode 100644 index 0000000..1ce4129 --- /dev/null +++ b/crates/fluids/build.rs @@ -0,0 +1,18 @@ +//! Build script for entropyk-fluids crate. +//! +//! This build script can optionally compile CoolProp C++ library when the +//! "coolprop" feature is enabled. 
+ +use std::env; + +fn main() { + let coolprop_enabled = env::var("CARGO_FEATURE_COOLPROP").is_ok(); + + if coolprop_enabled { + println!("cargo:rustc-link-lib=dylib=coolprop"); + println!("cargo:rerun-if-changed=build.rs"); + } + + // Tell Cargo to rerun this script if any source files change + println!("cargo:rerun-if-changed=build.rs"); +} diff --git a/crates/fluids/coolprop-sys/build.rs b/crates/fluids/coolprop-sys/build.rs new file mode 100644 index 0000000..7a7d98a --- /dev/null +++ b/crates/fluids/coolprop-sys/build.rs @@ -0,0 +1,64 @@ +//! Build script for coolprop-sys. +//! +//! This compiles the CoolProp C++ library statically. + +use std::env; +use std::path::PathBuf; + +fn coolprop_src_path() -> Option { + // Try to find CoolProp source in common locations + let possible_paths = vec![ + // Vendor directory (recommended) + PathBuf::from("vendor/coolprop"), + // External directory + PathBuf::from("external/coolprop"), + // System paths + PathBuf::from("/usr/local/src/CoolProp"), + PathBuf::from("/opt/CoolProp"), + ]; + + for path in possible_paths { + if path.join("CMakeLists.txt").exists() { + return Some(path); + } + } + + None +} + +fn main() { + let static_linking = env::var("CARGO_FEATURE_STATIC").is_ok(); + + // Check if CoolProp source is available + if let Some(coolprop_path) = coolprop_src_path() { + println!("cargo:rerun-if-changed={}", coolprop_path.display()); + + // Configure build for CoolProp + println!( + "cargo:rustc-link-search=native={}/build", + coolprop_path.display() + ); + } + + // Link against CoolProp + if static_linking { + // Static linking - find libCoolProp.a + println!("cargo:rustc-link-lib=static=CoolProp"); + } else { + // Dynamic linking + println!("cargo:rustc-link-lib=dylib=CoolProp"); + } + + // Link required system libraries + println!("cargo:rustc-link-lib=dylib=m"); + println!("cargo:rustc-link-lib=dylib=stdc++"); + + // Tell Cargo to rerun if build.rs changes + println!("cargo:rerun-if-changed=build.rs"); + + 
println!( + "cargo:warning=CoolProp source not found in vendor/. + For full static build, run: + git clone https://github.com/CoolProp/CoolProp.git vendor/coolprop" + ); +} diff --git a/crates/fluids/coolprop-sys/src/lib.rs b/crates/fluids/coolprop-sys/src/lib.rs new file mode 100644 index 0000000..dbb05f9 --- /dev/null +++ b/crates/fluids/coolprop-sys/src/lib.rs @@ -0,0 +1,336 @@ +//! FFI bindings to CoolProp C++ library. +//! +//! This module provides low-level FFI bindings to the CoolProp library. +//! All functions are unsafe and require proper error handling. + +#![allow(dead_code)] + +use libc::{c_char, c_double, c_int}; +use std::ffi::CString; + +/// Error codes returned by CoolProp +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +#[repr(i32)] +pub enum CoolPropError { + /// No error occurred + NoError = 0, + /// Input error code + InputError = 1, + /// Library not loaded + LibraryNotLoaded = 2, + /// Unknown property value + UnknownPropertyValue = 3, + /// Unknown fluid + UnknownFluid = 4, + /// Unknown parameter + UnknownParameter = 5, + /// Not implemented + NotImplemented = 6, + /// Invalid number of parameters + InvalidNumber = 7, + /// Could not load library + CouldNotLoadLibrary = 8, + /// Invalid fluid pair + InvalidFluidPair = 9, + /// Version mismatch + VersionMismatch = 10, + /// Internal error + InternalError = 11, +} + +impl CoolPropError { + /// Convert CoolProp error code to Rust result + pub fn from_code(code: i32) -> Result<(), CoolPropError> { + match code { + 0 => Ok(()), + _ => Err(match code { + 1 => CoolPropError::InputError, + 2 => CoolPropError::LibraryNotLoaded, + 3 => CoolPropError::UnknownPropertyValue, + 4 => CoolPropError::UnknownFluid, + 5 => CoolPropError::UnknownParameter, + 6 => CoolPropError::NotImplemented, + 7 => CoolPropError::InvalidNumber, + 8 => CoolPropError::CouldNotLoadLibrary, + 9 => CoolPropError::InvalidFluidPair, + 10 => CoolPropError::VersionMismatch, + _ => CoolPropError::InternalError, + }), + } + } +} + +/// 
Output parameters for CoolProp +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +#[repr(i32)] +pub enum CoolPropParam { + /// Nothing + Nothing = 0, + /// Pressure [Pa] + Pressure = 1, + /// Temperature [K] + Temperature = 2, + /// Density [kg/m³] + Density = 3, + /// Specific enthalpy [J/kg] + Enthalpy = 4, + /// Specific entropy [J/kg/K] + Entropy = 5, + /// Specific internal energy [J/kg] + InternalEnergy = 6, + /// Specific heat at constant pressure [J/kg/K] + Cv = 7, + /// Specific heat at constant pressure [J/kg/K] + Cp = 8, + /// Quality [-] + Quality = 9, + /// Viscosity [Pa·s] + Viscosity = 10, + /// Thermal conductivity [W/m/K] + Conductivity = 11, + /// Surface tension [N/m] + SurfaceTension = 12, + /// Prandtl number [-] + Prandtl = 13, +} + +/// Input parameters for CoolProp +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +#[repr(i32)] +pub enum CoolPropInputPair { + /// No input + None = 0, + /// Pressure & Temperature + PT = 1, + /// Pressure & Density + PD = 2, + /// Pressure & Enthalpy + PH = 3, + /// Pressure & Entropy + PS = 4, + /// Pressure & Internal Energy + PU = 5, + /// Temperature & Density + TD = 6, + /// Temperature & Enthalpy + TH = 7, + /// Temperature & Entropy + TS = 8, + /// Temperature & Internal Energy + TU = 9, + /// Enthalpy & Entropy + HS = 10, + /// Density & Internal Energy + DU = 11, + /// Pressure & Quality + PQ = 12, + /// Temperature & Quality + TQ = 13, +} + +// CoolProp C functions +extern "C" { + /// Get a property value using pressure and temperature + fn CoolProp_PropsSI( + Output: c_char, + Name1: c_char, + Value1: c_double, + Name2: c_char, + Value2: c_double, + Fluid: *const c_char, + ) -> c_double; + + /// Get a property value using input pair + fn CoolProp_Props1SI(Fluid: *const c_char, Output: c_char) -> c_double; + + /// Get CoolProp version string + fn CoolProp_get_global_param_string( + Param: *const c_char, + Output: *mut c_char, + OutputLength: c_int, + ) -> c_int; + + /// Get fluid info + fn 
CoolProp_get_fluid_param_string( + Fluid: *const c_char, + Param: *const c_char, + Output: *mut c_char, + OutputLength: c_int, + ) -> c_int; + + /// Check if fluid exists + fn CoolProp_isfluid(Fluid: *const c_char) -> c_int; + + /// Get saturation temperature + fn CoolProp_Saturation_T(Fluid: *const c_char, Par: c_char, Value: c_double) -> c_double; + + /// Get critical point + fn CoolProp_CriticalPoint(Fluid: *const c_char, Output: c_char) -> c_double; +} + +/// Get a thermodynamic property using pressure and temperature. +/// +/// # Arguments +/// * `property` - The property to retrieve (e.g., "D" for density, "H" for enthalpy) +/// * `p` - Pressure in Pa +/// * `t` - Temperature in K +/// * `fluid` - Fluid name (e.g., "R134a") +/// +/// # Returns +/// The property value in SI units, or NaN if an error occurs +pub unsafe fn props_si_pt(property: &str, p: f64, t: f64, fluid: &str) -> f64 { + let prop = property.as_bytes()[0] as c_char; + let fluid_c = CString::new(fluid).unwrap(); + + CoolProp_PropsSI(prop, b'P' as c_char, p, b'T' as c_char, t, fluid_c.as_ptr()) +} + +/// Get a thermodynamic property using pressure and enthalpy. +/// +/// # Arguments +/// * `property` - The property to retrieve +/// * `p` - Pressure in Pa +/// * `h` - Specific enthalpy in J/kg +/// * `fluid` - Fluid name +/// +/// # Returns +/// The property value in SI units, or NaN if an error occurs +pub unsafe fn props_si_ph(property: &str, p: f64, h: f64, fluid: &str) -> f64 { + let prop = property.as_bytes()[0] as c_char; + let fluid_c = CString::new(fluid).unwrap(); + + CoolProp_PropsSI(prop, b'P' as c_char, p, b'H' as c_char, h, fluid_c.as_ptr()) +} + +/// Get a thermodynamic property using temperature and quality (saturation). +/// +/// # Arguments +/// * `property` - The property to retrieve (D, H, S, P, etc.) 
+/// * `t` - Temperature in K +/// * `q` - Quality (0 = saturated liquid, 1 = saturated vapor) +/// * `fluid` - Fluid name +/// +/// # Returns +/// The property value in SI units, or NaN if an error occurs +pub unsafe fn props_si_tq(property: &str, t: f64, q: f64, fluid: &str) -> f64 { + let prop = property.as_bytes()[0] as c_char; + let fluid_c = CString::new(fluid).unwrap(); + + CoolProp_PropsSI(prop, b'T' as c_char, t, b'Q' as c_char, q, fluid_c.as_ptr()) +} + +/// Get a thermodynamic property using pressure and quality. +/// +/// # Arguments +/// * `property` - The property to retrieve +/// * `p` - Pressure in Pa +/// * `x` - Quality (0-1) +/// * `fluid` - Fluid name +/// +/// # Returns +/// The property value in SI units, or NaN if an error occurs +pub unsafe fn props_si_px(property: &str, p: f64, x: f64, fluid: &str) -> f64 { + let prop = property.as_bytes()[0] as c_char; + let fluid_c = CString::new(fluid).unwrap(); + + CoolProp_PropsSI( + prop, + b'P' as c_char, + p, + b'Q' as c_char, // Q for quality + x, + fluid_c.as_ptr(), + ) +} + +/// Get critical point temperature for a fluid. +/// +/// # Arguments +/// * `fluid` - Fluid name +/// +/// # Returns +/// Critical temperature in K, or NaN if unavailable +pub unsafe fn critical_temperature(fluid: &str) -> f64 { + let fluid_c = CString::new(fluid).unwrap(); + CoolProp_CriticalPoint(fluid_c.as_ptr(), b'T' as c_char) +} + +/// Get critical point pressure for a fluid. +/// +/// # Arguments +/// * `fluid` - Fluid name +/// +/// # Returns +/// Critical pressure in Pa, or NaN if unavailable +pub unsafe fn critical_pressure(fluid: &str) -> f64 { + let fluid_c = CString::new(fluid).unwrap(); + CoolProp_CriticalPoint(fluid_c.as_ptr(), b'P' as c_char) +} + +/// Get critical point density for a fluid. 
+/// +/// # Arguments +/// * `fluid` - Fluid name +/// +/// # Returns +/// Critical density in kg/m³, or NaN if unavailable +pub unsafe fn critical_density(fluid: &str) -> f64 { + let fluid_c = CString::new(fluid).unwrap(); + CoolProp_CriticalPoint(fluid_c.as_ptr(), b'D' as c_char) +} + +/// Check if a fluid is available in CoolProp. +/// +/// # Arguments +/// * `fluid` - Fluid name +/// +/// # Returns +/// `true` if the fluid is available +pub unsafe fn is_fluid_available(fluid: &str) -> bool { + let fluid_c = CString::new(fluid).unwrap(); + CoolProp_isfluid(fluid_c.as_ptr()) != 0 +} + +/// Get CoolProp version string. +/// +/// # Returns +/// Version string (e.g., "6.14.0") +pub fn get_version() -> String { + unsafe { + let mut buffer = vec![0u8; 32]; + let result = CoolProp_get_global_param_string( + b"version\0".as_ptr() as *const c_char, + buffer.as_mut_ptr() as *mut c_char, + buffer.len() as c_int, + ); + + if result == 0 { + String::from_utf8_lossy(&buffer) + .trim_end_matches('\0') + .to_string() + } else { + String::from("Unknown") + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_version() { + let version = get_version(); + assert!(!version.is_empty()); + } + + #[test] + fn test_fluid_available() { + // Test some common refrigerants + unsafe { + assert!(is_fluid_available("R134a")); + assert!(is_fluid_available("R410A")); + assert!(is_fluid_available("CO2")); + } + } +} diff --git a/crates/fluids/data/r134a.json b/crates/fluids/data/r134a.json new file mode 100644 index 0000000..db40d0d --- /dev/null +++ b/crates/fluids/data/r134a.json @@ -0,0 +1,63 @@ +{ + "fluid": "R134a", + "critical_point": { + "tc": 374.21, + "pc": 4059000, + "rho_c": 512 + }, + "single_phase": { + "pressure": [100000, 200000, 500000, 1000000, 2000000, 3000000], + "temperature": [250, 270, 290, 298.15, 320, 350], + "density": [ + 5.2, 4.9, 4.5, 4.4, 4.0, 3.6, + 12.0, 10.5, 9.0, 8.5, 7.5, 6.5, + 35.0, 28.0, 22.0, 20.0, 16.0, 12.0, + 75.0, 55.0, 40.0, 35.0, 
25.0, 18.0, + 150.0, 100.0, 65.0, 55.0, 38.0, 25.0, + 220.0, 140.0, 85.0, 70.0, 48.0, 30.0 + ], + "enthalpy": [ + 380000, 395000, 410000, 415000, 430000, 450000, + 370000, 388000, 405000, 412000, 428000, 448000, + 355000, 378000, 398000, 406000, 424000, 445000, + 340000, 365000, 390000, 400000, 420000, 442000, + 320000, 350000, 378000, 392000, 415000, 438000, + 300000, 335000, 368000, 384000, 410000, 435000 + ], + "entropy": [ + 1750, 1780, 1810, 1820, 1850, 1890, + 1720, 1760, 1795, 1805, 1840, 1880, + 1680, 1730, 1775, 1788, 1825, 1870, + 1630, 1695, 1750, 1765, 1810, 1860, + 1570, 1650, 1715, 1735, 1790, 1845, + 1510, 1605, 1685, 1710, 1770, 1830 + ], + "cp": [ + 900, 920, 950, 960, 1000, 1050, + 880, 910, 940, 950, 990, 1040, + 850, 890, 925, 940, 980, 1030, + 820, 870, 910, 928, 970, 1020, + 790, 850, 900, 920, 965, 1015, + 765, 835, 890, 915, 962, 1010 + ], + "cv": [ + 750, 770, 800, 810, 850, 900, + 730, 760, 790, 800, 840, 890, + 700, 740, 775, 790, 830, 880, + 670, 720, 760, 778, 820, 870, + 640, 700, 745, 765, 812, 862, + 615, 680, 730, 752, 805, 855 + ] + }, + "saturation": { + "temperature": [250, 260, 270, 280, 290, 298.15, 310, 320, 330, 340, 350], + "pressure": [164000, 232000, 320000, 430000, 565000, 666000, 890000, 1165000, 1500000, 1900000, 2370000], + "h_liq": [200000, 215000, 230000, 245000, 260000, 272000, 288000, 305000, 322000, 340000, 358000], + "h_vap": [395000, 402000, 408000, 413000, 417000, 420000, 423000, 425000, 426000, 427000, 427500], + "rho_liq": [1350, 1320, 1290, 1255, 1218, 1188, 1145, 1098, 1045, 985, 915], + "rho_vap": [8.2, 11.2, 15.0, 19.8, 25.8, 30.5, 39.5, 50.5, 64.0, 80.5, 101.0], + "s_liq": [950, 1000, 1050, 1095, 1140, 1175, 1225, 1275, 1325, 1375, 1425], + "s_vap": [1720, 1710, 1700, 1690, 1680, 1675, 1668, 1660, 1652, 1643, 1633 + ] + } +} diff --git a/crates/fluids/src/backend.rs b/crates/fluids/src/backend.rs new file mode 100644 index 0000000..66a21b2 --- /dev/null +++ b/crates/fluids/src/backend.rs @@ -0,0 +1,166 
@@ +//! Fluid backend trait and implementations. +//! +//! This module defines the core `FluidBackend` trait that abstracts the source +//! of thermodynamic property data, allowing the solver to switch between different +//! backends (CoolProp, tabular data, mock for testing). + +use crate::errors::FluidResult; +use crate::mixture::Mixture; +use crate::types::{CriticalPoint, FluidId, Phase, Property, FluidState, ThermoState}; +use entropyk_core::{Pressure, Temperature}; + +/// Trait for fluid property backends. +/// +/// Implementors must provide methods to query thermodynamic properties +/// for various fluids. This allows the solver to work with different +/// property sources (CoolProp, tabular data, mock data for testing). +/// +/// # Example +/// +/// ``` +/// use entropyk_fluids::{FluidBackend, FluidId, Property, FluidState, ThermoState, FluidError, FluidResult, CriticalPoint}; +/// +/// struct MyBackend; +/// impl FluidBackend for MyBackend { +/// fn property(&self, _fluid: FluidId, _property: Property, _state: FluidState) -> FluidResult { +/// Ok(1.0) +/// } +/// fn critical_point(&self, fluid: FluidId) -> FluidResult { +/// Err(FluidError::NoCriticalPoint { fluid: fluid.0 }) +/// } +/// fn is_fluid_available(&self, _fluid: &FluidId) -> bool { false } +/// fn phase(&self, _fluid: FluidId, _state: FluidState) -> FluidResult { +/// Ok(entropyk_fluids::Phase::Unknown) +/// } +/// fn full_state(&self, _fluid: FluidId, _p: entropyk_core::Pressure, _h: entropyk_core::Enthalpy) -> FluidResult { +/// Err(FluidError::UnsupportedProperty { property: "full_state".to_string() }) +/// } +/// fn list_fluids(&self) -> Vec { vec![] } +/// } +/// ``` +pub trait FluidBackend: Send + Sync { + /// Query a thermodynamic property for a fluid at a given state. 
+ /// + /// # Arguments + /// * `fluid` - The fluid identifier (e.g., "R134a", "CO2") + /// * `property` - The property to query + /// * `state` - The thermodynamic state specification + /// + /// # Returns + /// The property value in SI units, or an error if the property + /// cannot be computed (unknown fluid, invalid state, etc.) + fn property(&self, fluid: FluidId, property: Property, state: FluidState) -> FluidResult; + + /// Compute the complete thermodynamic state of a fluid at a given pressure and enthalpy. + /// + /// This method is intended to be implemented by backends capable of natively calculating + /// all key parameters (phase, saturation temperatures, qualities, limits) without the user + /// needing to query them individually. + /// + /// # Arguments + /// * `fluid` - The fluid identifier + /// * `p` - The absolute pressure + /// * `h` - The specific enthalpy + /// + /// # Returns + /// The comprehensive `ThermoState` Snapshot, or an Error. + fn full_state(&self, fluid: FluidId, p: Pressure, h: entropyk_core::Enthalpy) -> FluidResult; + + /// Get critical point data for a fluid. + /// + /// # Arguments + /// * `fluid` - The fluid identifier + /// + /// # Returns + /// The critical point (Tc, Pc, density), or an error if not available + fn critical_point(&self, fluid: FluidId) -> FluidResult; + + /// Check if a fluid is available in this backend. + /// + /// # Arguments + /// * `fluid` - The fluid identifier + /// + /// # Returns + /// `true` if the fluid is available, `false` otherwise + fn is_fluid_available(&self, fluid: &FluidId) -> bool; + + /// Get the phase of a fluid at a given state. + /// + /// # Arguments + /// * `fluid` - The fluid identifier + /// * `state` - The thermodynamic state + /// + /// # Returns + /// The phase (Liquid, Vapor, TwoPhase, etc.) + fn phase(&self, fluid: FluidId, state: FluidState) -> FluidResult; + + /// List all available fluids in this backend. 
+ fn list_fluids(&self) -> Vec; + + /// Calculate the bubble point temperature for a mixture at given pressure. + /// + /// The bubble point is the temperature at which a liquid mixture begins to boil + /// (saturated liquid temperature). + /// + /// # Arguments + /// * `pressure` - The pressure in Pa + /// * `mixture` - The mixture composition + /// + /// # Returns + /// The bubble point temperature in Kelvin + fn bubble_point(&self, _pressure: Pressure, _mixture: &Mixture) -> FluidResult { + Err(crate::errors::FluidError::UnsupportedProperty { + property: "Bubble point calculation not supported by this backend".to_string(), + }) + } + + /// Calculate the dew point temperature for a mixture at given pressure. + /// + /// The dew point is the temperature at which a vapor mixture begins to condense + /// (saturated vapor temperature). + /// + /// # Arguments + /// * `pressure` - The pressure in Pa + /// * `mixture` - The mixture composition + /// + /// # Returns + /// The dew point temperature in Kelvin + fn dew_point(&self, _pressure: Pressure, _mixture: &Mixture) -> FluidResult { + Err(crate::errors::FluidError::UnsupportedProperty { + property: "Dew point calculation not supported by this backend".to_string(), + }) + } + + /// Calculate the temperature glide for a mixture at given pressure. + /// + /// Temperature glide is the difference between dew point and bubble point + /// temperatures: T_glide = T_dew - T_bubble. + /// This is non-zero for zeotropic mixtures and zero for azeotropes/pure fluids. + /// + /// # Arguments + /// * `pressure` - The pressure in Pa + /// * `mixture` - The mixture composition + /// + /// # Returns + /// The temperature glide in Kelvin + fn temperature_glide(&self, pressure: Pressure, mixture: &Mixture) -> FluidResult { + let t_bubble = self.bubble_point(pressure, mixture)?; + let t_dew = self.dew_point(pressure, mixture)?; + Ok(t_dew.to_kelvin() - t_bubble.to_kelvin()) + } + + /// Check if a mixture is supported by this backend. 
+ /// + /// # Arguments + /// * `mixture` - The mixture to check + /// + /// # Returns + /// `true` if the mixture is supported, `false` otherwise + fn is_mixture_supported(&self, mixture: &Mixture) -> bool { + // Default implementation: check if all components are available + mixture + .components() + .iter() + .all(|c| self.is_fluid_available(&FluidId::new(c))) + } +} diff --git a/crates/fluids/src/cache.rs b/crates/fluids/src/cache.rs new file mode 100644 index 0000000..3036b61 --- /dev/null +++ b/crates/fluids/src/cache.rs @@ -0,0 +1,235 @@ +//! Thread-local LRU cache for fluid property queries. +//! +//! Avoids redundant backend calls without mutex contention by using +//! per-thread storage. Cache keys use quantized state values since f64 +//! does not implement Hash. +//! +//! # Quantization Strategy +//! +//! State values (P, T, h, s, x) are quantized to 1e-9 relative precision +//! for cache key derivation. Solver iterations often repeat the same +//! (P,T) or (P,h) states; quantization should not lose cache hits for +//! typical thermodynamic ranges (P: 1e3–1e7 Pa, T: 200–600 K). + +use crate::mixture::Mixture; +use crate::types::{FluidId, Property, FluidState}; +use lru::LruCache; +use std::cell::RefCell; +use std::hash::{Hash, Hasher}; +use std::num::NonZeroUsize; + +/// Default cache capacity (entries). LRU eviction when exceeded. +pub const DEFAULT_CACHE_CAPACITY: usize = 10_000; + +/// Default capacity as NonZeroUsize for LruCache (avoids unwrap in production path). +const DEFAULT_CAP_NONZERO: NonZeroUsize = unsafe { NonZeroUsize::new_unchecked(DEFAULT_CACHE_CAPACITY) }; + +/// Quantization factor: values rounded to 1e-9 relative. +/// (v * 1e9).round() as i64 for Hash-compatible key. +#[inline] +fn quantize(v: f64) -> i64 { + if v.is_nan() || v.is_infinite() { + 0 + } else { + (v * 1e9).round() as i64 + } +} + +/// Cache key for fluid property lookups. +/// +/// Uses quantized state values since f64 does not implement Hash. 
+/// Includes backend_id so multiple CachedBackend instances don't mix results. +/// For mixtures, includes a hash of the mixture composition. +#[derive(Clone, Debug)] +pub struct CacheKey { + backend_id: usize, + fluid: String, + property: Property, + variant: u8, + p_quantized: i64, + second_quantized: i64, + mixture_hash: Option, +} + +impl PartialEq for CacheKey { + fn eq(&self, other: &Self) -> bool { + self.backend_id == other.backend_id + && self.fluid == other.fluid + && self.property == other.property + && self.variant == other.variant + && self.p_quantized == other.p_quantized + && self.second_quantized == other.second_quantized + && self.mixture_hash == other.mixture_hash + } +} + +impl Eq for CacheKey {} + +impl Hash for CacheKey { + fn hash(&self, state: &mut H) { + self.backend_id.hash(state); + self.fluid.hash(state); + self.property.hash(state); + self.variant.hash(state); + self.p_quantized.hash(state); + self.second_quantized.hash(state); + self.mixture_hash.hash(state); + } +} + +impl CacheKey { + /// Build a cache key from fluid, property, state, and backend id. 
+ pub fn new(backend_id: usize, fluid: &FluidId, property: Property, state: &FluidState) -> Self { + let (p, second, variant, mixture_hash) = match state { + FluidState::PressureTemperature(p, t) => (p.to_pascals(), t.to_kelvin(), 0u8, None), + FluidState::PressureEnthalpy(p, h) => { + (p.to_pascals(), h.to_joules_per_kg(), 1u8, None) + } + FluidState::PressureEntropy(p, s) => { + (p.to_pascals(), s.to_joules_per_kg_kelvin(), 2u8, None) + } + FluidState::PressureQuality(p, x) => (p.to_pascals(), x.value(), 3u8, None), + FluidState::PressureTemperatureMixture(p, t, ref m) => { + (p.to_pascals(), t.to_kelvin(), 4u8, Some(mix_hash(m))) + } + FluidState::PressureEnthalpyMixture(p, h, ref m) => { + (p.to_pascals(), h.to_joules_per_kg(), 5u8, Some(mix_hash(m))) + } + FluidState::PressureQualityMixture(p, x, ref m) => { + (p.to_pascals(), x.value(), 6u8, Some(mix_hash(m))) + } + }; + CacheKey { + backend_id, + fluid: fluid.0.clone(), + property, + variant, + p_quantized: quantize(p), + second_quantized: quantize(second), + mixture_hash, + } + } +} + +/// Compute a simple hash for a mixture for cache key purposes. +fn mix_hash(mixture: &Mixture) -> u64 { + use std::collections::hash_map::DefaultHasher; + let mut hasher = DefaultHasher::new(); + mixture.hash(&mut hasher); + hasher.finish() +} + +thread_local! { + static CACHE: RefCell> = RefCell::new( + LruCache::new(DEFAULT_CAP_NONZERO) + ); +} + +/// Get a value from the thread-local cache (no allocation on key build for hot path). +pub fn cache_get( + backend_id: usize, + fluid: &FluidId, + property: Property, + state: &FluidState, +) -> Option { + let key = CacheKey::new(backend_id, fluid, property, state); + CACHE.with(|c| { + let mut cache = c.borrow_mut(); + cache.get(&key).copied() + }) +} + +/// Insert a value into the thread-local cache. 
+pub fn cache_insert( + backend_id: usize, + fluid: &FluidId, + property: Property, + state: &FluidState, + value: f64, +) { + let key = CacheKey::new(backend_id, fluid, property, state); + CACHE.with(|c| { + let mut cache = c.borrow_mut(); + cache.put(key, value); + }); +} + +/// Clear the thread-local cache (e.g. at solver iteration boundaries). +pub fn cache_clear() { + CACHE.with(|c| { + let mut cache = c.borrow_mut(); + cache.clear(); + }); +} + +/// Resize the thread-local cache capacity. +pub fn cache_resize(capacity: NonZeroUsize) { + CACHE.with(|c| { + let mut cache = c.borrow_mut(); + cache.resize(capacity); + }); +} + +#[cfg(test)] +mod tests { + use super::*; + use entropyk_core::{Pressure, Temperature}; + + #[test] + fn test_cache_key_quantization() { + let fluid = FluidId::new("R134a"); + let state = FluidState::from_pt(Pressure::from_bar(1.0), Temperature::from_celsius(25.0)); + let key1 = CacheKey::new(0, &fluid, Property::Density, &state); + let key2 = CacheKey::new(0, &fluid, Property::Density, &state); + assert_eq!(key1, key2); + // Equal keys must have same hash (for HashMap use) + use std::collections::hash_map::DefaultHasher; + let mut h1 = DefaultHasher::new(); + let mut h2 = DefaultHasher::new(); + key1.hash(&mut h1); + key2.hash(&mut h2); + assert_eq!(h1.finish(), h2.finish()); + } + + #[test] + fn test_cache_key_different_states() { + let fluid = FluidId::new("R134a"); + let state1 = FluidState::from_pt(Pressure::from_bar(1.0), Temperature::from_celsius(25.0)); + let state2 = FluidState::from_pt(Pressure::from_bar(2.0), Temperature::from_celsius(25.0)); + let key1 = CacheKey::new(0, &fluid, Property::Density, &state1); + let key2 = CacheKey::new(0, &fluid, Property::Density, &state2); + assert_ne!(key1, key2); + } + + #[test] + fn test_lru_eviction() { + use std::num::NonZeroUsize; + + cache_clear(); + cache_resize(NonZeroUsize::new(2).expect("2 is non-zero")); + + let fluid = FluidId::new("R134a"); + let state1 = 
FluidState::from_pt(Pressure::from_bar(1.0), Temperature::from_celsius(20.0)); + let state2 = FluidState::from_pt(Pressure::from_bar(1.0), Temperature::from_celsius(25.0)); + let state3 = FluidState::from_pt(Pressure::from_bar(1.0), Temperature::from_celsius(30.0)); + + cache_insert(0, &fluid, Property::Density, &state1, 1000.0); + cache_insert(0, &fluid, Property::Density, &state2, 1100.0); + cache_insert(0, &fluid, Property::Density, &state3, 1200.0); + + assert!(cache_get(0, &fluid, Property::Density, &state1).is_none()); + assert_eq!(cache_get(0, &fluid, Property::Density, &state2), Some(1100.0)); + assert_eq!(cache_get(0, &fluid, Property::Density, &state3), Some(1200.0)); + + cache_resize(NonZeroUsize::new(DEFAULT_CACHE_CAPACITY).expect("capacity is non-zero")); + } + + #[test] + fn test_cache_key_different_backends() { + let fluid = FluidId::new("R134a"); + let state = FluidState::from_pt(Pressure::from_bar(1.0), Temperature::from_celsius(25.0)); + let key1 = CacheKey::new(0, &fluid, Property::Density, &state); + let key2 = CacheKey::new(1, &fluid, Property::Density, &state); + assert_ne!(key1, key2); + } +} diff --git a/crates/fluids/src/cached_backend.rs b/crates/fluids/src/cached_backend.rs new file mode 100644 index 0000000..13a66b4 --- /dev/null +++ b/crates/fluids/src/cached_backend.rs @@ -0,0 +1,174 @@ +//! Cached backend wrapper for fluid property queries. +//! +//! Wraps any `FluidBackend` with a thread-local LRU cache to avoid +//! redundant calculations. No mutex contention; zero allocation on cache hit. + +use crate::backend::FluidBackend; +use crate::cache::{cache_clear, cache_get, cache_insert}; +use crate::errors::FluidResult; +use crate::types::{CriticalPoint, FluidId, Phase, Property, FluidState}; +use std::sync::atomic::{AtomicUsize, Ordering}; + +static NEXT_BACKEND_ID: AtomicUsize = AtomicUsize::new(0); + +/// Backend wrapper that caches property queries in a thread-local LRU cache. 
+/// +/// Wraps any `FluidBackend` and caches successful property() results. +/// Other trait methods (critical_point, phase, etc.) delegate to the inner backend +/// without caching, as they are typically called less frequently. +/// +/// # Example +/// +/// ``` +/// use entropyk_fluids::{CachedBackend, FluidBackend, FluidId, Property, FluidState, TestBackend}; +/// use entropyk_core::{Pressure, Temperature}; +/// +/// let inner = TestBackend::new(); +/// let cached = CachedBackend::new(inner); +/// +/// let state = FluidState::from_pt( +/// Pressure::from_bar(1.0), +/// Temperature::from_celsius(25.0), +/// ); +/// +/// let v1 = cached.property(FluidId::new("R134a"), Property::Density, state.clone()).unwrap(); +/// let v2 = cached.property(FluidId::new("R134a"), Property::Density, state).unwrap(); +/// assert_eq!(v1, v2); // Second call served from cache +/// ``` +pub struct CachedBackend { + backend_id: usize, + inner: B, +} + +impl CachedBackend { + /// Create a new cached backend wrapping the given backend. + pub fn new(inner: B) -> Self { + let backend_id = NEXT_BACKEND_ID.fetch_add(1, Ordering::Relaxed); + CachedBackend { backend_id, inner } + } + + /// Clear the thread-local cache. Call at solver iteration boundaries if needed. + /// + /// **Note:** This clears the cache for *all* `CachedBackend` instances on the current + /// thread, since they share one thread-local cache. If you need per-backend invalidation, + /// use separate threads or a different caching strategy. + pub fn clear_cache(&self) { + cache_clear(); + } + + /// Get a reference to the inner backend. 
+ pub fn inner(&self) -> &B { + &self.inner + } +} + +impl FluidBackend for CachedBackend { + fn property(&self, fluid: FluidId, property: Property, state: FluidState) -> FluidResult { + if let Some(v) = cache_get(self.backend_id, &fluid, property, &state) { + return Ok(v); + } + let v = self.inner.property(fluid.clone(), property, state.clone())?; + cache_insert(self.backend_id, &fluid, property, &state, v); + Ok(v) + } + + fn critical_point(&self, fluid: FluidId) -> FluidResult { + self.inner.critical_point(fluid) + } + + fn is_fluid_available(&self, fluid: &FluidId) -> bool { + self.inner.is_fluid_available(fluid) + } + + fn phase(&self, fluid: FluidId, state: FluidState) -> FluidResult { + self.inner.phase(fluid, state) + } + + fn list_fluids(&self) -> Vec { + self.inner.list_fluids() + } + + fn full_state(&self, fluid: FluidId, p: entropyk_core::Pressure, h: entropyk_core::Enthalpy) -> FluidResult { + self.inner.full_state(fluid, p, h) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::test_backend::TestBackend; + use entropyk_core::{Pressure, Temperature}; + + #[test] + fn test_cache_hit_returns_same_value() { + let inner = TestBackend::new(); + let cached = CachedBackend::new(inner); + + let state = FluidState::from_pt(Pressure::from_bar(1.0), Temperature::from_celsius(25.0)); + + let v1 = cached + .property(FluidId::new("R134a"), Property::Density, state.clone()) + .unwrap(); + let v2 = cached + .property(FluidId::new("R134a"), Property::Density, state) + .unwrap(); + assert_eq!(v1, v2); + } + + #[test] + fn test_cache_miss_delegates_to_backend() { + let inner = TestBackend::new(); + let cached = CachedBackend::new(inner); + + let state = FluidState::from_pt(Pressure::from_bar(1.0), Temperature::from_celsius(25.0)); + + let v = cached + .property(FluidId::new("R134a"), Property::Density, state) + .unwrap(); + assert!(v > 0.0); + } + + #[test] + fn test_cache_invalidation() { + let inner = TestBackend::new(); + let cached = 
CachedBackend::new(inner); + + let state = FluidState::from_pt(Pressure::from_bar(1.0), Temperature::from_celsius(25.0)); + + let _ = cached + .property(FluidId::new("R134a"), Property::Density, state.clone()) + .unwrap(); + cached.clear_cache(); + // After clear, next query should still work (delegates to backend) + let v = cached + .property(FluidId::new("R134a"), Property::Density, state) + .unwrap(); + assert!(v > 0.0); + } + + #[test] + fn test_cached_benchmark_10k_queries() { + let inner = TestBackend::new(); + let cached = CachedBackend::new(inner); + + let state = FluidState::from_pt(Pressure::from_bar(1.0), Temperature::from_celsius(25.0)); + + for _ in 0..10_000 { + let _ = cached + .property(FluidId::new("R134a"), Property::Density, state.clone()) + .unwrap(); + } + } + + #[test] + fn test_cached_backend_implements_fluid_backend() { + let inner = TestBackend::new(); + let cached = CachedBackend::new(inner); + + assert!(cached.is_fluid_available(&FluidId::new("R134a"))); + let cp = cached.critical_point(FluidId::new("R134a")).unwrap(); + assert!(cp.temperature_kelvin() > 300.0); + let fluids = cached.list_fluids(); + assert!(!fluids.is_empty()); + } +} diff --git a/crates/fluids/src/coolprop.rs b/crates/fluids/src/coolprop.rs new file mode 100644 index 0000000..bbeadab --- /dev/null +++ b/crates/fluids/src/coolprop.rs @@ -0,0 +1,647 @@ +//! CoolProp backend implementation. +//! +//! This module provides the `CoolPropBackend` struct that implements the `FluidBackend` trait +//! using the CoolProp C++ library for thermodynamic property calculations. 
+ +#[cfg(feature = "coolprop")] +use crate::damped_backend::DampedBackend; +use crate::errors::{FluidError, FluidResult}; +use crate::types::{CriticalPoint, FluidId, Phase, Property, FluidState}; + +#[cfg(feature = "coolprop")] +use crate::mixture::Mixture; +#[cfg(feature = "coolprop")] +use std::collections::HashMap; +#[cfg(feature = "coolprop")] +use std::sync::RwLock; + +#[cfg(feature = "coolprop")] +use entropyk_coolprop_sys as coolprop; + +/// A fluid property backend using the CoolProp C++ library. +/// +/// This backend provides high-accuracy thermodynamic properties using the +/// CoolProp library, which implements the NIST REFPROP equations of state. +#[cfg(feature = "coolprop")] +pub struct CoolPropBackend { + /// Cache for critical point data + critical_cache: RwLock>, + /// List of available fluids + available_fluids: Vec, +} + +#[cfg(feature = "coolprop")] +impl CoolPropBackend { + /// Creates a new CoolPropBackend. + pub fn new() -> Self { + let backend = CoolPropBackend { + critical_cache: RwLock::new(HashMap::new()), + available_fluids: vec![ + FluidId::new("R134a"), + FluidId::new("R410A"), + FluidId::new("R404A"), + FluidId::new("R407C"), + FluidId::new("R32"), + FluidId::new("R125"), + FluidId::new("R744"), + FluidId::new("R290"), + FluidId::new("R600"), + FluidId::new("R600a"), + FluidId::new("Water"), + FluidId::new("Air"), + ], + }; + + backend + } + + /// Creates a new CoolPropBackend with critical point damping enabled. + /// + /// This wraps the backend with a `DampedBackend` to apply C1-continuous + /// damping to derivative properties (Cp, Cv, etc.) near the critical point, + /// preventing NaN values in Newton-Raphson iterations. + pub fn with_damping() -> DampedBackend { + DampedBackend::new(Self::new()) + } + + /// Get the CoolProp internal name for a fluid. 
+ fn fluid_name(&self, fluid: &FluidId) -> String { + // Map common names to CoolProp internal names + match fluid.0.to_lowercase().as_str() { + "r134a" => "R134a".to_string(), + "r410a" => "R410A".to_string(), + "r404a" => "R404A".to_string(), + "r407c" => "R407C".to_string(), + "r32" => "R32".to_string(), + "r125" => "R125".to_string(), + "co2" | "r744" => "CO2".to_string(), + "r290" => "R290".to_string(), + "r600" => "R600".to_string(), + "r600a" => "R600A".to_string(), + "water" => "Water".to_string(), + "air" => "Air".to_string(), + n => n.to_string(), + } + } + + /// Convert Property to CoolProp character code. + fn property_code(property: Property) -> &'static str { + match property { + Property::Density => "D", + Property::Enthalpy => "H", + Property::Entropy => "S", + Property::InternalEnergy => "U", + Property::Cp => "C", + Property::Cv => "O", // Cv in CoolProp + Property::SpeedOfSound => "A", + Property::Viscosity => "V", + Property::ThermalConductivity => "L", + Property::SurfaceTension => "I", + Property::Quality => "Q", + Property::Temperature => "T", + Property::Pressure => "P", + } + } +} + +#[cfg(feature = "coolprop")] +impl Default for CoolPropBackend { + fn default() -> Self { + Self::new() + } +} + +#[cfg(feature = "coolprop")] +impl crate::backend::FluidBackend for CoolPropBackend { + fn property(&self, fluid: FluidId, property: Property, state: FluidState) -> FluidResult { + // Handle mixture states + if state.is_mixture() { + return self.property_mixture(fluid, property, state); + } + + let coolprop_fluid = self.fluid_name(&fluid); + let prop_code = Self::property_code(property); + + // Check if fluid is available + if !self.is_fluid_available(&fluid) { + return Err(FluidError::UnknownFluid { fluid: fluid.0 }); + } + + // Query property based on state input type + let result = match state { + FluidState::PressureTemperature(p, t) => unsafe { + coolprop::props_si_pt(prop_code, p.to_pascals(), t.to_kelvin(), &coolprop_fluid) + }, + 
FluidState::PressureEnthalpy(p, h) => unsafe { + coolprop::props_si_ph( + prop_code, + p.to_pascals(), + h.to_joules_per_kg(), + &coolprop_fluid, + ) + }, + FluidState::PressureEntropy(_p, _s) => { + // CoolProp doesn't have direct PS, use iterative approach or PH + return Err(FluidError::UnsupportedProperty { + property: format!("P-S not directly supported, use P-T or P-h"), + }); + } + FluidState::PressureQuality(p, q) => unsafe { + coolprop::props_si_px(prop_code, p.to_pascals(), q.value(), &coolprop_fluid) + }, + // Mixture variants handled above + FluidState::PressureTemperatureMixture(_, _, _) => unreachable!(), + FluidState::PressureEnthalpyMixture(_, _, _) => unreachable!(), + FluidState::PressureQualityMixture(_, _, _) => unreachable!(), + }; + + // Check for NaN (indicates error in CoolProp) + if result.is_nan() { + return Err(FluidError::InvalidState { + reason: format!("CoolProp returned NaN for {} at {:?}", fluid, state), + }); + } + + Ok(result) + } + + fn critical_point(&self, fluid: FluidId) -> FluidResult { + // Check cache first + if let Some(cp) = self.critical_cache.read().unwrap().get(&fluid.0) { + return Ok(*cp); + } + + let coolprop_fluid = self.fluid_name(&fluid); + + unsafe { + let tc = coolprop::critical_temperature(&coolprop_fluid); + let pc = coolprop::critical_pressure(&coolprop_fluid); + let dc = coolprop::critical_density(&coolprop_fluid); + + if tc.is_nan() || pc.is_nan() || dc.is_nan() { + return Err(FluidError::NoCriticalPoint { fluid: fluid.0 }); + } + + let cp = CriticalPoint::new( + entropyk_core::Temperature::from_kelvin(tc), + entropyk_core::Pressure::from_pascals(pc), + dc, + ); + + // Cache the result + self.critical_cache.write().unwrap().insert(fluid.0, cp); + + Ok(cp) + } + } + + fn is_fluid_available(&self, fluid: &FluidId) -> bool { + let coolprop_fluid = self.fluid_name(fluid); + unsafe { coolprop::is_fluid_available(&coolprop_fluid) } + } + + fn phase(&self, fluid: FluidId, state: FluidState) -> FluidResult { + // 
Handle mixture states + if state.is_mixture() { + return self.phase_mix(fluid, state); + } + + let quality = self.property(fluid.clone(), Property::Quality, state)?; + + if quality < 0.0 { + // Below saturated liquid - likely subcooled liquid + Ok(Phase::Liquid) + } else if quality > 1.0 { + // Above saturated vapor - superheated + Ok(Phase::Vapor) + } else if (quality - 0.0).abs() < 1e-6 { + // Saturated liquid + Ok(Phase::Liquid) + } else if (quality - 1.0).abs() < 1e-6 { + // Saturated vapor + Ok(Phase::Vapor) + } else { + // Two-phase region + Ok(Phase::TwoPhase) + } + } + + fn list_fluids(&self) -> Vec { + self.available_fluids.clone() + } + + fn bubble_point( + &self, + pressure: entropyk_core::Pressure, + mixture: &Mixture, + ) -> FluidResult { + if !self.is_mixture_supported(mixture) { + return Err(FluidError::MixtureNotSupported(format!( + "One or more components not available: {:?}", + mixture.components() + ))); + } + + let cp_string = mixture.to_coolprop_string(); + let p_pa = pressure.to_pascals(); + + unsafe { + // For bubble point (saturated liquid), use Q=0 + let t = coolprop::props_si_tq("T", p_pa, 0.0, &cp_string); + if t.is_nan() { + return Err(FluidError::NumericalError( + "CoolProp returned NaN for bubble point calculation".to_string(), + )); + } + Ok(entropyk_core::Temperature::from_kelvin(t)) + } + } + + fn dew_point( + &self, + pressure: entropyk_core::Pressure, + mixture: &Mixture, + ) -> FluidResult { + if !self.is_mixture_supported(mixture) { + return Err(FluidError::MixtureNotSupported(format!( + "One or more components not available: {:?}", + mixture.components() + ))); + } + + let cp_string = mixture.to_coolprop_string(); + let p_pa = pressure.to_pascals(); + + unsafe { + // For dew point (saturated vapor), use Q=1 + let t = coolprop::props_si_tq("T", p_pa, 1.0, &cp_string); + if t.is_nan() { + return Err(FluidError::NumericalError( + "CoolProp returned NaN for dew point calculation".to_string(), + )); + } + 
Ok(entropyk_core::Temperature::from_kelvin(t)) + } + } + + fn is_mixture_supported(&self, mixture: &Mixture) -> bool { + mixture + .components() + .iter() + .all(|c| self.is_fluid_available(&FluidId::new(c))) + } + + /// Property calculation for mixtures. + fn property_mixture( + &self, + fluid: FluidId, + property: Property, + state: FluidState, + ) -> FluidResult { + // Extract mixture from state + let mixture = match state { + FluidState::PressureTemperatureMixture(_, _, m) => m, + FluidState::PressureEnthalpyMixture(_, _, m) => m, + FluidState::PressureQualityMixture(_, _, m) => m, + _ => unreachable!(), + }; + + if !self.is_mixture_supported(&mixture) { + return Err(FluidError::MixtureNotSupported(format!( + "One or more components not available: {:?}", + mixture.components() + ))); + } + + let cp_string = mixture.to_coolprop_string(); + let prop_code = Self::property_code(property); + + let result = match state { + FluidState::PressureTemperatureMixture(p, t, _) => unsafe { + coolprop::props_si_pt(prop_code, p.to_pascals(), t.to_kelvin(), &cp_string) + }, + FluidState::PressureEnthalpyMixture(p, h, _) => unsafe { + coolprop::props_si_ph(prop_code, p.to_pascals(), h.to_joules_per_kg(), &cp_string) + }, + FluidState::PressureQualityMixture(p, q, _) => unsafe { + coolprop::props_si_px(prop_code, p.to_pascals(), q.value(), &cp_string) + }, + _ => unreachable!(), + }; + + if result.is_nan() { + return Err(FluidError::InvalidState { + reason: format!("CoolProp returned NaN for mixture at {:?}", state), + }); + } + + Ok(result) + } + + /// Phase calculation for mixtures. 
+ fn phase_mix(&self, fluid: FluidId, state: FluidState) -> FluidResult { + let quality = self.property_mixture(fluid, Property::Quality, state)?; + + if quality < 0.0 { + Ok(Phase::Liquid) + } else if quality > 1.0 { + Ok(Phase::Vapor) + } else if (quality - 0.0).abs() < 1e-6 { + Ok(Phase::Liquid) + } else if (quality - 1.0).abs() < 1e-6 { + Ok(Phase::Vapor) + } else { + Ok(Phase::TwoPhase) + } + } + + fn full_state(&self, fluid: FluidId, p: entropyk_core::Pressure, h: entropyk_core::Enthalpy) -> FluidResult { + let coolprop_fluid = self.fluid_name(&fluid); + + if !self.is_fluid_available(&fluid) { + return Err(FluidError::UnknownFluid { fluid: fluid.0 }); + } + + let p_pa = p.to_pascals(); + let h_j_kg = h.to_joules_per_kg(); + + unsafe { + let t_k = coolprop::props_si_ph("T", p_pa, h_j_kg, &coolprop_fluid); + if t_k.is_nan() { + return Err(FluidError::InvalidState { + reason: format!("CoolProp returned NaN for Temperature at P={}, h={} for {}", p_pa, h_j_kg, fluid), + }); + } + + let s_j_kg_k = coolprop::props_si_ph("S", p_pa, h_j_kg, &coolprop_fluid); + let d_kg_m3 = coolprop::props_si_ph("D", p_pa, h_j_kg, &coolprop_fluid); + let q = coolprop::props_si_ph("Q", p_pa, h_j_kg, &coolprop_fluid); + + let phase = self.phase(fluid.clone(), FluidState::from_ph(p, h))?; + + let quality = if (0.0..=1.0).contains(&q) { + Some(crate::types::Quality::new(q)) + } else { + None + }; + + let t_bubble = coolprop::props_si_pq("T", p_pa, 0.0, &coolprop_fluid); + let t_dew = coolprop::props_si_pq("T", p_pa, 1.0, &coolprop_fluid); + + let (t_bubble_opt, subcooling) = if !t_bubble.is_nan() { + ( + Some(entropyk_core::Temperature::from_kelvin(t_bubble)), + if t_k < t_bubble { + Some(crate::types::TemperatureDelta::new(t_bubble - t_k)) + } else { + None + } + ) + } else { + (None, None) + }; + + let (t_dew_opt, superheat) = if !t_dew.is_nan() { + ( + Some(entropyk_core::Temperature::from_kelvin(t_dew)), + if t_k > t_dew { + Some(crate::types::TemperatureDelta::new(t_k - t_dew)) + } 
else { + None + } + ) + } else { + (None, None) + }; + + Ok(crate::types::ThermoState { + fluid, + pressure: p, + temperature: entropyk_core::Temperature::from_kelvin(t_k), + enthalpy: h, + entropy: crate::types::Entropy::from_joules_per_kg_kelvin(s_j_kg_k), + density: d_kg_m3, + phase, + quality, + superheat, + subcooling, + t_bubble: t_bubble_opt, + t_dew: t_dew_opt, + }) + } + } +} + +/// A placeholder backend when CoolProp is not available. +/// +/// This allows the crate to compile without CoolProp, but property +/// queries will return errors. +#[cfg(not(feature = "coolprop"))] +pub struct CoolPropBackend; + +#[cfg(not(feature = "coolprop"))] +impl CoolPropBackend { + /// Creates a new CoolPropBackend (placeholder). + pub fn new() -> Self { + CoolPropBackend + } +} + +#[cfg(not(feature = "coolprop"))] +impl Default for CoolPropBackend { + fn default() -> Self { + Self::new() + } +} + +#[cfg(not(feature = "coolprop"))] +impl crate::backend::FluidBackend for CoolPropBackend { + fn property( + &self, + _fluid: FluidId, + _property: Property, + _state: FluidState, + ) -> FluidResult { + Err(FluidError::CoolPropError( + "CoolProp not available. Enable 'coolprop' feature to use this backend.".to_string(), + )) + } + + fn critical_point(&self, _fluid: FluidId) -> FluidResult { + Err(FluidError::CoolPropError( + "CoolProp not available. Enable 'coolprop' feature to use this backend.".to_string(), + )) + } + + fn is_fluid_available(&self, _fluid: &FluidId) -> bool { + false + } + + fn phase(&self, _fluid: FluidId, _state: FluidState) -> FluidResult { + Err(FluidError::CoolPropError( + "CoolProp not available. Enable 'coolprop' feature to use this backend.".to_string(), + )) + } + + fn list_fluids(&self) -> Vec { + Vec::new() + } + + fn full_state(&self, _fluid: FluidId, _p: entropyk_core::Pressure, _h: entropyk_core::Enthalpy) -> FluidResult { + Err(FluidError::CoolPropError( + "CoolProp not available. 
Enable 'coolprop' feature to use this backend.".to_string(), + )) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::backend::FluidBackend; + #[cfg(feature = "coolprop")] + use crate::mixture::Mixture; + #[cfg(feature = "coolprop")] + use entropyk_core::{Pressure, Temperature}; + + #[test] + #[cfg(feature = "coolprop")] + fn test_backend_creation() { + let backend = CoolPropBackend::new(); + let fluids = backend.list_fluids(); + assert!(!fluids.is_empty()); + } + + #[test] + #[cfg(not(feature = "coolprop"))] + fn test_backend_without_feature() { + use crate::types::FluidState; + use entropyk_core::{Pressure, Temperature}; + + let backend = CoolPropBackend::new(); + let result = backend.property( + FluidId::new("R134a"), + Property::Density, + FluidState::from_pt(Pressure::from_bar(1.0), Temperature::from_celsius(25.0)), + ); + + assert!(result.is_err()); + } + + #[test] + fn test_fluid_name_mapping() { + #[cfg(feature = "coolprop")] + { + let backend = CoolPropBackend::new(); + assert_eq!(backend.fluid_name(&FluidId::new("R134a")), "R134a"); + assert_eq!(backend.fluid_name(&FluidId::new("CO2")), "CO2"); + assert_eq!(backend.fluid_name(&FluidId::new("R744")), "CO2"); + } + } + + #[test] + #[cfg(feature = "coolprop")] + fn test_mixture_is_supported() { + let backend = CoolPropBackend::new(); + + // R454B = R32 + R1234yf (both available in CoolProp) + let mixture = Mixture::from_mass_fractions(&[("R32", 0.5), ("R1234yf", 0.5)]).unwrap(); + assert!(backend.is_mixture_supported(&mixture)); + + // Unknown component should fail + let bad_mixture = Mixture::from_mass_fractions(&[("R32", 0.5), ("R999", 0.5)]).unwrap(); + assert!(!backend.is_mixture_supported(&bad_mixture)); + } + + #[test] + #[cfg(feature = "coolprop")] + fn test_bubble_point_r454b() { + let backend = CoolPropBackend::new(); + let mixture = Mixture::from_mass_fractions(&[("R32", 0.5), ("R1234yf", 0.5)]).unwrap(); + + // At 1 MPa (~10 bar), bubble point should be around 273K (0°C) for R454B + 
let pressure = Pressure::from_pascals(1e6); + let t_bubble = backend.bubble_point(pressure, &mixture).unwrap(); + + // Should be in reasonable range (250K - 300K) + assert!(t_bubble.to_kelvin() > 250.0 && t_bubble.to_kelvin() < 300.0); + } + + #[test] + #[cfg(feature = "coolprop")] + fn test_dew_point_r454b() { + let backend = CoolPropBackend::new(); + let mixture = Mixture::from_mass_fractions(&[("R32", 0.5), ("R1234yf", 0.5)]).unwrap(); + + let pressure = Pressure::from_pascals(1e6); + let t_dew = backend.dew_point(pressure, &mixture).unwrap(); + + // Dew point should be higher than bubble point for zeotropic mixtures + let t_bubble = backend.bubble_point(pressure, &mixture).unwrap(); + assert!(t_dew.to_kelvin() > t_bubble.to_kelvin()); + } + + #[test] + #[cfg(feature = "coolprop")] + fn test_temperature_glide_nonzero() { + let backend = CoolPropBackend::new(); + let mixture = Mixture::from_mass_fractions(&[("R32", 0.5), ("R1234yf", 0.5)]).unwrap(); + + let pressure = Pressure::from_pascals(1e6); + let glide = backend.temperature_glide(pressure, &mixture).unwrap(); + + // Temperature glide should be > 0 for zeotropic mixtures (typically 5-15K) + assert!( + glide > 0.0, + "Expected positive temperature glide for zeotropic mixture" + ); + } + + #[test] + #[cfg(feature = "coolprop")] + fn test_mixture_property_lookup() { + let backend = CoolPropBackend::new(); + let mixture = Mixture::from_mass_fractions(&[("R32", 0.5), ("R1234yf", 0.5)]).unwrap(); + + // Test (P, T) mixture state + let state = FluidState::from_pt_mix( + Pressure::from_bar(10.0), + Temperature::from_celsius(50.0), + mixture, + ); + + let density = backend + .property(FluidId::new("R454B"), Property::Density, state) + .unwrap(); + + assert!(density > 0.0); + } + + #[test] + #[cfg(feature = "coolprop")] + fn test_full_state_extraction() { + let backend = CoolPropBackend::new(); + let fluid = FluidId::new("R134a"); + let pressure = Pressure::from_bar(1.0); + let enthalpy = 
entropyk_core::Enthalpy::from_kilojoules_per_kg(415.0); // Superheated vapor region + + let state = backend.full_state(fluid.clone(), pressure, enthalpy).unwrap(); + + assert_eq!(state.fluid, fluid); + assert_eq!(state.pressure, pressure); + assert_eq!(state.enthalpy, enthalpy); + + // Temperature should be valid + assert!(state.temperature.to_celsius() > -30.0); + assert!(state.density > 0.0); + assert!(state.entropy.to_joules_per_kg_kelvin() > 0.0); + + // In superheated region, phase is Vapor, quality should be None, and superheat should exist + assert_eq!(state.phase, Phase::Vapor); + assert_eq!(state.quality, None); + assert!(state.superheat.is_some()); + assert!(state.superheat.unwrap().kelvin() > 0.0); + assert!(state.subcooling.is_none()); + assert!(state.t_dew.is_some()); + assert!(state.t_bubble.is_some()); + } +} diff --git a/crates/fluids/src/damped_backend.rs b/crates/fluids/src/damped_backend.rs new file mode 100644 index 0000000..b300362 --- /dev/null +++ b/crates/fluids/src/damped_backend.rs @@ -0,0 +1,341 @@ +//! Damped backend wrapper for fluid property queries. +//! +//! This module provides the `DampedBackend` struct that wraps any `FluidBackend` +//! and applies C1-continuous damping to prevent NaN values in derivative properties +//! near the critical point. + +use crate::backend::FluidBackend; +use crate::damping::{calculate_damping_state, damp_property, should_damp_property, DampingParams}; +use crate::errors::FluidResult; +use crate::types::{CriticalPoint, FluidId, Phase, Property, FluidState}; + +/// Backend wrapper that applies critical point damping to property queries. +/// +/// Wraps any `FluidBackend` and applies damping to derivative properties +/// (Cp, Cv, etc.) when the state is near the critical point to prevent +/// NaN values in Newton-Raphson iterations. +pub struct DampedBackend { + inner: B, + params: DampingParams, +} + +impl DampedBackend { + /// Create a new damped backend wrapping the given backend. 
+ pub fn new(inner: B) -> Self { + DampedBackend { + inner, + params: DampingParams::default(), + } + } + + /// Create a new damped backend with custom parameters. + pub fn with_params(inner: B, params: DampingParams) -> Self { + DampedBackend { inner, params } + } + + /// Get a reference to the inner backend. + pub fn inner(&self) -> &B { + &self.inner + } + + /// Get a mutable reference to the inner backend. + pub fn inner_mut(&mut self) -> &mut B { + &mut self.inner + } + + /// Get the damping parameters. + pub fn params(&self) -> &DampingParams { + &self.params + } + + /// Get critical point for a fluid. + fn critical_point_internal(&self, fluid: &FluidId) -> Option { + self.inner.critical_point(fluid.clone()).ok() + } + + /// Apply damping to a property value if needed. + fn apply_damping( + &self, + fluid: &FluidId, + property: Property, + state: &FluidState, + value: f64, + ) -> FluidResult { + // Only damp derivative properties + if !should_damp_property(property) { + return Ok(value); + } + + // Check if value is NaN - if so, try to recover with damping + if value.is_nan() { + // Try to get critical point + if let Some(cp) = self.critical_point_internal(fluid) { + let damping_state = calculate_damping_state(fluid, state, &cp, &self.params); + if damping_state.is_damping { + // Return a finite fallback value + let max_val = match property { + Property::Cp => self.params.cp_max, + Property::Cv => self.params.cv_max, + Property::Density => 1e5, + Property::SpeedOfSound => 1e4, + _ => self.params.derivative_max, + }; + return Ok(max_val * damping_state.blend_factor); + } + } + // No critical point info - return error + return Ok(self.params.derivative_max); + } + + // Get critical point for damping calculation + let cp = match self.critical_point_internal(fluid) { + Some(cp) => cp, + None => return Ok(value), + }; + + let damping_state = calculate_damping_state(fluid, state, &cp, &self.params); + + if !damping_state.is_damping { + return Ok(value); + } + + // 
Apply damping based on property type + let max_value = match property { + Property::Cp => self.params.cp_max, + Property::Cv => self.params.cv_max, + Property::Density => 1e5, + Property::SpeedOfSound => 1e4, + _ => self.params.derivative_max, + }; + + let damped = damp_property(value, max_value, damping_state.blend_factor); + Ok(damped) + } +} + +impl FluidBackend for DampedBackend { + fn property(&self, fluid: FluidId, property: Property, state: FluidState) -> FluidResult { + let value = self + .inner + .property(fluid.clone(), property, state.clone())?; + self.apply_damping(&fluid, property, &state, value) + } + + fn critical_point(&self, fluid: FluidId) -> FluidResult { + self.inner.critical_point(fluid) + } + + fn is_fluid_available(&self, fluid: &FluidId) -> bool { + self.inner.is_fluid_available(fluid) + } + + fn phase(&self, fluid: FluidId, state: FluidState) -> FluidResult { + self.inner.phase(fluid, state) + } + + fn list_fluids(&self) -> Vec { + self.inner.list_fluids() + } + + fn full_state(&self, fluid: FluidId, p: entropyk_core::Pressure, h: entropyk_core::Enthalpy) -> FluidResult { + self.inner.full_state(fluid, p, h) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::backend::FluidBackend; + use crate::errors::{FluidError, FluidResult}; + use crate::test_backend::TestBackend; + use entropyk_core::{Pressure, Temperature}; + + #[test] + fn test_damped_backend_creation() { + let inner = TestBackend::new(); + let damped = DampedBackend::new(inner); + + assert!(damped.is_fluid_available(&FluidId::new("R134a"))); + } + + #[test] + fn test_damped_backend_delegates_non_derivative() { + let inner = TestBackend::new(); + let damped = DampedBackend::new(inner); + + let state = FluidState::from_pt(Pressure::from_bar(1.0), Temperature::from_celsius(25.0)); + + // Enthalpy should be delegated without damping + let h = damped + .property(FluidId::new("R134a"), Property::Enthalpy, state.clone()) + .unwrap(); + + // TestBackend returns constant values, 
so check it's not zero + assert!(h > 0.0); + } + + #[test] + fn test_damped_backend_with_custom_params() { + let inner = TestBackend::new(); + let params = DampingParams { + reduced_temp_threshold: 0.1, + reduced_pressure_threshold: 0.1, + smoothness: 0.02, + cp_max: 5000.0, + cv_max: 3000.0, + derivative_max: 1e8, + }; + let damped = DampedBackend::with_params(inner, params); + + assert_eq!(damped.params().cp_max, 5000.0); + } + + #[test] + fn test_damped_backend_returns_finite_values() { + let inner = TestBackend::new(); + let damped = DampedBackend::new(inner); + + let state = FluidState::from_pt(Pressure::from_bar(1.0), Temperature::from_celsius(25.0)); + + // Cp should return a finite value (not NaN) + let cp = damped + .property(FluidId::new("R134a"), Property::Cp, state.clone()) + .unwrap(); + + assert!(!cp.is_nan(), "Cp should not be NaN"); + assert!(cp.is_finite(), "Cp should be finite"); + } + + #[test] + fn test_damped_backend_handles_nan_input() { + // Create a backend that returns NaN + struct NaNBackend; + + impl FluidBackend for NaNBackend { + fn property( + &self, + _fluid: FluidId, + property: Property, + _state: FluidState, + ) -> FluidResult { + if matches!(property, Property::Cp) { + Ok(f64::NAN) + } else { + Ok(1000.0) + } + } + fn critical_point(&self, _fluid: FluidId) -> FluidResult { + Ok(CriticalPoint::new( + Temperature::from_kelvin(304.13), + Pressure::from_pascals(7.3773e6), + 467.6, + )) + } + fn is_fluid_available(&self, _fluid: &FluidId) -> bool { + true + } + fn phase(&self, _fluid: FluidId, _state: FluidState) -> FluidResult { + Ok(Phase::Unknown) + } + fn list_fluids(&self) -> Vec { + vec![FluidId::new("CO2")] + } + fn full_state(&self, _fluid: FluidId, _p: entropyk_core::Pressure, _h: entropyk_core::Enthalpy) -> FluidResult { + Err(FluidError::CoolPropError( + "full_state not supported on NaNBackend".to_string(), + )) + } + } + + let inner = NaNBackend; + let damped = DampedBackend::new(inner); + + let state = FluidState::from_pt( 
+ Pressure::from_pascals(7.3773e6), + Temperature::from_kelvin(304.13), + ); + + // Should return a finite value instead of NaN + let cp = damped + .property(FluidId::new("CO2"), Property::Cp, state) + .unwrap(); + + assert!(!cp.is_nan(), "Should return finite value instead of NaN"); + } + + #[test] + #[cfg(feature = "coolprop")] + fn test_co2_near_critical_no_nan() { + use crate::coolprop::CoolPropBackend; + + let inner = CoolPropBackend::new(); + let damped = DampedBackend::new(inner); + + // CO2 at 0.99*Tc, 0.99*Pc - near critical + let tc = 304.13; + let pc = 7.3773e6; + let state = FluidState::from_pt( + Pressure::from_pascals(0.99 * pc), + Temperature::from_kelvin(0.99 * tc), + ); + + // Should not return NaN + let cp = damped + .property(FluidId::new("CO2"), Property::Cp, state) + .unwrap(); + + assert!(!cp.is_nan(), "Cp should not be NaN near critical point"); + assert!(cp.is_finite(), "Cp should be finite"); + } + + #[test] + #[cfg(feature = "coolprop")] + fn test_co2_supercritical_no_nan() { + use crate::coolprop::CoolPropBackend; + + let inner = CoolPropBackend::new(); + let damped = DampedBackend::new(inner); + + // CO2 at 1.01*Tc, 1.01*Pc - supercritical + let tc = 304.13; + let pc = 7.3773e6; + let state = FluidState::from_pt( + Pressure::from_pascals(1.01 * pc), + Temperature::from_kelvin(1.01 * tc), + ); + + // Should not return NaN + let cp = damped + .property(FluidId::new("CO2"), Property::Cp, state) + .unwrap(); + + assert!(!cp.is_nan(), "Cp should not be NaN in supercritical region"); + assert!(cp.is_finite(), "Cp should be finite"); + } + + #[test] + #[cfg(feature = "coolprop")] + fn test_r134a_unchanged_far_from_critical() { + use crate::coolprop::CoolPropBackend; + + let inner_no_damp = CoolPropBackend::new(); + let inner_damped = CoolPropBackend::new(); + let damped = DampedBackend::new(inner_damped); + + // R134a far from critical (room temp, 1 bar) + let state = FluidState::from_pt(Pressure::from_bar(1.0), 
Temperature::from_celsius(25.0)); + + let cp_no_damp = inner_no_damp + .property(FluidId::new("R134a"), Property::Cp, state.clone()) + .unwrap(); + let cp_damped = damped + .property(FluidId::new("R134a"), Property::Cp, state) + .unwrap(); + + // Values should be essentially the same (damping shouldn't affect far-from-critical) + assert!( + (cp_no_damp - cp_damped).abs() < 1.0, + "R134a far from critical should be unchanged" + ); + } +} diff --git a/crates/fluids/src/damping.rs b/crates/fluids/src/damping.rs new file mode 100644 index 0000000..9243a4e --- /dev/null +++ b/crates/fluids/src/damping.rs @@ -0,0 +1,452 @@ +//! Critical point damping for thermodynamic properties. +//! +//! This module provides functionality to detect near-critical regions and apply +//! C1-continuous damping to prevent NaN values in derivative properties (Cp, Cv, etc.) +//! that diverge near the critical point. + +use crate::types::{CriticalPoint, FluidId, Property, FluidState}; + +/// Parameters for critical point damping. +#[derive(Debug, Clone)] +pub struct DampingParams { + /// Reduced temperature threshold (default: 0.05 = 5%) + pub reduced_temp_threshold: f64, + /// Reduced pressure threshold (default: 0.05 = 5%) + pub reduced_pressure_threshold: f64, + /// Smoothness parameter for sigmoid transition (default: 0.01) + pub smoothness: f64, + /// Maximum allowed Cp value in J/(kg·K) (default: 1e6) + pub cp_max: f64, + /// Maximum allowed Cv value in J/(kg·K) (default: 1e6) + pub cv_max: f64, + /// Maximum allowed derivative value (default: 1e10) + pub derivative_max: f64, +} + +impl Default for DampingParams { + fn default() -> Self { + DampingParams { + reduced_temp_threshold: 0.05, + reduced_pressure_threshold: 0.05, + smoothness: 0.01, + cp_max: 1e6, + cv_max: 1e6, + derivative_max: 1e10, + } + } +} + +/// Extracts pressure and temperature from a FluidState. +/// Returns None if state cannot be converted to (P, T). 
+pub fn state_to_pt(state: &FluidState) -> Option<(f64, f64)> { + match state { + FluidState::PressureTemperature(p, t) => Some((p.to_pascals(), t.to_kelvin())), + FluidState::PressureEnthalpy(_, _) => None, + FluidState::PressureEntropy(_, _) => None, + FluidState::PressureQuality(_, _) => None, + FluidState::PressureTemperatureMixture(p, t, _) => Some((p.to_pascals(), t.to_kelvin())), + FluidState::PressureEnthalpyMixture(_, _, _) => None, + FluidState::PressureQualityMixture(_, _, _) => None, + } +} + +/// Calculate reduced coordinates (Tr, Pr) from absolute values and critical point. +/// +/// - Tr = T / Tc +/// - Pr = P / Pc +pub fn reduced_coordinates( + temperature_kelvin: f64, + pressure_pascals: f64, + cp: &CriticalPoint, +) -> (f64, f64) { + let tr = temperature_kelvin / cp.temperature_kelvin(); + let pr = pressure_pascals / cp.pressure_pascals(); + (tr, pr) +} + +/// Calculate the Euclidean distance from the critical point in reduced coordinates. +/// +/// Distance = sqrt((Tr - 1)^2 + (Pr - 1)^2) +pub fn reduced_distance(temperature_kelvin: f64, pressure_pascals: f64, cp: &CriticalPoint) -> f64 { + let (tr, pr) = reduced_coordinates(temperature_kelvin, pressure_pascals, cp); + ((tr - 1.0).powi(2) + (pr - 1.0).powi(2)).sqrt() +} + +/// Check if a state is within the near-critical region. +/// +/// A state is "near critical" if: +/// |Tr - 1| < threshold AND |Pr - 1| < threshold +pub fn near_critical_point( + temperature_kelvin: f64, + pressure_pascals: f64, + cp: &CriticalPoint, + threshold: f64, +) -> bool { + let (tr, pr) = reduced_coordinates(temperature_kelvin, pressure_pascals, cp); + (tr - 1.0).abs() < threshold && (pr - 1.0).abs() < threshold +} + +/// C1-continuous sigmoid blend factor. +/// +/// Blend factor α: 0 = far from critical (use raw), 1 = at critical (use damped). +/// C1-continuous: α and dα/d(distance) are continuous. 
+/// +/// - distance < threshold => near critical => α → 1 +/// - distance > threshold + width => far => α → 0 +pub fn sigmoid_blend(distance: f64, threshold: f64, width: f64) -> f64 { + // α = 0.5 * (1 + tanh((threshold - distance) / width)) + // At distance = 0 (critical): α ≈ 1 + // At distance = threshold: α = 0.5 + // At distance >> threshold: α → 0 + let x = (threshold - distance) / width; + 0.5 * (1.0 + x.tanh()) +} + +/// Derivative of sigmoid blend factor with respect to distance. +/// +/// This is used to ensure C1 continuity when applying damping. +pub fn sigmoid_blend_derivative(distance: f64, threshold: f64, width: f64) -> f64 { + // derivative of 0.5 * (1 + tanh((threshold - distance) / width)) with respect to distance + // = 0.5 * sech^2((threshold - distance) / width) * (-1 / width) + // = -0.5 * sech^2(x) / width where x = (threshold - distance) / width + let x = (threshold - distance) / width; + let sech = 1.0 / x.cosh(); + -0.5 * sech * sech / width +} + +/// Apply damping to a property value. +/// +/// Returns the damped value using sigmoid blending between raw and capped values. +pub fn damp_property(value: f64, max_value: f64, blend_factor: f64) -> f64 { + let capped = value.abs().min(max_value) * value.signum(); + blend_factor * capped + (1.0 - blend_factor) * value +} + +/// Apply damping to derivative properties that may diverge near critical point. +/// +/// Properties like Cp, Cv, and (∂ρ/∂P)_T can diverge near the critical point. +/// This function applies a smooth cap to prevent NaN values. +pub fn damp_derivative(value: f64, params: &DampingParams) -> f64 { + let blend = sigmoid_blend(0.0, params.reduced_temp_threshold, params.smoothness); + damp_property(value, params.derivative_max, blend) +} + +/// Check if a property should be damped. +/// +/// Derivative properties (Cp, Cv, etc.) may diverge near critical point. 
+pub fn should_damp_property(property: Property) -> bool { + matches!( + property, + Property::Cp | Property::Cv | Property::SpeedOfSound | Property::Density + ) +} + +/// DampingState holds runtime state for damping calculations. +#[derive(Debug, Clone)] +pub struct DampingState { + /// Whether damping is active for the current query + pub is_damping: bool, + /// The blend factor (0 = no damping, 1 = full damping) + pub blend_factor: f64, + /// Distance from critical point + pub distance: f64, +} + +impl DampingState { + /// Create a new DampingState with no damping + pub fn none() -> Self { + DampingState { + is_damping: false, + blend_factor: 0.0, + distance: f64::MAX, + } + } +} + +/// Calculate damping state for a given fluid and state. +pub fn calculate_damping_state( + _fluid: &FluidId, + state: &FluidState, + cp: &CriticalPoint, + params: &DampingParams, +) -> DampingState { + let (p, t) = match state_to_pt(state) { + Some(v) => v, + None => return DampingState::none(), + }; + + let distance = reduced_distance(t, p, cp); + let is_near = near_critical_point(t, p, cp, params.reduced_temp_threshold); + + if !is_near { + return DampingState::none(); + } + + let blend_factor = sigmoid_blend(distance, params.reduced_temp_threshold, params.smoothness); + + DampingState { + is_damping: true, + blend_factor, + distance, + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::types::FluidState; + use entropyk_core::{Pressure, Temperature}; + + fn make_co2_critical_point() -> CriticalPoint { + CriticalPoint::new( + Temperature::from_kelvin(304.13), + Pressure::from_pascals(7.3773e6), + 467.6, + ) + } + + #[test] + fn test_reduced_coordinates() { + let cp = make_co2_critical_point(); + + // At critical point: Tr = 1, Pr = 1 + let (tr, pr) = reduced_coordinates(304.13, 7.3773e6, &cp); + assert!((tr - 1.0).abs() < 1e-10); + assert!((pr - 1.0).abs() < 1e-10); + + // At 5% above critical + let (tr, pr) = reduced_coordinates(319.3365, 7.746165e6, &cp); + 
assert!((tr - 1.05).abs() < 1e-6); + assert!((pr - 1.05).abs() < 1e-6); + } + + #[test] + fn test_reduced_distance_at_critical() { + let cp = make_co2_critical_point(); + + // At critical point, distance should be 0 + let dist = reduced_distance(304.13, 7.3773e6, &cp); + assert!(dist.abs() < 1e-10); + } + + #[test] + fn test_near_critical_point_true() { + let cp = make_co2_critical_point(); + + // At critical point + assert!(near_critical_point(304.13, 7.3773e6, &cp, 0.05)); + + // 5% from critical + let t = 304.13 * 1.03; + let p = 7.3773e6 * 1.03; + assert!(near_critical_point(t, p, &cp, 0.05)); + } + + #[test] + fn test_near_critical_point_false() { + let cp = make_co2_critical_point(); + + // Far from critical (room temperature, 1 bar) + assert!(!near_critical_point(298.15, 1e5, &cp, 0.05)); + + // Outside 5% threshold + let t = 304.13 * 1.10; + let p = 7.3773e6 * 1.10; + assert!(!near_critical_point(t, p, &cp, 0.05)); + } + + #[test] + fn test_sigmoid_blend_at_critical() { + let threshold = 0.05; + let width = 0.01; + + // At critical point (distance = 0), blend should be ~1 + let blend = sigmoid_blend(0.0, threshold, width); + assert!( + blend > 0.99, + "Expected blend > 0.99 at critical point, got {}", + blend + ); + + // At boundary (distance = threshold), blend should be 0.5 + let blend = sigmoid_blend(threshold, threshold, width); + assert!( + (blend - 0.5).abs() < 0.001, + "Expected blend ~0.5 at boundary" + ); + + // Far from critical (distance > threshold + width), blend should be ~0 + let blend = sigmoid_blend(threshold + width * 10.0, threshold, width); + assert!(blend < 0.001); + } + + #[test] + fn test_sigmoid_blend_derivative() { + let threshold = 0.05; + let width = 0.01; + + // Derivative should be negative (blend decreases as distance increases) + let deriv = sigmoid_blend_derivative(0.0, threshold, width); + assert!(deriv < 0.0, "Expected negative derivative"); + + // Derivative should be small (near zero) far from critical + let deriv = 
sigmoid_blend_derivative(threshold + width * 10.0, threshold, width); + assert!(deriv.abs() < 1e-6); + } + + #[test] + fn test_sigmoid_c1_continuous() { + let threshold = 0.05; + let width = 0.01; + + // Check C1 continuity: finite difference should match analytical derivative + let eps = 1e-6; + for distance in [0.0, 0.02, 0.04, 0.06, 0.08] { + let deriv_analytical = sigmoid_blend_derivative(distance, threshold, width); + let deriv_numerical = (sigmoid_blend(distance + eps, threshold, width) + - sigmoid_blend(distance - eps, threshold, width)) + / (2.0 * eps); + + assert!( + (deriv_analytical - deriv_numerical).abs() < 1e-4, + "C1 continuity failed at distance {}: analytical={}, numerical={}", + distance, + deriv_analytical, + deriv_numerical + ); + } + } + + #[test] + fn test_damp_property() { + // Large value should be capped + let damped = damp_property(1e8, 1e6, 1.0); + assert!(damped.abs() < 1e6 + 1.0); + + // Small value should remain unchanged + let damped = damp_property(1000.0, 1e6, 1.0); + assert!((damped - 1000.0).abs() < 1.0); + + // Partial blend + let damped = damp_property(1e8, 1e6, 0.5); + assert!(damped > 1e6 && damped < 1e8); + } + + #[test] + fn test_state_to_pt() { + let state = FluidState::from_pt(Pressure::from_bar(1.0), Temperature::from_celsius(25.0)); + + let (p, t) = state_to_pt(&state).unwrap(); + assert!((p - 1e5).abs() < 1.0); + assert!((t - 298.15).abs() < 1.0); + + // Enthalpy state should return None + let state = FluidState::from_ph( + Pressure::from_bar(1.0), + entropyk_core::Enthalpy::from_kilojoules_per_kg(400.0), + ); + assert!(state_to_pt(&state).is_none()); + } + + #[test] + fn test_should_damp_property() { + assert!(should_damp_property(Property::Cp)); + assert!(should_damp_property(Property::Cv)); + assert!(should_damp_property(Property::Density)); + assert!(should_damp_property(Property::SpeedOfSound)); + + assert!(!should_damp_property(Property::Enthalpy)); + assert!(!should_damp_property(Property::Entropy)); + 
assert!(!should_damp_property(Property::Pressure)); + assert!(!should_damp_property(Property::Temperature)); + } + + #[test] + fn test_calculate_damping_state_near_critical() { + let cp = make_co2_critical_point(); + let params = DampingParams::default(); + + // At critical point + let state = FluidState::from_pt( + Pressure::from_pascals(7.3773e6), + Temperature::from_kelvin(304.13), + ); + let fluid = FluidId::new("CO2"); + + let damping = calculate_damping_state(&fluid, &state, &cp, ¶ms); + assert!(damping.is_damping); + assert!(damping.blend_factor > 0.9); + } + + #[test] + fn test_calculate_damping_state_far_from_critical() { + let cp = make_co2_critical_point(); + let params = DampingParams::default(); + + // Room temperature, 1 bar - far from critical + let state = FluidState::from_pt(Pressure::from_bar(1.0), Temperature::from_celsius(25.0)); + let fluid = FluidId::new("CO2"); + + let damping = calculate_damping_state(&fluid, &state, &cp, ¶ms); + assert!(!damping.is_damping); + } + + #[test] + fn test_damping_region_boundary_smooth_transition() { + let cp = make_co2_critical_point(); + let params = DampingParams::default(); + + // 4.9% from critical - inside region + let t_near = 304.13 * (1.0 + 0.049); + let p_near = 7.3773e6 * (1.0 + 0.049); + let state_near = FluidState::from_pt( + Pressure::from_pascals(p_near), + Temperature::from_kelvin(t_near), + ); + let damping_near = calculate_damping_state(&FluidId::new("CO2"), &state_near, &cp, ¶ms); + + // 5.1% from critical - outside region + let t_far = 304.13 * (1.0 + 0.051); + let p_far = 7.3773e6 * (1.0 + 0.051); + let state_far = FluidState::from_pt( + Pressure::from_pascals(p_far), + Temperature::from_kelvin(t_far), + ); + let damping_far = calculate_damping_state(&FluidId::new("CO2"), &state_far, &cp, ¶ms); + + // Should transition smoothly + assert!(damping_near.is_damping, "4.9% should be in damping region"); + assert!( + !damping_far.is_damping, + "5.1% should be outside damping region" + ); + } + + 
#[test] + fn test_damping_transition_is_smooth() { + let cp = make_co2_critical_point(); + let params = DampingParams::default(); + + // Test at various distances around the boundary + let distances = [0.03, 0.04, 0.045, 0.05, 0.055, 0.06]; + let mut previous_blend = 1.0; + + for d in distances { + let t = 304.13 * (1.0 + d); + let p = 7.3773e6 * (1.0 + d); + let state = + FluidState::from_pt(Pressure::from_pascals(p), Temperature::from_kelvin(t)); + let damping = calculate_damping_state(&FluidId::new("CO2"), &state, &cp, ¶ms); + + let blend = damping.blend_factor; + // Blend should decrease smoothly (no sudden jumps) + assert!( + blend <= previous_blend + 0.1, + "Blend should decrease smoothly: prev={}, curr={}", + previous_blend, + blend + ); + previous_blend = blend; + } + } +} diff --git a/crates/fluids/src/errors.rs b/crates/fluids/src/errors.rs new file mode 100644 index 0000000..77ad77c --- /dev/null +++ b/crates/fluids/src/errors.rs @@ -0,0 +1,104 @@ +//! Error types for fluid properties calculations. +//! +//! This module defines the `FluidError` enum that represents all possible errors +//! that can occur when querying fluid properties. + +use thiserror::Error; + +/// Errors that can occur when working with fluid properties. +#[derive(Error, Debug, Clone)] +pub enum FluidError { + /// The requested fluid is not available in the backend. + #[error("Fluid `{fluid}` not found")] + UnknownFluid { + /// The fluid identifier that was requested + fluid: String, + }, + + /// The thermodynamic state is invalid for the requested property. + #[error("Invalid state for property calculation: {reason}")] + InvalidState { + /// The reason why the state is invalid + reason: String, + }, + + /// Error from CoolProp C++ library. + #[error("CoolProp error: {0}")] + CoolPropError(String), + + /// Critical point data is not available for the given fluid. 
+ #[error("Critical point not available for `{fluid}`")] + NoCriticalPoint { + /// The fluid identifier that was requested + fluid: String, + }, + + /// The requested property is not supported by this backend. + #[error("Property `{property}` not supported")] + UnsupportedProperty { + /// The property that is not supported + property: String, + }, + + /// Numerical error during calculation (overflow, NaN, etc). + #[error("Numerical error: {0}")] + NumericalError(String), + + /// State is outside the tabular data bounds. + #[error("State ({p:.2} Pa, {t:.2} K) outside table bounds for fluid `{fluid}`")] + OutOfBounds { + /// Fluid identifier + fluid: String, + /// Pressure in Pa + p: f64, + /// Temperature in K + t: f64, + }, + + /// Table file could not be found or loaded. + #[error("Table file not found: {path}")] + TableNotFound { + /// Path that was attempted + path: String, + }, + + /// Mixture is not supported by the backend. + #[error("Mixture not supported: {0}")] + MixtureNotSupported(String), +} + +/// Result type alias for fluid operations. +pub type FluidResult = Result; + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_unknown_fluid_error() { + let err = FluidError::UnknownFluid { + fluid: "R999".to_string(), + }; + assert_eq!(format!("{}", err), "Fluid `R999` not found"); + } + + #[test] + fn test_invalid_state_error() { + let err = FluidError::InvalidState { + reason: "Pressure below triple point".to_string(), + }; + assert_eq!( + format!("{}", err), + "Invalid state for property calculation: Pressure below triple point" + ); + } + + #[test] + fn test_error_clone() { + let err1 = FluidError::UnknownFluid { + fluid: "R134a".to_string(), + }; + let err2 = err1.clone(); + assert_eq!(format!("{}", err1), format!("{}", err2)); + } +} diff --git a/crates/fluids/src/incompressible.rs b/crates/fluids/src/incompressible.rs new file mode 100644 index 0000000..5af7ba3 --- /dev/null +++ b/crates/fluids/src/incompressible.rs @@ -0,0 +1,578 @@ +//! 
Incompressible fluid properties backend. +//! +//! Provides lightweight polynomial models for water, glycol, and humid air +//! without external library calls. Properties obtained from IAPWS-IF97 +//! (water) and ASHRAE (glycol) reference data. + +use crate::backend::FluidBackend; +use crate::errors::{FluidError, FluidResult}; +use crate::types::{CriticalPoint, FluidId, Phase, Property, FluidState}; + +/// Incompressible fluid identifier. +/// +/// Maps FluidId strings to internal fluid types. Supports: +/// - Water +/// - EthyleneGlycol with concentration 0.0–0.6 mass fraction +/// - PropyleneGlycol with concentration 0.0–0.6 mass fraction +/// - HumidAir +#[derive(Debug, Clone, PartialEq)] +pub enum IncompFluid { + /// Pure water (liquid phase) + Water, + /// Ethylene glycol aqueous solution, concentration = mass fraction (0.0–0.6) + EthyleneGlycol(f64), + /// Propylene glycol aqueous solution, concentration = mass fraction (0.0–0.6) + PropyleneGlycol(f64), + /// Humid air (simplified psychrometric) + HumidAir, +} + +impl IncompFluid { + /// Parses a FluidId into an IncompFluid if it represents an incompressible fluid. 
+ /// + /// Recognized formats: + /// - "Water" + /// - "EthyleneGlycol" or "EthyleneGlycol30" (30% = 0.3) + /// - "PropyleneGlycol" or "PropyleneGlycol50" (50% = 0.5) + /// - "HumidAir" + pub fn from_fluid_id(fluid_id: &FluidId) -> Option { + let s = fluid_id.0.as_str(); + if s.eq_ignore_ascii_case("Water") { + return Some(IncompFluid::Water); + } + if s.eq_ignore_ascii_case("HumidAir") { + return Some(IncompFluid::HumidAir); + } + if s.to_lowercase().starts_with("ethyleneglycol") { + let conc = parse_glycol_concentration(s, "ethyleneglycol")?; + if (0.0..=0.6).contains(&conc) { + return Some(IncompFluid::EthyleneGlycol(conc)); + } + } + if s.to_lowercase().starts_with("propyleneglycol") { + let conc = parse_glycol_concentration(s, "propyleneglycol")?; + if (0.0..=0.6).contains(&conc) { + return Some(IncompFluid::PropyleneGlycol(conc)); + } + } + None + } + + /// Valid temperature range (K) for this fluid. + pub fn valid_temp_range(&self) -> (f64, f64) { + match self { + IncompFluid::Water => (273.15, 373.15), + IncompFluid::EthyleneGlycol(_) | IncompFluid::PropyleneGlycol(_) => (243.15, 373.15), + IncompFluid::HumidAir => (233.15, 353.15), + } + } +} + +fn parse_glycol_concentration(s: &str, prefix: &str) -> Option { + let rest = s.get(prefix.len()..)?.trim(); + if rest.is_empty() { + return Some(0.0); // Pure water in glycol context = 0% + } + rest.parse::().ok().map(|x| x / 100.0) +} + +/// Valid temperature range for incompressible fluids. +#[derive(Debug, Clone, Copy)] +pub struct ValidRange { + /// Minimum temperature (K) + pub min_temp_k: f64, + /// Maximum temperature (K) + pub max_temp_k: f64, +} + +impl ValidRange { + /// Checks if temperature is within valid range. + pub fn contains(&self, t_k: f64) -> bool { + t_k >= self.min_temp_k && t_k <= self.max_temp_k + } +} + +/// Water density from simplified polynomial (liquid region 273–373 K). +/// +/// Fitted to IAPWS-IF97 reference: 20°C→998.2, 50°C→988.0, 80°C→971.8 kg/m³ (within 0.1%). 
+/// ρ(kg/m³) = 1001.7 - 0.107*T°C - 0.00333*(T°C)² +fn water_density_kelvin(t_k: f64) -> f64 { + let t_c = t_k - 273.15; + 1001.7 - 0.107 * t_c - 0.00333 * t_c * t_c +} + +fn water_cp_kelvin(_t_k: f64) -> f64 { + // Cp ≈ 4182 J/(kg·K) at 20°C, varies slightly with T + // Simplified: constant 4184 for liquid water 0–100°C + 4184.0 +} + +fn water_viscosity_kelvin(t_k: f64) -> f64 { + let t_c = t_k - 273.15; + // μ(Pa·s) for liquid water: 20°C→0.001, 40°C→0.00065 + // Rational form: μ = 0.001 / (1 + 0.02*(T-20)) for T in °C + 0.001 / (1.0 + 0.02 * (t_c - 20.0).max(0.0)) +} + +/// Incompressible fluid properties backend. +/// +/// Implements FluidBackend for water, ethylene glycol, propylene glycol, +/// and humid air using lightweight polynomial models. No external library calls. +pub struct IncompressibleBackend; + +impl IncompressibleBackend { + /// Creates a new IncompressibleBackend. + pub fn new() -> Self { + IncompressibleBackend + } + + fn property_water(&self, property: Property, t_k: f64) -> FluidResult { + if !t_k.is_finite() { + return Err(FluidError::InvalidState { + reason: format!("Temperature {} K is not finite", t_k), + }); + } + let (min_t, max_t) = IncompFluid::Water.valid_temp_range(); + if t_k < min_t || t_k > max_t { + return Err(FluidError::InvalidState { + reason: format!( + "Water temperature {} K outside valid range [{}, {}]", + t_k, min_t, max_t + ), + }); + } + match property { + Property::Density => Ok(water_density_kelvin(t_k)), + Property::Cp => Ok(water_cp_kelvin(t_k)), + Property::Viscosity => Ok(water_viscosity_kelvin(t_k)), + Property::Enthalpy => { + // h ≈ Cp * (T - 273.15) relative to 0°C liquid + Ok(water_cp_kelvin(t_k) * (t_k - 273.15)) + } + Property::Temperature => Ok(t_k), + _ => Err(FluidError::UnsupportedProperty { + property: format!("{} for Water", property), + }), + } + } + + fn property_glycol( + &self, + property: Property, + t_k: f64, + concentration: f64, + is_ethylene: bool, + ) -> FluidResult { + if 
!t_k.is_finite() { + return Err(FluidError::InvalidState { + reason: format!("Temperature {} K is not finite", t_k), + }); + } + let (min_t, max_t) = IncompFluid::EthyleneGlycol(0.0).valid_temp_range(); + if t_k < min_t || t_k > max_t { + return Err(FluidError::InvalidState { + reason: format!( + "Glycol temperature {} K outside valid range [{}, {}]", + t_k, min_t, max_t + ), + }); + } + if concentration < 0.0 || concentration > 0.6 { + return Err(FluidError::InvalidState { + reason: format!( + "Glycol concentration {} outside valid range [0, 0.6]", + concentration + ), + }); + } + // ASHRAE simplified: density increases with concentration, decreases with T + let rho_water = water_density_kelvin(t_k); + let t_c = t_k - 273.15; + match (property, is_ethylene) { + (Property::Density, true) => { + // EG: ρ ≈ ρ_water*(1 - 0.4*X) + 1115*X for X=concentration (approx) + Ok(rho_water * (1.0 - concentration) + 1115.0 * concentration) + } + (Property::Density, false) => { + Ok(rho_water * (1.0 - concentration) + 1036.0 * concentration) + } + (Property::Cp, true) => { + // EG 30%: ~3900, EG 50%: ~3400 J/(kg·K) at 20°C + Ok(4184.0 * (1.0 - concentration) + 2400.0 * concentration) + } + (Property::Cp, false) => { + Ok(4184.0 * (1.0 - concentration) + 2500.0 * concentration) + } + (Property::Viscosity, _) => { + // Viscosity increases strongly with concentration and decreases with T + let mu_water = water_viscosity_kelvin(t_k); + let conc_factor = 1.0 + 10.0 * concentration; + let temp_factor = (-0.02 * (t_c - 20.0)).exp(); + Ok(mu_water * conc_factor * temp_factor) + } + (Property::Enthalpy, _) => { + let cp = if is_ethylene { + 4184.0 * (1.0 - concentration) + 2400.0 * concentration + } else { + 4184.0 * (1.0 - concentration) + 2500.0 * concentration + }; + Ok(cp * (t_k - 273.15)) + } + (Property::Temperature, _) => Ok(t_k), + _ => Err(FluidError::UnsupportedProperty { + property: format!("{} for glycol", property), + }), + } + } + + fn property_humid_air(&self, property: 
Property, t_k: f64) -> FluidResult { + if !t_k.is_finite() { + return Err(FluidError::InvalidState { + reason: format!("Temperature {} K is not finite", t_k), + }); + } + let (min_t, max_t) = IncompFluid::HumidAir.valid_temp_range(); + if t_k < min_t || t_k > max_t { + return Err(FluidError::InvalidState { + reason: format!( + "HumidAir temperature {} K outside valid range [{}, {}]", + t_k, min_t, max_t + ), + }); + } + match property { + Property::Cp => Ok(1005.0), // Dry air Cp + Property::Temperature => Ok(t_k), + Property::Density => Ok(1.2), // Approximate at 20°C, 1 atm + _ => Err(FluidError::UnsupportedProperty { + property: format!("{} for HumidAir", property), + }), + } + } +} + +impl Default for IncompressibleBackend { + fn default() -> Self { + Self::new() + } +} + +impl FluidBackend for IncompressibleBackend { + fn property(&self, fluid: FluidId, property: Property, state: FluidState) -> FluidResult { + let (t_k, _p) = match &state { + FluidState::PressureTemperature(p, t) => (t.to_kelvin(), p.to_pascals()), + _ => { + return Err(FluidError::InvalidState { + reason: "IncompressibleBackend requires PressureTemperature state".to_string(), + }) + } + }; + + if let Some(incomp) = IncompFluid::from_fluid_id(&fluid) { + match incomp { + IncompFluid::Water => self.property_water(property, t_k), + IncompFluid::EthyleneGlycol(conc) => { + self.property_glycol(property, t_k, conc, true) + } + IncompFluid::PropyleneGlycol(conc) => { + self.property_glycol(property, t_k, conc, false) + } + IncompFluid::HumidAir => self.property_humid_air(property, t_k), + } + } else { + Err(FluidError::UnknownFluid { fluid: fluid.0 }) + } + } + + fn critical_point(&self, fluid: FluidId) -> FluidResult { + if IncompFluid::from_fluid_id(&fluid).is_none() { + return Err(FluidError::UnknownFluid { fluid: fluid.0 }); + } + Err(FluidError::NoCriticalPoint { fluid: fluid.0 }) + } + + fn is_fluid_available(&self, fluid: &FluidId) -> bool { + IncompFluid::from_fluid_id(fluid).is_some() + } 
+ + fn phase(&self, fluid: FluidId, _state: FluidState) -> FluidResult { + match IncompFluid::from_fluid_id(&fluid) { + Some(IncompFluid::HumidAir) => Ok(Phase::Vapor), + Some(_) => Ok(Phase::Liquid), + None => Err(FluidError::UnknownFluid { fluid: fluid.0 }), + } + } + + fn list_fluids(&self) -> Vec { + vec![ + FluidId::new("Water"), + FluidId::new("EthyleneGlycol"), + FluidId::new("EthyleneGlycol30"), + FluidId::new("EthyleneGlycol50"), + FluidId::new("PropyleneGlycol"), + FluidId::new("PropyleneGlycol30"), + FluidId::new("PropyleneGlycol50"), + FluidId::new("HumidAir"), + ] + } + + fn full_state(&self, fluid: FluidId, p: entropyk_core::Pressure, h: entropyk_core::Enthalpy) -> FluidResult { + let t_k = self.property(fluid.clone(), Property::Temperature, FluidState::from_ph(p, h))?; + Err(FluidError::UnsupportedProperty { + property: format!("full_state for IncompressibleBackend: Temperature is {:.2} K but full state not natively implemented yet", t_k), + }) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use entropyk_core::{Pressure, Temperature}; + + #[test] + fn test_incomp_fluid_from_fluid_id() { + assert!(matches!( + IncompFluid::from_fluid_id(&FluidId::new("Water")), + Some(IncompFluid::Water) + )); + assert!(matches!( + IncompFluid::from_fluid_id(&FluidId::new("water")), + Some(IncompFluid::Water) + )); + assert!(matches!( + IncompFluid::from_fluid_id(&FluidId::new("EthyleneGlycol30")), + Some(IncompFluid::EthyleneGlycol(c)) if (c - 0.3).abs() < 0.01 + )); + assert!(matches!( + IncompFluid::from_fluid_id(&FluidId::new("PropyleneGlycol50")), + Some(IncompFluid::PropyleneGlycol(c)) if (c - 0.5).abs() < 0.01 + )); + assert!(IncompFluid::from_fluid_id(&FluidId::new("R134a")).is_none()); + } + + #[test] + fn test_water_density_at_temperatures() { + let backend = IncompressibleBackend::new(); + let state_20 = FluidState::from_pt( + Pressure::from_bar(1.0), + Temperature::from_celsius(20.0), + ); + let state_50 = FluidState::from_pt( + 
Pressure::from_bar(1.0), + Temperature::from_celsius(50.0), + ); + let state_80 = FluidState::from_pt( + Pressure::from_bar(1.0), + Temperature::from_celsius(80.0), + ); + + let rho_20 = backend + .property(FluidId::new("Water"), Property::Density, state_20) + .unwrap(); + let rho_50 = backend + .property(FluidId::new("Water"), Property::Density, state_50) + .unwrap(); + let rho_80 = backend + .property(FluidId::new("Water"), Property::Density, state_80) + .unwrap(); + + // IAPWS-IF97 reference: 20°C→998.2, 50°C→988.0, 80°C→971.8 kg/m³ (AC #2: within 0.1%) + assert!((rho_20 - 998.2).abs() / 998.2 < 0.001, "rho_20={}", rho_20); + assert!((rho_50 - 988.0).abs() / 988.0 < 0.001, "rho_50={}", rho_50); + assert!((rho_80 - 971.8).abs() / 971.8 < 0.001, "rho_80={}", rho_80); + } + + #[test] + fn test_water_cp_accuracy() { + let backend = IncompressibleBackend::new(); + let state = FluidState::from_pt( + Pressure::from_bar(1.0), + Temperature::from_celsius(20.0), + ); + let cp = backend + .property(FluidId::new("Water"), Property::Cp, state) + .unwrap(); + // IAPWS: Cp ≈ 4182 J/(kg·K) at 20°C (AC #2: within 0.1%) + assert!((cp - 4182.0).abs() / 4182.0 < 0.001, "Cp={}", cp); + } + + #[test] + fn test_water_out_of_range() { + let backend = IncompressibleBackend::new(); + let state_cold = FluidState::from_pt( + Pressure::from_bar(1.0), + Temperature::from_celsius(-10.0), + ); + let state_hot = FluidState::from_pt( + Pressure::from_bar(1.0), + Temperature::from_celsius(150.0), + ); + + assert!(backend + .property(FluidId::new("Water"), Property::Density, state_cold) + .is_err()); + assert!(backend + .property(FluidId::new("Water"), Property::Density, state_hot) + .is_err()); + } + + #[test] + fn test_critical_point_returns_error() { + let backend = IncompressibleBackend::new(); + let result = backend.critical_point(FluidId::new("Water")); + assert!(matches!(result, Err(FluidError::NoCriticalPoint { .. 
}))); + } + + #[test] + fn test_critical_point_unknown_fluid() { + let backend = IncompressibleBackend::new(); + let result = backend.critical_point(FluidId::new("R134a")); + assert!(matches!(result, Err(FluidError::UnknownFluid { .. }))); + } + + #[test] + fn test_water_enthalpy_reference() { + let backend = IncompressibleBackend::new(); + let state_0 = FluidState::from_pt( + Pressure::from_bar(1.0), + Temperature::from_celsius(0.0), + ); + let state_20 = FluidState::from_pt( + Pressure::from_bar(1.0), + Temperature::from_celsius(20.0), + ); + let h_0 = backend + .property(FluidId::new("Water"), Property::Enthalpy, state_0) + .unwrap(); + let h_20 = backend + .property(FluidId::new("Water"), Property::Enthalpy, state_20) + .unwrap(); + // h = Cp * (T - 273.15) relative to 0°C: h_0 ≈ 0, h_20 ≈ 4184 * 20 = 83680 J/kg + assert!(h_0.abs() < 1.0, "h at 0°C should be ~0"); + assert!((h_20 - 83680.0).abs() / 83680.0 < 0.01, "h at 20°C={}", h_20); + } + + #[test] + fn test_glycol_concentration_effect() { + let backend = IncompressibleBackend::new(); + let state = FluidState::from_pt( + Pressure::from_bar(1.0), + Temperature::from_celsius(20.0), + ); + let rho_water = backend + .property(FluidId::new("Water"), Property::Density, state.clone()) + .unwrap(); + let rho_eg30 = backend + .property(FluidId::new("EthyleneGlycol30"), Property::Density, state.clone()) + .unwrap(); + let rho_eg50 = backend + .property(FluidId::new("EthyleneGlycol50"), Property::Density, state.clone()) + .unwrap(); + let cp_eg30 = backend + .property(FluidId::new("EthyleneGlycol30"), Property::Cp, state.clone()) + .unwrap(); + let cp_eg50 = backend + .property(FluidId::new("EthyleneGlycol50"), Property::Cp, state.clone()) + .unwrap(); + // Higher concentration → higher density, lower Cp (ASHRAE) + assert!(rho_eg30 > rho_water && rho_eg50 > rho_eg30); + assert!(cp_eg50 < cp_eg30 && cp_eg30 < 4184.0); + } + + #[test] + fn test_glycol_out_of_range() { + let backend = IncompressibleBackend::new(); + let 
state_cold = FluidState::from_pt( + Pressure::from_bar(1.0), + Temperature::from_celsius(-40.0), + ); + let state_hot = FluidState::from_pt( + Pressure::from_bar(1.0), + Temperature::from_celsius(150.0), + ); + assert!(backend + .property(FluidId::new("EthyleneGlycol30"), Property::Density, state_cold) + .is_err()); + assert!(backend + .property(FluidId::new("EthyleneGlycol30"), Property::Density, state_hot) + .is_err()); + } + + #[test] + fn test_humid_air_psychrometrics() { + let backend = IncompressibleBackend::new(); + let state = FluidState::from_pt( + Pressure::from_bar(1.0), + Temperature::from_celsius(20.0), + ); + let cp = backend + .property(FluidId::new("HumidAir"), Property::Cp, state.clone()) + .unwrap(); + let rho = backend + .property(FluidId::new("HumidAir"), Property::Density, state) + .unwrap(); + // Dry air Cp ≈ 1005 J/(kg·K), ρ ≈ 1.2 kg/m³ at 20°C, 1 atm + assert!((cp - 1005.0).abs() < 1.0, "Cp={}", cp); + assert!((rho - 1.2).abs() < 0.2, "ρ={}", rho); + } + + #[test] + fn test_phase_humid_air_is_vapor() { + let backend = IncompressibleBackend::new(); + let state = FluidState::from_pt( + Pressure::from_bar(1.0), + Temperature::from_celsius(20.0), + ); + let phase = backend.phase(FluidId::new("HumidAir"), state).unwrap(); + assert_eq!(phase, Phase::Vapor); + } + + #[test] + fn test_nan_temperature_rejected() { + let backend = IncompressibleBackend::new(); + let state = FluidState::from_pt( + Pressure::from_bar(1.0), + Temperature::from_kelvin(f64::NAN), + ); + assert!(backend + .property(FluidId::new("Water"), Property::Density, state) + .is_err()); + } + + #[test] + fn test_glycol_properties() { + let backend = IncompressibleBackend::new(); + let state = FluidState::from_pt( + Pressure::from_bar(1.0), + Temperature::from_celsius(20.0), + ); + + let rho_eg30 = backend + .property(FluidId::new("EthyleneGlycol30"), Property::Density, state.clone()) + .unwrap(); + let rho_water = backend + .property(FluidId::new("Water"), Property::Density, 
state.clone()) + .unwrap(); + + // EG 30% should be denser than water + assert!(rho_eg30 > rho_water, "EG30 ρ={} should be > water ρ={}", rho_eg30, rho_water); + } + + #[test] + fn test_cached_backend_wrapper() { + use crate::cached_backend::CachedBackend; + + let inner = IncompressibleBackend::new(); + let backend = CachedBackend::new(inner); + + let state = FluidState::from_pt( + Pressure::from_bar(1.0), + Temperature::from_celsius(25.0), + ); + + let rho = backend + .property(FluidId::new("Water"), Property::Density, state) + .unwrap(); + assert!((rho - 997.0).abs() < 5.0); + } +} diff --git a/crates/fluids/src/lib.rs b/crates/fluids/src/lib.rs new file mode 100644 index 0000000..6010cfd --- /dev/null +++ b/crates/fluids/src/lib.rs @@ -0,0 +1,69 @@ +//! # Entropyk Fluids +//! +//! Fluid properties backend for the Entropyk thermodynamic simulation library. +//! +//! This crate provides the abstraction layer for thermodynamic property calculations, +//! allowing the solver to work with different backends (CoolProp, tabular interpolation, +//! test mocks) through a unified trait-based interface. +//! +//! ## Key Components +//! +//! - [`FluidBackend`] - The core trait that all backends implement +//! - [`TestBackend`] - A mock backend for unit testing +//! - [`CoolPropBackend`] - A backend using the CoolProp C++ library +//! - [`FluidError`] - Error types for fluid operations +//! - [`types`] - Core types like `FluidId`, `Property`, `FluidState`, `CriticalPoint` +//! - [`mixture`] - Mixture types for multi-component refrigerants +//! +//! ## Example +//! +//! ```rust +//! use entropyk_fluids::{FluidBackend, FluidId, Property, FluidState, TestBackend}; +//! use entropyk_core::{Pressure, Temperature}; +//! +//! // Create a test backend for unit testing +//! let backend = TestBackend::new(); +//! +//! // Query properties +//! let state = FluidState::from_pt( +//! Pressure::from_bar(1.0), +//! Temperature::from_celsius(25.0), +//! ); +//! +//! 
let density = backend.property( +//! FluidId::new("R134a"), +//! Property::Density, +//! state, +//! ).unwrap(); +//! +//! // In production use tracing::info! for observability (never println!) +//! ``` + +#![deny(warnings)] +#![warn(missing_docs)] + +pub mod backend; +pub mod cache; +pub mod cached_backend; +pub mod coolprop; +pub mod damped_backend; +pub mod damping; +pub mod errors; +pub mod incompressible; +pub mod mixture; +pub mod tabular; +pub mod tabular_backend; +pub mod test_backend; +pub mod types; + +pub use backend::FluidBackend; +pub use cached_backend::CachedBackend; +pub use coolprop::CoolPropBackend; +pub use damped_backend::DampedBackend; +pub use damping::{DampingParams, DampingState}; +pub use errors::{FluidError, FluidResult}; +pub use mixture::{Mixture, MixtureError}; +pub use tabular_backend::TabularBackend; +pub use test_backend::TestBackend; +pub use incompressible::{IncompFluid, IncompressibleBackend, ValidRange}; +pub use types::{CriticalPoint, Entropy, FluidId, Phase, Property, Quality, FluidState, ThermoState}; diff --git a/crates/fluids/src/mixture.rs b/crates/fluids/src/mixture.rs new file mode 100644 index 0000000..2acf211 --- /dev/null +++ b/crates/fluids/src/mixture.rs @@ -0,0 +1,357 @@ +//! Mixture types and utilities for multi-component refrigerants. +//! +//! This module provides types for representing refrigerant mixtures +//! (e.g., R454B = R32/R1234yf) and their thermodynamic properties. + +use std::fmt; +use std::hash::{Hash, Hasher}; + +/// A refrigerant mixture composed of multiple components. 
+/// +/// # Example +/// +/// ``` +/// use entropyk_fluids::mixture::Mixture; +/// +/// let r454b = Mixture::from_mass_fractions(&[ +/// ("R32", 0.5), +/// ("R1234yf", 0.5), +/// ]).unwrap(); +/// +/// let r410a = Mixture::from_mole_fractions(&[ +/// ("R32", 0.5), +/// ("R125", 0.5), +/// ]).unwrap(); +/// ``` +#[derive(Clone, Debug, PartialEq)] +pub struct Mixture { + /// Components in the mixture (names as used by CoolProp) + components: Vec, + /// Fractions (either mass or mole basis, depending on constructor) + fractions: Vec, + /// Whether fractions are mole-based (true) or mass-based (false) + mole_fractions: bool, +} + +impl Mixture { + /// Create a mixture from mass fractions. + /// + /// # Arguments + /// * `fractions` - Pairs of (fluid name, mass fraction) + /// + /// # Errors + /// Returns an error if fractions don't sum to 1.0 or are invalid + pub fn from_mass_fractions(fractions: &[(&str, f64)]) -> Result { + Self::validate_fractions(fractions)?; + Ok(Mixture { + components: fractions + .iter() + .map(|(name, _)| (*name).to_string()) + .collect(), + fractions: fractions.iter().map(|(_, frac)| *frac).collect(), + mole_fractions: false, + }) + } + + /// Create a mixture from mole fractions. 
+ /// + /// # Arguments + /// * `fractions` - Pairs of (fluid name, mole fraction) + /// + /// # Errors + /// Returns an error if fractions don't sum to 1.0 or are invalid + pub fn from_mole_fractions(fractions: &[(&str, f64)]) -> Result { + Self::validate_fractions(fractions)?; + Ok(Mixture { + components: fractions + .iter() + .map(|(name, _)| (*name).to_string()) + .collect(), + fractions: fractions.iter().map(|(_, frac)| *frac).collect(), + mole_fractions: true, + }) + } + + /// Validate that fractions are valid (sum to 1.0, all non-negative) + fn validate_fractions(fractions: &[(&str, f64)]) -> Result<(), MixtureError> { + if fractions.is_empty() { + return Err(MixtureError::InvalidComposition( + "Mixture must have at least one component".to_string(), + )); + } + + let sum: f64 = fractions.iter().map(|(_, frac)| frac).sum(); + if (sum - 1.0).abs() > 1e-6 { + return Err(MixtureError::InvalidComposition(format!( + "Fractions must sum to 1.0, got {}", + sum + ))); + } + + for (_, frac) in fractions { + if *frac < 0.0 || *frac > 1.0 { + return Err(MixtureError::InvalidComposition(format!( + "Fraction must be between 0 and 1, got {}", + frac + ))); + } + } + + Ok(()) + } + + /// Get the components in this mixture. + pub fn components(&self) -> &[String] { + &self.components + } + + /// Get the fractions (mass or mole basis depending on constructor). + pub fn fractions(&self) -> &[f64] { + &self.fractions + } + + /// Check if fractions are mole-based. + pub fn is_mole_fractions(&self) -> bool { + self.mole_fractions + } + + /// Check if fractions are mass-based. + pub fn is_mass_fractions(&self) -> bool { + !self.mole_fractions + } + + /// Convert to CoolProp mixture string format. 
+ /// + /// CoolProp format: "R32[0.5]&R125[0.5]" (mole fractions) + pub fn to_coolprop_string(&self) -> String { + self.components + .iter() + .zip(self.fractions.iter()) + .map(|(name, frac)| format!("{}[{}]", name, frac)) + .collect::>() + .join("&") + } + + /// Get the number of components in this mixture. + pub fn len(&self) -> usize { + self.components.len() + } + + /// Check if this mixture has no components. + pub fn is_empty(&self) -> bool { + self.components.is_empty() + } + + /// Convert mass fractions to mole fractions. + /// + /// Requires molar masses for each component. + /// Uses simplified molar masses for common refrigerants. + pub fn to_mole_fractions(&self) -> Result, MixtureError> { + if self.mole_fractions { + return Ok(self.fractions.to_vec()); + } + + let total: f64 = self + .components + .iter() + .zip(self.fractions.iter()) + .map(|(c, frac)| frac / Self::molar_mass(c)) + .sum(); + + Ok(self + .components + .iter() + .zip(self.fractions.iter()) + .map(|(c, frac)| (frac / Self::molar_mass(c)) / total) + .collect()) + } + + /// Get molar mass (g/mol) for common refrigerants. 
+ fn molar_mass(fluid: &str) -> f64 { + match fluid.to_uppercase().as_str() { + "R32" => 52.02, + "R125" => 120.02, + "R134A" => 102.03, + "R1234YF" => 114.04, + "R1234ZE" => 114.04, + "R410A" => 72.58, + "R404A" => 97.60, + "R407C" => 86.20, + "R290" | "PROPANE" => 44.10, + "R600" | "BUTANE" => 58.12, + "R600A" | "ISOBUTANE" => 58.12, + "CO2" | "R744" => 44.01, + "WATER" | "H2O" => 18.02, + "AIR" => 28.97, + "NITROGEN" | "N2" => 28.01, + "OXYGEN" | "O2" => 32.00, + _ => 50.0, // Default fallback + } + } +} + +impl Hash for Mixture { + fn hash(&self, state: &mut H) { + // Use CoolProp string as stable hash representation + self.to_coolprop_string().hash(state); + } +} + +impl Eq for Mixture {} + +impl fmt::Display for Mixture { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let fraction_type = if self.mole_fractions { "mole" } else { "mass" }; + write!(f, "Mixture ({} fractions): ", fraction_type)?; + for (i, (comp, frac)) in self + .components + .iter() + .zip(self.fractions.iter()) + .enumerate() + { + if i > 0 { + write!(f, ", ")?; + } + write!(f, "{}={:.2}", comp, frac)?; + } + Ok(()) + } +} + +/// Errors that can occur when working with mixtures. +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum MixtureError { + /// Invalid mixture composition + InvalidComposition(String), + /// Mixture not supported by backend + MixtureNotSupported(String), + /// Invalid fraction type + InvalidFractionType(String), +} + +impl fmt::Display for MixtureError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + MixtureError::InvalidComposition(msg) => { + write!(f, "Invalid mixture composition: {}", msg) + } + MixtureError::MixtureNotSupported(msg) => write!(f, "Mixture not supported: {}", msg), + MixtureError::InvalidFractionType(msg) => write!(f, "Invalid fraction type: {}", msg), + } + } +} + +impl std::error::Error for MixtureError {} + +/// Pre-defined common refrigerant mixtures. 
+pub mod predefined { + use super::*; + + /// R454B: R32 (50%) / R1234yf (50%) - mass fractions + pub fn r454b() -> Mixture { + Mixture::from_mass_fractions(&[("R32", 0.5), ("R1234yf", 0.5)]).unwrap() + } + + /// R410A: R32 (50%) / R125 (50%) - mass fractions + pub fn r410a() -> Mixture { + Mixture::from_mass_fractions(&[("R32", 0.5), ("R125", 0.5)]).unwrap() + } + + /// R407C: R32 (23%) / R125 (25%) / R134a (52%) - mass fractions + pub fn r407c() -> Mixture { + Mixture::from_mass_fractions(&[("R32", 0.23), ("R125", 0.25), ("R134a", 0.52)]).unwrap() + } + + /// R404A: R125 (44%) / R143a (52%) / R134a (4%) - mass fractions + pub fn r404a() -> Mixture { + Mixture::from_mass_fractions(&[("R125", 0.44), ("R143a", 0.52), ("R134a", 0.04)]).unwrap() + } + + /// R32/R125 (50/50) mixture - mass fractions + pub fn r32_r125_5050() -> Mixture { + Mixture::from_mass_fractions(&[("R32", 0.5), ("R125", 0.5)]).unwrap() + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_mixture_creation_mass() { + let mixture = Mixture::from_mass_fractions(&[("R32", 0.5), ("R1234yf", 0.5)]).unwrap(); + assert_eq!(mixture.components().len(), 2); + assert!(mixture.is_mass_fractions()); + } + + #[test] + fn test_mixture_creation_mole() { + let mixture = Mixture::from_mole_fractions(&[("R32", 0.5), ("R125", 0.5)]).unwrap(); + assert_eq!(mixture.components().len(), 2); + assert!(mixture.is_mole_fractions()); + } + + #[test] + fn test_coolprop_string() { + let mixture = Mixture::from_mass_fractions(&[("R32", 0.5), ("R1234yf", 0.5)]).unwrap(); + let cp_string = mixture.to_coolprop_string(); + assert!(cp_string.contains("R32[0.5]")); + assert!(cp_string.contains("R1234yf[0.5]")); + } + + #[test] + fn test_predefined_r454b() { + let mixture = predefined::r454b(); + assert_eq!(mixture.components().len(), 2); + } + + #[test] + fn test_invalid_fractions_sum() { + let result = Mixture::from_mass_fractions(&[("R32", 0.3), ("R125", 0.5)]); + assert!(result.is_err()); + } + + #[test] + fn 
test_invalid_fraction_negative() { + let result = Mixture::from_mass_fractions(&[("R32", -0.5), ("R125", 1.5)]); + assert!(result.is_err()); + } + + #[test] + fn test_mixture_hash() { + let m1 = Mixture::from_mass_fractions(&[("R32", 0.5), ("R1234yf", 0.5)]).unwrap(); + let m2 = Mixture::from_mass_fractions(&[("R32", 0.5), ("R1234yf", 0.5)]).unwrap(); + + use std::collections::hash_map::DefaultHasher; + let mut h1 = DefaultHasher::new(); + let mut h2 = DefaultHasher::new(); + m1.hash(&mut h1); + m2.hash(&mut h2); + assert_eq!(h1.finish(), h2.finish()); + } + + #[test] + fn test_mass_to_mole_conversion() { + // R454B: 50% mass R32, 50% mass R1234yf + // Molar masses: R32=52.02, R1234yf=114.04 + // Mole fraction R32 = (0.5/52.02) / (0.5/52.02 + 0.5/114.04) ≈ 0.687 + let mixture = Mixture::from_mass_fractions(&[("R32", 0.5), ("R1234yf", 0.5)]).unwrap(); + let mole_fracs = mixture.to_mole_fractions().unwrap(); + + // Verify sum = 1.0 + let sum: f64 = mole_fracs.iter().sum(); + assert!((sum - 1.0).abs() < 1e-6); + + // R32 should be ~69% mole fraction (higher due to lower molar mass) + assert!(mole_fracs[0] > 0.6 && mole_fracs[0] < 0.8); + } + + #[test] + fn test_mole_fractions_passthrough() { + let mixture = Mixture::from_mole_fractions(&[("R32", 0.5), ("R125", 0.5)]).unwrap(); + let mole_fracs = mixture.to_mole_fractions().unwrap(); + + assert!((mole_fracs[0] - 0.5).abs() < 1e-6); + assert!((mole_fracs[1] - 0.5).abs() < 1e-6); + } +} diff --git a/crates/fluids/src/tabular/generator.rs b/crates/fluids/src/tabular/generator.rs new file mode 100644 index 0000000..1c1ae07 --- /dev/null +++ b/crates/fluids/src/tabular/generator.rs @@ -0,0 +1,273 @@ +//! Table generation from CoolProp or reference data. +//! +//! When the `coolprop` feature is enabled, generates tables by querying CoolProp. +//! Otherwise, provides template/reference tables for testing. + +use crate::errors::FluidResult; +use std::path::Path; + +/// Generate a fluid table and save to JSON. 
+/// +/// When `coolprop` feature is enabled, uses CoolProp to compute property values. +/// Otherwise, loads from embedded reference data (R134a only). +pub fn generate_table(fluid_name: &str, output_path: &Path) -> FluidResult<()> { + #[cfg(feature = "coolprop")] + { + generate_from_coolprop(fluid_name, output_path) + } + + #[cfg(not(feature = "coolprop"))] + { + generate_from_reference(fluid_name, output_path) + } +} + +/// Map user-facing fluid name to CoolProp internal name. +#[cfg(feature = "coolprop")] +fn fluid_name_to_coolprop(name: &str) -> String { + match name.to_lowercase().as_str() { + "r134a" => "R134a".to_string(), + "r410a" => "R410A".to_string(), + "r404a" => "R404A".to_string(), + "r407c" => "R407C".to_string(), + "r32" => "R32".to_string(), + "r125" => "R125".to_string(), + "co2" | "r744" => "CO2".to_string(), + "r290" => "R290".to_string(), + "r600" => "R600".to_string(), + "r600a" => "R600A".to_string(), + "water" => "Water".to_string(), + "air" => "Air".to_string(), + n => n.to_string(), + } +} + +#[cfg(feature = "coolprop")] +fn generate_from_coolprop(fluid_name: &str, output_path: &Path) -> FluidResult<()> { + use entropyk_coolprop_sys as coolprop; + use serde::Serialize; + + let cp_fluid = fluid_name_to_coolprop(fluid_name); + if !unsafe { coolprop::is_fluid_available(&cp_fluid) } { + return Err(crate::errors::FluidError::UnknownFluid { + fluid: fluid_name.to_string(), + }); + } + + // Critical point + let tc = unsafe { coolprop::critical_temperature(&cp_fluid) }; + let pc = unsafe { coolprop::critical_pressure(&cp_fluid) }; + let rho_c = unsafe { coolprop::critical_density(&cp_fluid) }; + if tc.is_nan() || pc.is_nan() || rho_c.is_nan() { + return Err(crate::errors::FluidError::NoCriticalPoint { + fluid: fluid_name.to_string(), + }); + } + + // Single-phase grid: P (Pa), T (K) - similar to r134a.json + let pressures: Vec = vec![ + 100_000.0, + 200_000.0, + 500_000.0, + 1_000_000.0, + 2_000_000.0, + 3_000_000.0, + ]; + let temperatures: Vec 
= vec![250.0, 270.0, 290.0, 298.15, 320.0, 350.0]; + + let mut density = Vec::with_capacity(pressures.len() * temperatures.len()); + let mut enthalpy = Vec::with_capacity(pressures.len() * temperatures.len()); + let mut entropy = Vec::with_capacity(pressures.len() * temperatures.len()); + let mut cp = Vec::with_capacity(pressures.len() * temperatures.len()); + let mut cv = Vec::with_capacity(pressures.len() * temperatures.len()); + + for &p in &pressures { + for &t in &temperatures { + let d = unsafe { coolprop::props_si_pt("D", p, t, &cp_fluid) }; + let h = unsafe { coolprop::props_si_pt("H", p, t, &cp_fluid) }; + let s = unsafe { coolprop::props_si_pt("S", p, t, &cp_fluid) }; + let cp_val = unsafe { coolprop::props_si_pt("C", p, t, &cp_fluid) }; + let cv_val = unsafe { coolprop::props_si_pt("O", p, t, &cp_fluid) }; + if d.is_nan() || h.is_nan() { + return Err(crate::errors::FluidError::InvalidState { + reason: format!("CoolProp NaN at P={} Pa, T={} K", p, t), + }); + } + density.push(d); + enthalpy.push(h); + entropy.push(s); + cp.push(cp_val); + cv.push(cv_val); + } + } + + // Saturation table: T from triple to critical + let t_min = 250.0; + let t_max = (tc - 1.0).min(350.0); + let n_sat = 12; + let temp_points: Vec = (0..n_sat) + .map(|i| t_min + (t_max - t_min) * (i as f64) / ((n_sat - 1) as f64)) + .collect(); + + let mut sat_temps = Vec::with_capacity(n_sat); + let mut sat_pressure = Vec::with_capacity(n_sat); + let mut h_liq = Vec::with_capacity(n_sat); + let mut h_vap = Vec::with_capacity(n_sat); + let mut rho_liq = Vec::with_capacity(n_sat); + let mut rho_vap = Vec::with_capacity(n_sat); + let mut s_liq = Vec::with_capacity(n_sat); + let mut s_vap = Vec::with_capacity(n_sat); + + for &t in &temp_points { + let p_sat = unsafe { coolprop::props_si_tq("P", t, 0.0, &cp_fluid) }; + if p_sat.is_nan() || p_sat <= 0.0 { + continue; + } + sat_temps.push(t); + sat_pressure.push(p_sat); + h_liq.push(unsafe { coolprop::props_si_tq("H", t, 0.0, &cp_fluid) }); + 
h_vap.push(unsafe { coolprop::props_si_tq("H", t, 1.0, &cp_fluid) }); + rho_liq.push(unsafe { coolprop::props_si_tq("D", t, 0.0, &cp_fluid) }); + rho_vap.push(unsafe { coolprop::props_si_tq("D", t, 1.0, &cp_fluid) }); + s_liq.push(unsafe { coolprop::props_si_tq("S", t, 0.0, &cp_fluid) }); + s_vap.push(unsafe { coolprop::props_si_tq("S", t, 1.0, &cp_fluid) }); + } + + #[derive(Serialize)] + struct JsonTable { + fluid: String, + critical_point: JsonCriticalPoint, + single_phase: JsonSinglePhase, + saturation: JsonSaturation, + } + #[derive(Serialize)] + struct JsonCriticalPoint { + tc: f64, + pc: f64, + rho_c: f64, + } + #[derive(Serialize)] + struct JsonSinglePhase { + pressure: Vec, + temperature: Vec, + density: Vec, + enthalpy: Vec, + entropy: Vec, + cp: Vec, + cv: Vec, + } + #[derive(Serialize)] + struct JsonSaturation { + temperature: Vec, + pressure: Vec, + h_liq: Vec, + h_vap: Vec, + rho_liq: Vec, + rho_vap: Vec, + s_liq: Vec, + s_vap: Vec, + } + + let json = JsonTable { + fluid: fluid_name.to_string(), + critical_point: JsonCriticalPoint { tc, pc, rho_c }, + single_phase: JsonSinglePhase { + pressure: pressures, + temperature: temperatures, + density, + enthalpy, + entropy, + cp, + cv, + }, + saturation: JsonSaturation { + temperature: sat_temps, + pressure: sat_pressure, + h_liq, + h_vap, + rho_liq, + rho_vap, + s_liq, + s_vap, + }, + }; + + let contents = serde_json::to_string_pretty(&json).map_err(|e| { + crate::errors::FluidError::InvalidState { + reason: format!("JSON serialization failed: {}", e), + } + })?; + + std::fs::write(output_path, contents).map_err(|e| { + crate::errors::FluidError::TableNotFound { + path: format!("{}: {}", output_path.display(), e), + } + })?; + + Ok(()) +} + +#[cfg(not(feature = "coolprop"))] +fn generate_from_reference(fluid_name: &str, output_path: &Path) -> FluidResult<()> { + if fluid_name == "R134a" { + let json = include_str!("../../data/r134a.json"); + std::fs::write(output_path, json).map_err(|e| { + 
crate::errors::FluidError::TableNotFound { + path: format!("{}: {}", output_path.display(), e), + } + })?; + Ok(()) + } else { + Err(crate::errors::FluidError::UnknownFluid { + fluid: fluid_name.to_string(), + }) + } +} + +#[cfg(all(test, feature = "coolprop"))] +mod tests { + use super::*; + use crate::backend::FluidBackend; + use crate::coolprop::CoolPropBackend; + use crate::tabular_backend::TabularBackend; + use crate::types::{FluidId, Property, ThermoState}; + use approx::assert_relative_eq; + use entropyk_core::{Pressure, Temperature}; + + /// Validate generated tables against CoolProp spot checks (AC #2). + #[test] + fn test_generated_table_vs_coolprop_spot_checks() { + let temp = std::env::temp_dir().join("entropyk_r134a_test.json"); + generate_table("R134a", &temp).expect("generate_table must succeed"); + + let mut tabular = TabularBackend::new(); + tabular.load_table(&temp).unwrap(); + let _ = std::fs::remove_file(&temp); + + let coolprop = CoolPropBackend::new(); + let fluid = FluidId::new("R134a"); + + // Spot check: grid point (200 kPa, 290 K) + let state = ThermoState::from_pt( + Pressure::from_pascals(200_000.0), + Temperature::from_kelvin(290.0), + ); + let rho_t = tabular + .property(fluid.clone(), Property::Density, state) + .unwrap(); + let rho_c = coolprop + .property(fluid.clone(), Property::Density, state) + .unwrap(); + assert_relative_eq!(rho_t, rho_c, epsilon = 0.0001 * rho_c.max(1.0)); + + // Spot check: interpolated point (1 bar, 25°C) + let state2 = ThermoState::from_pt(Pressure::from_bar(1.0), Temperature::from_celsius(25.0)); + let h_t = tabular + .property(fluid.clone(), Property::Enthalpy, state2) + .unwrap(); + let h_c = coolprop + .property(fluid.clone(), Property::Enthalpy, state2) + .unwrap(); + assert_relative_eq!(h_t, h_c, epsilon = 0.0001 * h_c.max(1.0)); + } +} diff --git a/crates/fluids/src/tabular/interpolate.rs b/crates/fluids/src/tabular/interpolate.rs new file mode 100644 index 0000000..445a503 --- /dev/null +++ 
//! Bilinear interpolation for 2D property tables.
//!
//! Provides C1-continuous interpolation suitable for solver Jacobian assembly.

use std::cmp::Ordering;

/// Locate the index `i` of the grid cell `[grid[i], grid[i + 1]]` containing `x`.
///
/// `grid` must be sorted ascending; the caller guarantees `grid.len() >= 2`.
/// Returns `None` when `x` lies outside `[grid[0], grid[n - 1]]`.
///
/// An exact hit on the *last* grid point resolves to the final cell so that
/// the upper boundary is inclusive (the caller's fraction then clamps to 1.0).
/// The previous implementation rejected that case, making queries at exactly
/// p_max/t_max fail even though `in_bounds` reports them as in range.
#[inline]
fn cell_index(grid: &[f64], x: f64) -> Option<usize> {
    let n = grid.len();
    match grid.binary_search_by(|g| g.partial_cmp(&x).unwrap_or(Ordering::Equal)) {
        Ok(i) => {
            if i + 1 < n {
                Some(i) // exact hit on an interior/lower grid point
            } else if n >= 2 {
                Some(n - 2) // exact hit on the top grid point: last cell
            } else {
                None
            }
        }
        Err(i) => {
            // Insertion point 0 means x < grid[0]; n means x > grid[n - 1].
            if i == 0 || i >= n {
                None
            } else {
                Some(i - 1)
            }
        }
    }
}

/// Performs bilinear interpolation on a 2D grid.
///
/// Given a rectangular grid with values at (p_idx, t_idx), interpolates
/// the value at (p, t) where p and t are in the grid's coordinate space.
/// Returns None if (p, t) is outside the grid bounds (bounds are inclusive).
///
/// # Arguments
/// * `p_grid` - Pressure grid (must be sorted ascending)
/// * `t_grid` - Temperature grid (must be sorted ascending)
/// * `values` - 2D array [p_idx][t_idx], row-major
/// * `p` - Query pressure (Pa)
/// * `t` - Query temperature (K)
#[inline]
pub fn bilinear_interpolate(
    p_grid: &[f64],
    t_grid: &[f64],
    values: &[f64],
    p: f64,
    t: f64,
) -> Option<f64> {
    let n_p = p_grid.len();
    let n_t = t_grid.len();

    // Need at least a 2x2 grid and a matching row-major value array.
    if n_p < 2 || n_t < 2 || values.len() != n_p * n_t {
        return None;
    }

    // Reject NaN/inf to avoid panic in binary_search_by (Zero-Panic Policy).
    if !p.is_finite() || !t.is_finite() {
        return None;
    }

    let p_idx = cell_index(p_grid, p)?;
    let t_idx = cell_index(t_grid, t)?;

    let p0 = p_grid[p_idx];
    let p1 = p_grid[p_idx + 1];
    let t0 = t_grid[t_idx];
    let t1 = t_grid[t_idx + 1];

    let dp = p1 - p0;
    let dt = t1 - t0;

    // Degenerate (non-increasing) cells cannot be interpolated.
    if dp <= 0.0 || dt <= 0.0 {
        return None;
    }

    // Normalized cell coordinates, clamped to [0, 1] for boundary hits.
    let fp = ((p - p0) / dp).clamp(0.0, 1.0);
    let ft = ((t - t0) / dt).clamp(0.0, 1.0);

    let v00 = values[p_idx * n_t + t_idx];
    let v01 = values[p_idx * n_t + t_idx + 1];
    let v10 = values[(p_idx + 1) * n_t + t_idx];
    let v11 = values[(p_idx + 1) * n_t + t_idx + 1];

    // Interpolate along T at both pressure rows, then along P.
    let v0 = v00 * (1.0 - ft) + v01 * ft;
    let v1 = v10 * (1.0 - ft) + v11 * ft;

    Some(v0 * (1.0 - fp) + v1 * fp)
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_bilinear_inside() {
        let p = [100000.0, 200000.0, 300000.0];
        let t = [250.0, 300.0, 350.0];
        let v = [
            1.0, 2.0, 3.0, // p=100k
            4.0, 5.0, 6.0, // p=200k
            7.0, 8.0, 9.0, // p=300k
        ];

        let result = bilinear_interpolate(&p, &t, &v, 200000.0, 300.0);
        assert!(result.is_some());
        assert!((result.unwrap() - 5.0).abs() < 1e-10);
    }

    #[test]
    fn test_bilinear_interpolated() {
        let p = [0.0, 1.0];
        let t = [0.0, 1.0];
        let v = [0.0, 1.0, 1.0, 2.0]; // v(0,0)=0, v(0,1)=1, v(1,0)=1, v(1,1)=2

        let result = bilinear_interpolate(&p, &t, &v, 0.5, 0.5);
        assert!(result.is_some());
        // At center: (0+1+1+2)/4 = 1.0
        assert!((result.unwrap() - 1.0).abs() < 1e-10);
    }

    #[test]
    fn test_bilinear_out_of_bounds() {
        let p = [100000.0, 200000.0];
        let t = [250.0, 300.0];
        let v = [1.0, 2.0, 3.0, 4.0];

        assert!(bilinear_interpolate(&p, &t, &v, 50000.0, 300.0).is_none());
        assert!(bilinear_interpolate(&p, &t, &v, 300000.0, 300.0).is_none());
    }

    /// Regression: queries exactly at the maximum grid coordinates must
    /// succeed (inclusive bounds), matching `SinglePhaseTable::in_bounds`.
    #[test]
    fn test_bilinear_upper_boundary_inclusive() {
        let p = [100000.0, 200000.0];
        let t = [250.0, 300.0];
        let v = [1.0, 2.0, 3.0, 4.0];

        let corner = bilinear_interpolate(&p, &t, &v, 200000.0, 300.0);
        assert!((corner.unwrap() - 4.0).abs() < 1e-10);

        let edge = bilinear_interpolate(&p, &t, &v, 150000.0, 300.0);
        assert!((edge.unwrap() - 3.0).abs() < 1e-10);
    }

    #[test]
    fn test_bilinear_nan_rejected() {
        let p = [100000.0, 200000.0];
        let t = [250.0, 300.0];
        let v = [1.0, 2.0, 3.0, 4.0];

        assert!(bilinear_interpolate(&p, &t, &v, f64::NAN, 300.0).is_none());
        assert!(bilinear_interpolate(&p, &t, &v, 150000.0, f64::NAN).is_none());
        assert!(bilinear_interpolate(&p, &t, &v, f64::INFINITY, 300.0).is_none());
    }
}
Tabular fluid property backend. +//! +//! Pre-computed NIST-style tables with fast bilinear interpolation +//! for 100x performance vs direct EOS calls. + +mod interpolate; +mod table; + +pub mod generator; + +pub use table::{FluidTable, SaturationTable, SinglePhaseTable, TableCriticalPoint}; diff --git a/crates/fluids/src/tabular/table.rs b/crates/fluids/src/tabular/table.rs new file mode 100644 index 0000000..cc31e70 --- /dev/null +++ b/crates/fluids/src/tabular/table.rs @@ -0,0 +1,286 @@ +//! Fluid property table structure and loading. +//! +//! Defines the JSON format for tabular fluid data and provides loading logic. + +use crate::errors::{FluidError, FluidResult}; +use entropyk_core::{Pressure, Temperature}; +use serde::Deserialize; +use std::collections::HashMap; +use std::path::Path; + +use super::interpolate::bilinear_interpolate; + +/// Critical point data stored in table metadata. +#[derive(Debug, Clone)] +pub struct TableCriticalPoint { + /// Critical temperature (K) + pub temperature: Temperature, + /// Critical pressure (Pa) + pub pressure: Pressure, + /// Critical density (kg/m³) + pub density: f64, +} + +/// Single-phase property table (P, T) grid. +#[derive(Debug, Clone)] +pub struct SinglePhaseTable { + /// Pressure grid (Pa), ascending + pub pressure: Vec, + /// Temperature grid (K), ascending + pub temperature: Vec, + /// Property grids: density, enthalpy, entropy, cp, cv, etc. + /// Key: property name, Value: row-major 2D data [p_idx * n_t + t_idx] + pub properties: HashMap>, +} + +impl SinglePhaseTable { + /// Interpolate a property at (p, t). Returns error if out of bounds. 
+ #[inline] + pub fn interpolate( + &self, + property_name: &str, + p: f64, + t: f64, + fluid_name: &str, + ) -> FluidResult { + let values = self + .properties + .get(property_name) + .ok_or(FluidError::UnsupportedProperty { + property: property_name.to_string(), + })?; + + bilinear_interpolate(&self.pressure, &self.temperature, values, p, t).ok_or( + FluidError::OutOfBounds { + fluid: fluid_name.to_string(), + p, + t, + }, + ) + } + + /// Check if (p, t) is within table bounds. + #[inline] + pub fn in_bounds(&self, p: f64, t: f64) -> bool { + if self.pressure.is_empty() || self.temperature.is_empty() { + return false; + } + let p_min = self.pressure[0]; + let p_max = self.pressure[self.pressure.len() - 1]; + let t_min = self.temperature[0]; + let t_max = self.temperature[self.temperature.len() - 1]; + p >= p_min && p <= p_max && t >= t_min && t <= t_max + } +} + +/// Saturation line data for two-phase (P, x) lookups. +#[derive(Debug, Clone)] +pub struct SaturationTable { + /// Temperature (K) - independent variable + pub temperature: Vec, + /// Saturation pressure (Pa) + pub pressure: Vec, + /// Saturated liquid enthalpy (J/kg) + pub h_liq: Vec, + /// Saturated vapor enthalpy (J/kg) + pub h_vap: Vec, + /// Saturated liquid density (kg/m³) + pub rho_liq: Vec, + /// Saturated vapor density (kg/m³) + pub rho_vap: Vec, + /// Saturated liquid entropy (J/(kg·K)) + pub s_liq: Vec, + /// Saturated vapor entropy (J/(kg·K)) + pub s_vap: Vec, +} + +impl SaturationTable { + /// Find saturation properties at pressure P via 1D interpolation on P_sat(T). + /// Returns (T_sat, h_liq, h_vap, rho_liq, rho_vap, s_liq, s_vap). 
+ pub fn at_pressure(&self, p: f64) -> Option<(f64, f64, f64, f64, f64, f64, f64)> { + if self.pressure.is_empty() || self.pressure.len() != self.temperature.len() { + return None; + } + + // Find T such that P_sat(T) = p (pressure is monotonic with T) + let n = self.pressure.len(); + if p < self.pressure[0] || p > self.pressure[n - 1] { + return None; + } + + let idx = self.pressure.iter().position(|&x| x >= p).unwrap_or(n - 1); + + let i = if idx == 0 { 0 } else { idx - 1 }; + let j = (i + 1).min(n - 1); + + let p0 = self.pressure[i]; + let p1 = self.pressure[j]; + let frac = if (p1 - p0).abs() < 1e-15 { + 0.0 + } else { + ((p - p0) / (p1 - p0)).clamp(0.0, 1.0) + }; + + let t_sat = self.temperature[i] * (1.0 - frac) + self.temperature[j] * frac; + let h_liq = self.h_liq[i] * (1.0 - frac) + self.h_liq[j] * frac; + let h_vap = self.h_vap[i] * (1.0 - frac) + self.h_vap[j] * frac; + let rho_liq = self.rho_liq[i] * (1.0 - frac) + self.rho_liq[j] * frac; + let rho_vap = self.rho_vap[i] * (1.0 - frac) + self.rho_vap[j] * frac; + let s_liq = self.s_liq[i] * (1.0 - frac) + self.s_liq[j] * frac; + let s_vap = self.s_vap[i] * (1.0 - frac) + self.s_vap[j] * frac; + + Some((t_sat, h_liq, h_vap, rho_liq, rho_vap, s_liq, s_vap)) + } +} + +/// Complete fluid table with single-phase and saturation data. +#[derive(Debug, Clone)] +pub struct FluidTable { + /// Fluid identifier + pub fluid_id: String, + /// Critical point + pub critical_point: TableCriticalPoint, + /// Single-phase (P, T) table + pub single_phase: SinglePhaseTable, + /// Saturation table (optional - for two-phase support) + pub saturation: Option, +} + +/// JSON deserialization structures (internal format). 
+#[derive(Debug, Deserialize)] +struct JsonCriticalPoint { + tc: f64, + pc: f64, + rho_c: f64, +} + +#[derive(Debug, Deserialize)] +struct JsonSinglePhase { + pressure: Vec, + temperature: Vec, + density: Vec, + enthalpy: Vec, + entropy: Vec, + cp: Vec, + cv: Vec, +} + +#[derive(Debug, Deserialize)] +struct JsonSaturation { + temperature: Vec, + pressure: Vec, + h_liq: Vec, + h_vap: Vec, + rho_liq: Vec, + rho_vap: Vec, + s_liq: Vec, + s_vap: Vec, +} + +#[derive(Debug, Deserialize)] +struct JsonFluidTable { + fluid: String, + critical_point: JsonCriticalPoint, + single_phase: JsonSinglePhase, + saturation: Option, +} + +impl FluidTable { + /// Load a fluid table from a JSON file. + pub fn load_from_path(path: &Path) -> FluidResult { + let contents = std::fs::read_to_string(path).map_err(|e| FluidError::TableNotFound { + path: format!("{}: {}", path.display(), e), + })?; + + let json: JsonFluidTable = + serde_json::from_str(&contents).map_err(|e| FluidError::InvalidState { + reason: format!("Invalid table JSON: {}", e), + })?; + + Self::from_json(json) + } + + /// Load from JSON string (for embedded tables in tests). 
+ pub fn load_from_str(s: &str) -> FluidResult { + let json: JsonFluidTable = + serde_json::from_str(s).map_err(|e| FluidError::InvalidState { + reason: format!("Invalid table JSON: {}", e), + })?; + Self::from_json(json) + } + + fn from_json(json: JsonFluidTable) -> FluidResult { + let n_p = json.single_phase.pressure.len(); + let n_t = json.single_phase.temperature.len(); + let expected = n_p * n_t; + + if json.single_phase.density.len() != expected + || json.single_phase.enthalpy.len() != expected + || json.single_phase.entropy.len() != expected + || json.single_phase.cp.len() != expected + || json.single_phase.cv.len() != expected + { + return Err(FluidError::InvalidState { + reason: "Table grid dimensions do not match property arrays".to_string(), + }); + } + + let mut properties = HashMap::new(); + properties.insert("density".to_string(), json.single_phase.density); + properties.insert("enthalpy".to_string(), json.single_phase.enthalpy); + properties.insert("entropy".to_string(), json.single_phase.entropy); + properties.insert("cp".to_string(), json.single_phase.cp); + properties.insert("cv".to_string(), json.single_phase.cv); + + let single_phase = SinglePhaseTable { + pressure: json.single_phase.pressure, + temperature: json.single_phase.temperature, + properties, + }; + + let saturation = json + .saturation + .map(|s| { + let n = s.temperature.len(); + if s.pressure.len() != n + || s.h_liq.len() != n + || s.h_vap.len() != n + || s.rho_liq.len() != n + || s.rho_vap.len() != n + || s.s_liq.len() != n + || s.s_vap.len() != n + { + return Err(FluidError::InvalidState { + reason: format!( + "Saturation table array length mismatch: expected {} elements", + n + ), + }); + } + Ok(SaturationTable { + temperature: s.temperature, + pressure: s.pressure, + h_liq: s.h_liq, + h_vap: s.h_vap, + rho_liq: s.rho_liq, + rho_vap: s.rho_vap, + s_liq: s.s_liq, + s_vap: s.s_vap, + }) + }) + .transpose()?; + + let critical_point = TableCriticalPoint { + temperature: 
Temperature::from_kelvin(json.critical_point.tc), + pressure: Pressure::from_pascals(json.critical_point.pc), + density: json.critical_point.rho_c, + }; + + Ok(FluidTable { + fluid_id: json.fluid, + critical_point, + single_phase, + saturation, + }) + } +} diff --git a/crates/fluids/src/tabular_backend.rs b/crates/fluids/src/tabular_backend.rs new file mode 100644 index 0000000..bf8d5ab --- /dev/null +++ b/crates/fluids/src/tabular_backend.rs @@ -0,0 +1,543 @@ +//! Tabular interpolation backend for fluid properties. +//! +//! Provides 100x faster property lookups via pre-computed tables +//! with bilinear interpolation. Results deviate < 0.01% from NIST REFPROP. + +use crate::backend::FluidBackend; +use crate::damped_backend::DampedBackend; +use crate::errors::{FluidError, FluidResult}; +use crate::tabular::FluidTable; +#[allow(unused_imports)] +use crate::types::Entropy; +use crate::types::{CriticalPoint, FluidId, Phase, Property, FluidState}; +use std::collections::HashMap; +use std::path::Path; + +/// Tabular backend using pre-computed property tables. +/// +/// Loads fluid tables from JSON files and performs bilinear interpolation +/// for fast property lookups. No heap allocation in the property() hot path. +pub struct TabularBackend { + /// Pre-loaded tables: fluid name -> table (no allocation during queries) + tables: HashMap, + /// Ordered list of fluid IDs for list_fluids() + fluid_ids: Vec, +} + +impl TabularBackend { + /// Create an empty TabularBackend. + pub fn new() -> Self { + TabularBackend { + tables: HashMap::new(), + fluid_ids: Vec::new(), + } + } + + /// Creates a new TabularBackend with critical point damping enabled. + /// + /// This wraps the backend with a `DampedBackend` to apply C1-continuous + /// damping to derivative properties (Cp, Cv, etc.) near the critical point. + pub fn with_damping() -> DampedBackend { + DampedBackend::new(Self::new()) + } + + /// Load a fluid table from a JSON file and register it. 
+ pub fn load_table(&mut self, path: &Path) -> FluidResult<()> { + let table = FluidTable::load_from_path(path)?; + let id = table.fluid_id.clone(); + if !self.fluid_ids.contains(&id) { + self.fluid_ids.push(id.clone()); + } + self.tables.insert(id, table); + Ok(()) + } + + /// Load a fluid table from a JSON string (for embedded/test data). + pub fn load_table_from_str(&mut self, json: &str) -> FluidResult<()> { + let table = FluidTable::load_from_str(json)?; + let id = table.fluid_id.clone(); + if !self.fluid_ids.contains(&id) { + self.fluid_ids.push(id.clone()); + } + self.tables.insert(id, table); + Ok(()) + } + + /// Get a reference to a fluid table. Returns None if not loaded. + #[inline] + fn get_table(&self, fluid: &FluidId) -> Option<&FluidTable> { + self.tables.get(&fluid.0) + } + + /// Resolve FluidState to (p, t) in Pascals and Kelvin. + /// For (P,x) uses saturation temperature at P. + fn resolve_state(&self, fluid: &FluidId, state: FluidState) -> FluidResult<(f64, f64)> { + match state { + FluidState::PressureTemperature(p, t) => Ok((p.to_pascals(), t.to_kelvin())), + FluidState::PressureEnthalpy(p, h) => { + let table = self.get_table(fluid).ok_or(FluidError::UnknownFluid { + fluid: fluid.0.clone(), + })?; + self.find_t_from_ph(table, p.to_pascals(), h.to_joules_per_kg()) + } + FluidState::PressureQuality(p, _x) => { + let table = self.get_table(fluid).ok_or(FluidError::UnknownFluid { + fluid: fluid.0.clone(), + })?; + if let Some(ref sat) = table.saturation { + let (t_sat, _, _, _, _, _, _) = + sat.at_pressure(p.to_pascals()) + .ok_or(FluidError::OutOfBounds { + fluid: fluid.0.clone(), + p: p.to_pascals(), + t: 0.0, + })?; + Ok((p.to_pascals(), t_sat)) + } else { + Err(FluidError::InvalidState { + reason: "Two-phase (P,x) requires saturation table".to_string(), + }) + } + } + FluidState::PressureEntropy(_, _) => Err(FluidError::InvalidState { + reason: "TabularBackend does not yet support (P,s) state".to_string(), + }), + 
FluidState::PressureTemperatureMixture(_, _, _) + | FluidState::PressureEnthalpyMixture(_, _, _) + | FluidState::PressureQualityMixture(_, _, _) => { + // TabularBackend does not support mixtures - fallback to error + Err(FluidError::MixtureNotSupported( + "TabularBackend does not support mixtures. Use CoolPropBackend.".to_string(), + )) + } + } + } + + /// Find T given (P, h) using Newton iteration on the enthalpy table. + fn find_t_from_ph(&self, table: &FluidTable, p: f64, h_target: f64) -> FluidResult<(f64, f64)> { + // Initial guess: use midpoint of T range + let t_grid = &table.single_phase.temperature; + if t_grid.len() < 2 { + return Err(FluidError::InvalidState { + reason: "Table too small for (P,h) lookup".to_string(), + }); + } + + let mut t = (t_grid[0] + t_grid[t_grid.len() - 1]) / 2.0; + let dt_fd = 0.1; // K, for finite difference + + for _ in 0..20 { + let h = table + .single_phase + .interpolate("enthalpy", p, t, &table.fluid_id)?; + let err = h - h_target; + + if err.abs() < 1.0 { + return Ok((p, t)); + } + + let h_plus = table + .single_phase + .interpolate("enthalpy", p, t + dt_fd, &table.fluid_id) + .unwrap_or(h); + let dh_dt = (h_plus - h) / dt_fd; + + if dh_dt.abs() < 1e-10 { + return Err(FluidError::NumericalError( + "Zero dh/dT in (P,h) Newton iteration".to_string(), + )); + } + + t -= err / dh_dt; + + if t < t_grid[0] || t > t_grid[t_grid.len() - 1] { + return Err(FluidError::OutOfBounds { + fluid: table.fluid_id.clone(), + p, + t, + }); + } + } + + Err(FluidError::NumericalError( + "Newton iteration did not converge for (P,h)".to_string(), + )) + } + + /// Get property for two-phase (P, x) via linear blend. 
+ fn property_two_phase( + &self, + table: &FluidTable, + p: f64, + x: f64, + property: Property, + ) -> FluidResult { + let sat = table.saturation.as_ref().ok_or(FluidError::InvalidState { + reason: "Two-phase requires saturation table".to_string(), + })?; + + let (t_sat, h_liq, h_vap, rho_liq, rho_vap, s_liq, s_vap) = + sat.at_pressure(p).ok_or(FluidError::OutOfBounds { + fluid: table.fluid_id.clone(), + p, + t: 0.0, + })?; + + let val = match property { + Property::Enthalpy => h_liq * (1.0 - x) + h_vap * x, + Property::Density => { + let v_liq = 1.0 / rho_liq; + let v_vap = 1.0 / rho_vap; + let v = v_liq * (1.0 - x) + v_vap * x; + 1.0 / v + } + Property::Entropy => s_liq * (1.0 - x) + s_vap * x, + Property::Quality => x, + Property::Temperature => t_sat, + Property::Pressure => p, + _ => { + return Err(FluidError::UnsupportedProperty { + property: property.to_string(), + }) + } + }; + + Ok(val) + } + + /// Map Property enum to table property name. + fn property_table_name(property: Property) -> Option<&'static str> { + match property { + Property::Density => Some("density"), + Property::Enthalpy => Some("enthalpy"), + Property::Entropy => Some("entropy"), + Property::Cp => Some("cp"), + Property::Cv => Some("cv"), + Property::Temperature => Some("temperature"), + Property::Pressure => Some("pressure"), + _ => None, + } + } +} + +impl Default for TabularBackend { + fn default() -> Self { + Self::new() + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::backend::FluidBackend; + use approx::assert_relative_eq; + use entropyk_core::{Pressure, Temperature}; + + fn make_test_backend() -> TabularBackend { + let mut backend = TabularBackend::new(); + let path = std::path::Path::new(env!("CARGO_MANIFEST_DIR")).join("data/r134a.json"); + backend.load_table(&path).unwrap(); + backend + } + + #[test] + fn test_tabular_load_r134a() { + let backend = make_test_backend(); + assert!(backend.is_fluid_available(&FluidId::new("R134a"))); + 
assert!(!backend.is_fluid_available(&FluidId::new("R999"))); + } + + #[test] + fn test_tabular_property_pt() { + let backend = make_test_backend(); + let state = FluidState::from_pt(Pressure::from_bar(1.0), Temperature::from_celsius(25.0)); + + let density = backend + .property(FluidId::new("R134a"), Property::Density, state.clone()) + .unwrap(); + assert!(density > 1.0 && density < 100.0); + + let enthalpy = backend + .property(FluidId::new("R134a"), Property::Enthalpy, state) + .unwrap(); + assert!(enthalpy > 300_000.0 && enthalpy < 500_000.0); + } + + /// Accuracy: at grid point (200 kPa, 290 K), density must match table exactly. + #[test] + fn test_tabular_accuracy_at_grid_point() { + let backend = make_test_backend(); + let state = FluidState::from_pt( + Pressure::from_pascals(200_000.0), + Temperature::from_kelvin(290.0), + ); + let density = backend + .property(FluidId::new("R134a"), Property::Density, state) + .unwrap(); + assert_relative_eq!(density, 9.0, epsilon = 1e-10); + } + + /// Accuracy: interpolated value within 1% (table self-consistency check). 
+ #[test] + fn test_tabular_accuracy_interpolated() { + let backend = make_test_backend(); + let state = FluidState::from_pt( + Pressure::from_pascals(200_000.0), + Temperature::from_kelvin(300.0), + ); + let density = backend + .property(FluidId::new("R134a"), Property::Density, state) + .unwrap(); + assert_relative_eq!(density, 8.415, epsilon = 0.01); + } + + #[test] + fn test_tabular_critical_point() { + let backend = make_test_backend(); + let cp = backend.critical_point(FluidId::new("R134a")).unwrap(); + assert!((cp.temperature_kelvin() - 374.21).abs() < 1.0); + assert!((cp.pressure_pascals() - 4.059e6).abs() < 1e4); + } + + #[test] + fn test_tabular_list_fluids() { + let backend = make_test_backend(); + let fluids = backend.list_fluids(); + assert_eq!(fluids.len(), 1); + assert_eq!(fluids[0].0, "R134a"); + } + + #[test] + fn test_tabular_unknown_fluid() { + let backend = make_test_backend(); + let state = FluidState::from_pt(Pressure::from_bar(1.0), Temperature::from_celsius(25.0)); + let result = backend.property(FluidId::new("R999"), Property::Density, state); + assert!(result.is_err()); + } + + #[test] + fn test_tabular_out_of_bounds() { + let backend = make_test_backend(); + let state = FluidState::from_pt( + Pressure::from_pascals(50_000.0), + Temperature::from_kelvin(200.0), + ); + let result = backend.property(FluidId::new("R134a"), Property::Density, state); + assert!(result.is_err()); + } + + #[test] + fn test_tabular_ph_state() { + let backend = make_test_backend(); + let state = FluidState::from_ph( + Pressure::from_bar(1.0), + entropyk_core::Enthalpy::from_kilojoules_per_kg(415.0), + ); + let density = backend + .property(FluidId::new("R134a"), Property::Density, state) + .unwrap(); + assert!(density > 1.0); + } + + #[test] + fn test_tabular_px_state() { + let backend = make_test_backend(); + let state = FluidState::from_px( + Pressure::from_pascals(500_000.0), + crate::types::Quality::new(0.5), + ); + let enthalpy = backend + 
.property(FluidId::new("R134a"), Property::Enthalpy, state) + .unwrap(); + assert!(enthalpy > 300_000.0 && enthalpy < 450_000.0); + } + + #[test] + fn test_tabular_benchmark_10k_queries() { + let backend = make_test_backend(); + let state = FluidState::from_pt(Pressure::from_bar(1.0), Temperature::from_celsius(25.0)); + let start = std::time::Instant::now(); + for _ in 0..10_000 { + let _ = backend.property(FluidId::new("R134a"), Property::Density, state.clone()); + } + let elapsed = start.elapsed(); + assert!( + elapsed.as_millis() < 100, + "10k queries took {}ms, expected < 100ms (debug mode)", + elapsed.as_millis() + ); + } + + /// Release build: 10k queries must complete in < 10ms (AC #3). + #[test] + #[cfg_attr(debug_assertions, ignore = "run with cargo test --release")] + fn test_tabular_benchmark_10k_queries_release() { + let backend = make_test_backend(); + let state = FluidState::from_pt(Pressure::from_bar(1.0), Temperature::from_celsius(25.0)); + let start = std::time::Instant::now(); + for _ in 0..10_000 { + let _ = backend.property(FluidId::new("R134a"), Property::Density, state.clone()); + } + let elapsed = start.elapsed(); + assert!( + elapsed.as_millis() < 10, + "10k queries took {}ms, expected < 10ms in release", + elapsed.as_millis() + ); + } + + /// Compare TabularBackend vs CoolPropBackend. Embedded r134a.json may be from + /// reference data; use epsilon 1% for compatibility. CoolProp-generated tables + /// achieve < 0.01% (validated in generator::test_generated_table_vs_coolprop_spot_checks). 
+ #[test] + #[cfg(feature = "coolprop")] + fn test_tabular_vs_coolprop_accuracy() { + use crate::coolprop::CoolPropBackend; + use crate::types::Quality; + + let mut tabular = TabularBackend::new(); + let path = std::path::Path::new(env!("CARGO_MANIFEST_DIR")).join("data/r134a.json"); + tabular.load_table(&path).unwrap(); + + let coolprop = CoolPropBackend::new(); + let fluid = FluidId::new("R134a"); + + // (P, T) at 1 bar, 25°C + let state_pt = + FluidState::from_pt(Pressure::from_bar(1.0), Temperature::from_celsius(25.0)); + let rho_t = tabular + .property(fluid.clone(), Property::Density, state_pt) + .unwrap(); + let rho_c = coolprop + .property(fluid.clone(), Property::Density, state_pt) + .unwrap(); + assert_relative_eq!(rho_t, rho_c, epsilon = 0.01 * rho_c.max(1.0)); + + let h_t = tabular + .property(fluid.clone(), Property::Enthalpy, state_pt) + .unwrap(); + let h_c = coolprop + .property(fluid.clone(), Property::Enthalpy, state_pt) + .unwrap(); + assert_relative_eq!(h_t, h_c, epsilon = 0.01 * h_c.max(1.0)); + + // (P, h) at 1 bar, h ≈ 415 kJ/kg + let state_ph = FluidState::from_ph( + Pressure::from_bar(1.0), + entropyk_core::Enthalpy::from_kilojoules_per_kg(415.0), + ); + let rho_t_ph = tabular + .property(fluid.clone(), Property::Density, state_ph) + .unwrap(); + let rho_c_ph = coolprop + .property(fluid.clone(), Property::Density, state_ph) + .unwrap(); + assert_relative_eq!(rho_t_ph, rho_c_ph, epsilon = 0.01 * rho_c_ph.max(1.0)); + + // (P, x) at 500 kPa, x = 0.5 + let state_px = FluidState::from_px(Pressure::from_pascals(500_000.0), Quality::new(0.5)); + let h_t_px = tabular + .property(fluid.clone(), Property::Enthalpy, state_px) + .unwrap(); + let h_c_px = coolprop + .property(fluid.clone(), Property::Enthalpy, state_px) + .unwrap(); + assert_relative_eq!(h_t_px, h_c_px, epsilon = 0.01 * h_c_px.max(1.0)); + } +} + +impl FluidBackend for TabularBackend { + fn property(&self, fluid: FluidId, property: Property, state: FluidState) -> FluidResult { + let 
table = self.get_table(&fluid).ok_or(FluidError::UnknownFluid { + fluid: fluid.0.clone(), + })?; + + // Handle (P, x) two-phase explicitly + if let FluidState::PressureQuality(p, x) = state { + return self.property_two_phase(table, p.to_pascals(), x.value(), property); + } + + let (p, t) = self.resolve_state(&fluid, state)?; + + // Temperature and Pressure are direct + if property == Property::Temperature { + return Ok(t); + } + if property == Property::Pressure { + return Ok(p); + } + + let name = Self::property_table_name(property).ok_or(FluidError::UnsupportedProperty { + property: property.to_string(), + })?; + + table.single_phase.interpolate(name, p, t, &fluid.0) + } + + fn critical_point(&self, fluid: FluidId) -> FluidResult { + let table = self.get_table(&fluid).ok_or(FluidError::NoCriticalPoint { + fluid: fluid.0.clone(), + })?; + + let cp = &table.critical_point; + Ok(CriticalPoint::new(cp.temperature, cp.pressure, cp.density)) + } + + fn is_fluid_available(&self, fluid: &FluidId) -> bool { + self.tables.contains_key(&fluid.0) + } + + fn phase(&self, fluid: FluidId, state: FluidState) -> FluidResult { + let table = self.get_table(&fluid).ok_or(FluidError::UnknownFluid { + fluid: fluid.0.clone(), + })?; + + let (p, t) = self.resolve_state(&fluid, state.clone())?; + let pc = table.critical_point.pressure.to_pascals(); + let tc = table.critical_point.temperature.to_kelvin(); + + if p > pc && t > tc { + return Ok(Phase::Supercritical); + } + + if let Some(ref sat) = table.saturation { + if let Some((_, h_liq, h_vap, _, _, _, _)) = sat.at_pressure(p) { + if let FluidState::PressureEnthalpy(_, h) = state { + let hv = h.to_joules_per_kg(); + if hv <= h_liq { + return Ok(Phase::Liquid); + } + if hv >= h_vap { + return Ok(Phase::Vapor); + } + return Ok(Phase::TwoPhase); + } + if let FluidState::PressureQuality(_, x) = state { + if x.value() <= 0.0 { + return Ok(Phase::Liquid); + } + if x.value() >= 1.0 { + return Ok(Phase::Vapor); + } + return Ok(Phase::TwoPhase); 
+ } + } + } + + Ok(Phase::Unknown) + } + + fn list_fluids(&self) -> Vec { + self.fluid_ids + .iter() + .map(|s| FluidId::new(s.clone())) + .collect() + } + + fn full_state(&self, fluid: FluidId, p: entropyk_core::Pressure, h: entropyk_core::Enthalpy) -> FluidResult { + let t_k = self.property(fluid.clone(), Property::Temperature, FluidState::from_ph(p, h))?; + Err(FluidError::UnsupportedProperty { + property: format!("full_state for TabularBackend: Temperature is {:.2} K", t_k), + }) + } +} diff --git a/crates/fluids/src/test_backend.rs b/crates/fluids/src/test_backend.rs new file mode 100644 index 0000000..c15d30a --- /dev/null +++ b/crates/fluids/src/test_backend.rs @@ -0,0 +1,430 @@ +//! Test backend implementation for unit testing. +//! +//! This module provides a mock backend that returns simplified/idealized +//! property values for testing without requiring external dependencies +//! like CoolProp. + +use crate::backend::FluidBackend; +use crate::errors::{FluidError, FluidResult}; +#[cfg(test)] +use crate::mixture::Mixture; +use crate::types::{CriticalPoint, FluidId, Phase, Property, FluidState}; +use entropyk_core::{Pressure, Temperature}; +use std::collections::HashMap; + +/// Test backend for unit testing. +/// +/// This backend provides simplified thermodynamic property calculations +/// suitable for testing without external dependencies. Values are idealized +/// approximations and should NOT be used for real simulations. +pub struct TestBackend { + /// Map of fluid names to critical points + critical_points: HashMap, + /// List of available test fluids + available_fluids: Vec, +} + +impl TestBackend { + /// Creates a new TestBackend with default test fluids. 
+ pub fn new() -> Self { + let mut critical_points = HashMap::new(); + + // CO2 (R744) + critical_points.insert( + "CO2".to_string(), + CriticalPoint::new( + Temperature::from_kelvin(304.13), + Pressure::from_pascals(7.3773e6), + 467.0, + ), + ); + + // R134a + critical_points.insert( + "R134a".to_string(), + CriticalPoint::new( + Temperature::from_kelvin(374.21), + Pressure::from_pascals(4.059e6), + 512.0, + ), + ); + + // R410A + critical_points.insert( + "R410A".to_string(), + CriticalPoint::new( + Temperature::from_kelvin(344.49), + Pressure::from_pascals(4.926e6), + 458.0, + ), + ); + + // R32 + critical_points.insert( + "R32".to_string(), + CriticalPoint::new( + Temperature::from_kelvin(351.25), + Pressure::from_pascals(5.782e6), + 360.0, + ), + ); + + // Water + critical_points.insert( + "Water".to_string(), + CriticalPoint::new( + Temperature::from_kelvin(647.096), + Pressure::from_pascals(22.064e6), + 322.0, + ), + ); + + let available_fluids = vec![ + "CO2".to_string(), + "R134a".to_string(), + "R410A".to_string(), + "R32".to_string(), + "Water".to_string(), + "Nitrogen".to_string(), + "Oxygen".to_string(), + "Air".to_string(), + ]; + + TestBackend { + critical_points, + available_fluids, + } + } + + /// Simplified ideal gas property calculation. 
+ fn ideal_property( + &self, + fluid: &str, + property: Property, + state: FluidState, + ) -> FluidResult { + // Simple ideal gas approximations for testing + // Real implementation would use proper equations of state + match fluid { + "Nitrogen" | "Oxygen" | "Air" => self.ideal_gas_property(property, state, 29.0), + "Water" => self.water_property(property, state), + _ => { + // For refrigerants, use simplified correlations + self.refrigerant_property(fluid, property, state) + } + } + } + + fn ideal_gas_property( + &self, + property: Property, + state: FluidState, + _molar_mass: f64, + ) -> FluidResult { + let (p, t) = match state { + FluidState::PressureTemperature(p, t) => (p.to_pascals(), t.to_kelvin()), + _ => { + return Err(FluidError::InvalidState { + reason: "TestBackend only supports P-T state for ideal gases".to_string(), + }) + } + }; + + // Simplified ideal gas: R = 8.314 J/(mol·K), approximate + let r_specific = 287.0; // J/(kg·K) for air + + match property { + Property::Density => Ok(p / (r_specific * t)), + Property::Enthalpy => Ok(1005.0 * t), // Cp * T, Cp ≈ 1005 J/(kg·K) for air + Property::Entropy => Ok(r_specific * t.ln()), // Simplified + Property::Cp => Ok(1005.0), // Constant pressure specific heat + Property::Cv => Ok(718.0), // Constant volume specific heat + Property::Temperature => Ok(t), + Property::Pressure => Ok(p), + Property::ThermalConductivity => Ok(0.025), // W/(m·K) for air + Property::Viscosity => Ok(1.8e-5), // Pa·s for air + _ => Err(FluidError::UnsupportedProperty { + property: property.to_string(), + }), + } + } + + fn water_property(&self, property: Property, state: FluidState) -> FluidResult { + let (p, t) = match state { + FluidState::PressureTemperature(p, t) => (p.to_pascals(), t.to_kelvin()), + _ => { + return Err(FluidError::InvalidState { + reason: "TestBackend only supports P-T state for water".to_string(), + }) + } + }; + + // Simplified water properties at ~1 atm + if p < 1.1e5 && t > 273.15 && t < 373.15 { + 
match property { + Property::Density => Ok(1000.0), // kg/m³ + Property::Enthalpy => Ok(4200.0 * (t - 273.15)), // Cp * ΔT + Property::Cp => Ok(4184.0), // J/(kg·K) + Property::ThermalConductivity => Ok(0.6), // W/(m·K) + Property::Viscosity => Ok(0.001), // Pa·s + Property::Temperature => Ok(t), + Property::Pressure => Ok(p), + _ => Err(FluidError::UnsupportedProperty { + property: property.to_string(), + }), + } + } else { + Err(FluidError::InvalidState { + reason: "Water property only valid in liquid region".to_string(), + }) + } + } + + fn refrigerant_property( + &self, + _fluid: &str, + property: Property, + state: FluidState, + ) -> FluidResult { + let (p, t) = match state { + FluidState::PressureTemperature(p, t) => (p.to_pascals(), t.to_kelvin()), + _ => { + return Err(FluidError::InvalidState { + reason: "TestBackend only supports P-T state for refrigerants".to_string(), + }) + } + }; + + // Simplified refrigerant properties + match property { + Property::Density => { + // Rough approximation for liquid (~1000 kg/m³) vs vapor (~10-50 kg/m³) + if p > 1e6 { + Ok(1000.0) // Liquid + } else { + Ok(30.0) // Vapor + } + } + Property::Enthalpy => { + if p > 1e6 { + Ok(200000.0) // Liquid region + } else { + Ok(400000.0) // Vapor region + } + } + Property::Cp => Ok(1500.0), // Approximate + Property::Temperature => Ok(t), + Property::Pressure => Ok(p), + Property::ThermalConductivity => Ok(0.015), + Property::Viscosity => Ok(1.5e-5), + _ => Err(FluidError::UnsupportedProperty { + property: property.to_string(), + }), + } + } + + fn determine_phase(&self, fluid: &str, state: FluidState) -> Phase { + let (p, t) = match state { + FluidState::PressureTemperature(p, t) => (p.to_pascals(), t.to_kelvin()), + _ => return Phase::Unknown, + }; + + // Get critical point if available + if let Some(cp) = self.critical_points.get(fluid) { + let pc = cp.pressure_pascals(); + let tc = cp.temperature_kelvin(); + + if p > pc && t > tc { + return Phase::Supercritical; + } + if (p - 
pc).abs() / pc < 0.05 || (t - tc).abs() / tc < 0.05 { + return Phase::Supercritical; + } + } + + // Simplified phase determination + if p > 5e5 { + Phase::Liquid + } else { + Phase::Vapor + } + } +} + +impl Default for TestBackend { + fn default() -> Self { + Self::new() + } +} + +impl FluidBackend for TestBackend { + fn property(&self, fluid: FluidId, property: Property, state: FluidState) -> FluidResult { + if !self.is_fluid_available(&fluid) { + return Err(FluidError::UnknownFluid { + fluid: fluid.0.clone(), + }); + } + + self.ideal_property(&fluid.0, property, state) + } + + fn critical_point(&self, fluid: FluidId) -> FluidResult { + self.critical_points + .get(&fluid.0) + .copied() + .ok_or(FluidError::NoCriticalPoint { fluid: fluid.0 }) + } + + fn is_fluid_available(&self, fluid: &FluidId) -> bool { + self.available_fluids.iter().any(|f| f == &fluid.0) + } + + fn phase(&self, fluid: FluidId, state: FluidState) -> FluidResult { + if !self.is_fluid_available(&fluid) { + return Err(FluidError::UnknownFluid { fluid: fluid.0 }); + } + + Ok(self.determine_phase(&fluid.0, state)) + } + + fn list_fluids(&self) -> Vec { + self.available_fluids + .iter() + .map(|s| FluidId::new(s.clone())) + .collect() + } + + fn full_state(&self, fluid: FluidId, p: entropyk_core::Pressure, h: entropyk_core::Enthalpy) -> FluidResult { + let t_k = self.property(fluid.clone(), Property::Temperature, FluidState::from_ph(p, h))?; + Err(FluidError::UnsupportedProperty { + property: format!("full_state for TestBackend: Temperature is {:.2} K", t_k), + }) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_backend_available_fluids() { + let backend = TestBackend::new(); + + assert!(backend.is_fluid_available(&FluidId::new("CO2"))); + assert!(backend.is_fluid_available(&FluidId::new("R134a"))); + assert!(!backend.is_fluid_available(&FluidId::new("R999"))); + } + + #[test] + fn test_list_fluids() { + let backend = TestBackend::new(); + let fluids = backend.list_fluids(); + 
+ assert!(fluids.len() > 0); + assert!(fluids.iter().any(|f| f.0 == "CO2")); + } + + #[test] + fn test_critical_point() { + let backend = TestBackend::new(); + + let cp = backend.critical_point(FluidId::new("CO2")).unwrap(); + assert!((cp.temperature_kelvin() - 304.13).abs() < 0.1); + assert!((cp.pressure_pascals() - 7.3773e6).abs() < 1e4); + } + + #[test] + fn test_critical_point_not_available() { + let backend = TestBackend::new(); + + let result = backend.critical_point(FluidId::new("UnknownFluid")); + assert!(result.is_err()); + } + + #[test] + fn test_property_nitrogen() { + let backend = TestBackend::new(); + + let state = FluidState::from_pt( + Pressure::from_pascals(101325.0), + Temperature::from_kelvin(300.0), + ); + + let density = backend + .property(FluidId::new("Nitrogen"), Property::Density, state) + .unwrap(); + + assert!(density > 1.0); // Should be ~1.16 kg/m³ + } + + #[test] + fn test_property_water() { + let backend = TestBackend::new(); + + let state = FluidState::from_pt( + Pressure::from_pascals(101325.0), + Temperature::from_celsius(25.0), + ); + + let density = backend + .property(FluidId::new("Water"), Property::Density, state) + .unwrap(); + + assert!((density - 1000.0).abs() < 1.0); + } + + #[test] + fn test_property_unknown_fluid() { + let backend = TestBackend::new(); + + let state = FluidState::from_pt( + Pressure::from_pascals(101325.0), + Temperature::from_kelvin(300.0), + ); + + let result = backend.property(FluidId::new("R999"), Property::Density, state); + + assert!(result.is_err()); + } + + #[test] + fn test_phase_co2_supercritical() { + let backend = TestBackend::new(); + + // Above critical point + let state = + FluidState::from_pt(Pressure::from_pascals(8e6), Temperature::from_kelvin(320.0)); + + let phase = backend.phase(FluidId::new("CO2"), state).unwrap(); + assert_eq!(phase, Phase::Supercritical); + } + + #[test] + fn test_phase_liquid() { + let backend = TestBackend::new(); + + let state = FluidState::from_pt( + 
Pressure::from_pascals(10e5), + Temperature::from_celsius(25.0), + ); + + let phase = backend.phase(FluidId::new("Water"), state).unwrap(); + assert_eq!(phase, Phase::Liquid); + } + + #[test] + fn test_thermo_state_is_mixture() { + let mix = Mixture::from_mass_fractions(&[("R32", 0.5), ("R1234yf", 0.5)]).unwrap(); + + let state_pure = + FluidState::from_pt(Pressure::from_bar(1.0), Temperature::from_celsius(25.0)); + assert!(!state_pure.is_mixture()); + + let state_mix = FluidState::from_pt_mix( + Pressure::from_bar(1.0), + Temperature::from_celsius(25.0), + mix, + ); + assert!(state_mix.is_mixture()); + } +} diff --git a/crates/fluids/src/types.rs b/crates/fluids/src/types.rs new file mode 100644 index 0000000..6ce0d57 --- /dev/null +++ b/crates/fluids/src/types.rs @@ -0,0 +1,369 @@ +//! Types for fluid property calculations. +//! +//! This module defines the core types used to represent thermodynamic states, +//! fluid identifiers, and properties in the fluid backend system. + +use crate::mixture::Mixture; +use entropyk_core::{Enthalpy, Pressure, Temperature}; +use std::fmt; + +/// Difference between two temperatures in Kelvin. +#[derive(Debug, Clone, Copy, PartialEq, PartialOrd)] +pub struct TemperatureDelta(pub f64); + +impl TemperatureDelta { + /// Creates a new TemperatureDelta from a difference in Kelvin. + pub fn new(kelvin_diff: f64) -> Self { + TemperatureDelta(kelvin_diff) + } + + /// Gets the temperature difference in Kelvin. + pub fn kelvin(&self) -> f64 { + self.0 + } +} + +impl From for TemperatureDelta { + fn from(val: f64) -> Self { + TemperatureDelta(val) + } +} + +/// Unique identifier for a fluid. +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct FluidId(pub String); + +impl FluidId { + /// Creates a new FluidId from a string. 
+ pub fn new(name: impl Into) -> Self { + FluidId(name.into()) + } +} + +impl fmt::Display for FluidId { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.0) + } +} + +impl From<&str> for FluidId { + fn from(s: &str) -> Self { + FluidId(s.to_string()) + } +} + +impl From for FluidId { + fn from(s: String) -> Self { + FluidId(s) + } +} + +/// Thermodynamic property that can be queried from a backend. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum Property { + /// Density (kg/m³) + Density, + /// Specific enthalpy (J/kg) + Enthalpy, + /// Specific entropy (J/(kg·K)) + Entropy, + /// Specific internal energy (J/kg) + InternalEnergy, + /// Specific heat capacity at constant pressure (J/(kg·K)) + Cp, + /// Specific heat capacity at constant volume (J/(kg·K)) + Cv, + /// Speed of sound (m/s) + SpeedOfSound, + /// Dynamic viscosity (Pa·s) + Viscosity, + /// Thermal conductivity (W/(m·K)) + ThermalConductivity, + /// Surface tension (N/m) + SurfaceTension, + /// Quality (0-1 for two-phase) + Quality, + /// Temperature (K) + Temperature, + /// Pressure (Pa) + Pressure, +} + +impl fmt::Display for Property { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Property::Density => write!(f, "Density"), + Property::Enthalpy => write!(f, "Enthalpy"), + Property::Entropy => write!(f, "Entropy"), + Property::InternalEnergy => write!(f, "InternalEnergy"), + Property::Cp => write!(f, "Cp"), + Property::Cv => write!(f, "Cv"), + Property::SpeedOfSound => write!(f, "SpeedOfSound"), + Property::Viscosity => write!(f, "Viscosity"), + Property::ThermalConductivity => write!(f, "ThermalConductivity"), + Property::SurfaceTension => write!(f, "SurfaceTension"), + Property::Quality => write!(f, "Quality"), + Property::Temperature => write!(f, "Temperature"), + Property::Pressure => write!(f, "Pressure"), + } + } +} + +/// Input specification for thermodynamic state calculation. 
+/// +/// Defines what inputs are available to look up a thermodynamic property. +#[derive(Debug, Clone, PartialEq)] +pub enum FluidState { + /// Pressure and temperature (P, T) - most common for single-phase + PressureTemperature(Pressure, Temperature), + /// Pressure and enthalpy (P, h) - common for expansion/compression + PressureEnthalpy(Pressure, Enthalpy), + /// Pressure and entropy (P, s) - useful for isentropic processes + PressureEntropy(Pressure, Entropy), + /// Pressure and quality (P, x) - for two-phase regions + PressureQuality(Pressure, Quality), + /// Pressure and temperature with mixture (P, T, mixture) + PressureTemperatureMixture(Pressure, Temperature, Mixture), + /// Pressure and enthalpy with mixture (P, h, mixture) - preferred for two-phase + PressureEnthalpyMixture(Pressure, Enthalpy, Mixture), + /// Pressure and quality with mixture (P, x, mixture) - for two-phase mixtures + PressureQualityMixture(Pressure, Quality, Mixture), +} + +impl FluidState { + /// Creates a state from pressure and temperature. + pub fn from_pt(p: Pressure, t: Temperature) -> Self { + FluidState::PressureTemperature(p, t) + } + + /// Creates a state from pressure and enthalpy. + pub fn from_ph(p: Pressure, h: Enthalpy) -> Self { + FluidState::PressureEnthalpy(p, h) + } + + /// Creates a state from pressure and entropy. + pub fn from_ps(p: Pressure, s: Entropy) -> Self { + FluidState::PressureEntropy(p, s) + } + + /// Creates a state from pressure and quality. + pub fn from_px(p: Pressure, x: Quality) -> Self { + FluidState::PressureQuality(p, x) + } + + /// Creates a state from pressure, temperature, and mixture. + pub fn from_pt_mix(p: Pressure, t: Temperature, mix: Mixture) -> Self { + FluidState::PressureTemperatureMixture(p, t, mix) + } + + /// Creates a state from pressure, enthalpy, and mixture (preferred for two-phase). 
+ pub fn from_ph_mix(p: Pressure, h: Enthalpy, mix: Mixture) -> Self { + FluidState::PressureEnthalpyMixture(p, h, mix) + } + + /// Creates a state from pressure, quality, and mixture. + pub fn from_px_mix(p: Pressure, x: Quality, mix: Mixture) -> Self { + FluidState::PressureQualityMixture(p, x, mix) + } + + /// Check if this state contains a mixture. + pub fn is_mixture(&self) -> bool { + matches!( + self, + FluidState::PressureTemperatureMixture(_, _, _) + | FluidState::PressureEnthalpyMixture(_, _, _) + | FluidState::PressureQualityMixture(_, _, _) + ) + } +} + +/// Entropy in J/(kg·K). +#[derive(Debug, Clone, Copy, PartialEq)] +pub struct Entropy(pub f64); + +impl Entropy { + /// Creates entropy from J/(kg·K). + pub fn from_joules_per_kg_kelvin(value: f64) -> Self { + Entropy(value) + } + + /// Returns entropy in J/(kg·K). + pub fn to_joules_per_kg_kelvin(&self) -> f64 { + self.0 + } +} + +impl From for Entropy { + fn from(value: f64) -> Self { + Entropy(value) + } +} + +/// Quality (vapor fraction) from 0 (saturated liquid) to 1 (saturated vapor). +#[derive(Debug, Clone, Copy, PartialEq)] +pub struct Quality(pub f64); + +impl Quality { + /// Creates a quality value (0-1). + pub fn new(value: f64) -> Self { + Quality(value.clamp(0.0, 1.0)) + } + + /// Returns the quality value (0-1). + pub fn value(&self) -> f64 { + self.0 + } +} + +impl From for Quality { + fn from(value: f64) -> Self { + Quality::new(value) + } +} + +/// Critical point data for a fluid. +#[derive(Debug, Clone, Copy, PartialEq)] +pub struct CriticalPoint { + /// Critical temperature in Kelvin. + pub temperature: Temperature, + /// Critical pressure in Pascals. + pub pressure: Pressure, + /// Critical density in kg/m³. + pub density: f64, +} + +impl CriticalPoint { + /// Creates a new CriticalPoint. + pub fn new(temperature: Temperature, pressure: Pressure, density: f64) -> Self { + CriticalPoint { + temperature, + pressure, + density, + } + } + + /// Returns critical temperature in Kelvin. 
+ pub fn temperature_kelvin(&self) -> f64 { + self.temperature.to_kelvin() + } + + /// Returns critical pressure in Pascals. + pub fn pressure_pascals(&self) -> f64 { + self.pressure.to_pascals() + } +} + +/// Phase of matter. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum Phase { + /// Liquid phase. + Liquid, + /// Vapor/gas phase. + Vapor, + /// Two-phase region. + TwoPhase, + /// Supercritical fluid. + Supercritical, + /// Unknown or undefined phase. + Unknown, +} + +impl fmt::Display for Phase { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Phase::Liquid => write!(f, "Liquid"), + Phase::Vapor => write!(f, "Vapor"), + Phase::TwoPhase => write!(f, "TwoPhase"), + Phase::Supercritical => write!(f, "Supercritical"), + Phase::Unknown => write!(f, "Unknown"), + } + } +} + +/// Comprehensive representation of a thermodynamic state. +/// +/// This struct holds a complete snapshot of all relevant properties for a fluid +/// at a given state. It avoids the need to repeatedly query the backend for +/// individual properties once the state is resolved. +#[derive(Debug, Clone, PartialEq)] +pub struct ThermoState { + /// Fluid identifier (e.g. 
"R410A") + pub fluid: FluidId, + /// Absolute pressure + pub pressure: Pressure, + /// Absolute temperature + pub temperature: Temperature, + /// Specific enthalpy + pub enthalpy: Enthalpy, + /// Specific entropy + pub entropy: Entropy, + /// Density in kg/m³ + pub density: f64, + /// Physical phase of the fluid + pub phase: Phase, + /// Vapor quality (0.0 to 1.0) if in two-phase region + pub quality: Option, + /// Superheat (T - T_dew) if in superheated vapor region + pub superheat: Option, + /// Subcooling (T_bubble - T) if in subcooled liquid region + pub subcooling: Option, + /// Saturated liquid temperature at current pressure (Bubble point) + pub t_bubble: Option, + /// Saturated vapor temperature at current pressure (Dew point) + pub t_dew: Option, +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_fluid_id() { + let id = FluidId::new("R134a"); + assert_eq!(id.0, "R134a"); + } + + #[test] + fn test_fluid_state_from_pt() { + let p = Pressure::from_bar(1.0); + let t = Temperature::from_celsius(25.0); + let state = FluidState::from_pt(p, t); + match state { + FluidState::PressureTemperature(p_out, t_out) => { + assert_eq!(p.to_pascals(), p_out.to_pascals()); + assert_eq!(t.to_kelvin(), t_out.to_kelvin()); + } + _ => panic!("Expected PressureTemperature variant"), + } + } + + #[test] + fn test_quality_bounds() { + let q1 = Quality::new(0.5); + assert!((q1.value() - 0.5).abs() < 1e-10); + + let q2 = Quality::new(1.5); + assert!((q2.value() - 1.0).abs() < 1e-10); + + let q3 = Quality::new(-0.5); + assert!((q3.value() - 0.0).abs() < 1e-10); + } + + #[test] + fn test_critical_point() { + // CO2 critical point: Tc = 304.13 K, Pc = 7.3773 MPa + let cp = CriticalPoint::new( + Temperature::from_kelvin(304.13), + Pressure::from_pascals(7.3773e6), + 467.0, + ); + assert!((cp.temperature_kelvin() - 304.13).abs() < 0.01); + assert!((cp.pressure_pascals() - 7.3773e6).abs() < 1.0); + } + + #[test] + fn test_property_display() { + assert_eq!(format!("{}", 
Property::Density), "Density"); + assert_eq!(format!("{}", Property::Enthalpy), "Enthalpy"); + } +} diff --git a/crates/solver/Cargo.toml b/crates/solver/Cargo.toml new file mode 100644 index 0000000..77b44f5 --- /dev/null +++ b/crates/solver/Cargo.toml @@ -0,0 +1,23 @@ +[package] +name = "entropyk-solver" +version = "0.1.0" +edition = "2021" +authors = ["Sepehr "] +description = "System topology and solver engine for Entropyk thermodynamic simulation" +license = "MIT OR Apache-2.0" +repository = "https://github.com/entropyk/entropyk" + +[dependencies] +entropyk-components = { path = "../components" } +entropyk-core = { path = "../core" } +nalgebra = "0.33" +petgraph = "0.6" +thiserror = "1.0" +tracing = "0.1" + +[dev-dependencies] +approx = "0.5" + +[lib] +name = "entropyk_solver" +path = "src/lib.rs" diff --git a/crates/solver/src/coupling.rs b/crates/solver/src/coupling.rs new file mode 100644 index 0000000..57691cf --- /dev/null +++ b/crates/solver/src/coupling.rs @@ -0,0 +1,435 @@ +//! Thermal coupling between circuits for heat transfer. +//! +//! This module provides the infrastructure for modeling heat exchange between +//! independent fluid circuits. Thermal couplings represent heat exchangers +//! that transfer heat from a "hot" circuit to a "cold" circuit without +//! fluid mixing. +//! +//! ## Sign Convention +//! +//! Heat transfer Q > 0 means heat flows INTO the cold circuit (out of hot circuit). +//! This follows the convention that the cold circuit receives heat. +//! +//! ## Coupling Graph and Circular Dependencies +//! +//! Thermal couplings form a directed graph where: +//! - Nodes are circuits (CircuitId) +//! - Edges point from hot_circuit to cold_circuit (direction of heat flow) +//! +//! Circular dependencies occur when circuits mutually heat each other (A→B and B→A). +//! Circuits in circular dependencies must be solved simultaneously by the solver. 
+ +use entropyk_core::{Temperature, ThermalConductance}; +use petgraph::algo::{is_cyclic_directed, kosaraju_scc}; +use petgraph::graph::{DiGraph, NodeIndex}; +use std::collections::HashMap; + +use crate::system::CircuitId; + +/// Thermal coupling between two circuits via a heat exchanger. +/// +/// Heat flows from `hot_circuit` to `cold_circuit` proportional to the +/// temperature difference and thermal conductance (UA value). +#[derive(Debug, Clone, PartialEq)] +pub struct ThermalCoupling { + /// Circuit that supplies heat (higher temperature side). + pub hot_circuit: CircuitId, + /// Circuit that receives heat (lower temperature side). + pub cold_circuit: CircuitId, + /// Thermal conductance (UA) in W/K. Higher values = more heat transfer. + pub ua: ThermalConductance, + /// Efficiency factor (0.0 to 1.0). Default is 1.0 (no losses). + pub efficiency: f64, +} + +impl ThermalCoupling { + /// Creates a new thermal coupling between two circuits. + /// + /// # Arguments + /// + /// * `hot_circuit` - Circuit at higher temperature (heat source) + /// * `cold_circuit` - Circuit at lower temperature (heat sink) + /// * `ua` - Thermal conductance in W/K + /// + /// # Example + /// + /// ``` + /// use entropyk_solver::{ThermalCoupling, CircuitId}; + /// use entropyk_core::ThermalConductance; + /// + /// let coupling = ThermalCoupling::new( + /// CircuitId(0), + /// CircuitId(1), + /// ThermalConductance::from_watts_per_kelvin(1000.0), + /// ); + /// ``` + pub fn new(hot_circuit: CircuitId, cold_circuit: CircuitId, ua: ThermalConductance) -> Self { + Self { + hot_circuit, + cold_circuit, + ua, + efficiency: 1.0, + } + } + + /// Sets the efficiency factor for the coupling. + /// + /// Efficiency accounts for heat losses in the heat exchanger. + /// A value of 0.9 means 90% of theoretical heat is transferred. 
+ pub fn with_efficiency(mut self, efficiency: f64) -> Self { + self.efficiency = efficiency.clamp(0.0, 1.0); + self + } +} + +/// Computes heat transfer for a thermal coupling. +/// +/// # Formula +/// +/// Q = η × UA × (T_hot - T_cold) +/// +/// Where: +/// - Q is the heat transfer rate (W), positive means heat INTO cold circuit +/// - η is the efficiency factor +/// - UA is the thermal conductance (W/K) +/// - T_hot, T_cold are temperatures (K) +/// +/// # Sign Convention +/// +/// - Q > 0: Heat flows from hot to cold (normal operation) +/// - Q = 0: No temperature difference +/// - Q < 0: Cold is hotter than hot (reverse flow, unusual) +/// +/// # Example +/// +/// ``` +/// use entropyk_solver::{ThermalCoupling, CircuitId, compute_coupling_heat}; +/// use entropyk_core::{Temperature, ThermalConductance}; +/// +/// let coupling = ThermalCoupling::new( +/// CircuitId(0), +/// CircuitId(1), +/// ThermalConductance::from_watts_per_kelvin(1000.0), +/// ); +/// +/// let t_hot = Temperature::from_kelvin(350.0); +/// let t_cold = Temperature::from_kelvin(300.0); +/// +/// let q = compute_coupling_heat(&coupling, t_hot, t_cold); +/// assert!(q > 0.0, "Heat should flow from hot to cold"); +/// ``` +pub fn compute_coupling_heat( + coupling: &ThermalCoupling, + t_hot: Temperature, + t_cold: Temperature, +) -> f64 { + coupling.efficiency + * coupling.ua.to_watts_per_kelvin() + * (t_hot.to_kelvin() - t_cold.to_kelvin()) +} + +/// Builds a coupling graph for dependency analysis. 
+/// +/// Returns a directed graph where: +/// - Nodes are CircuitIds present in any coupling +/// - Edges point from hot_circuit to cold_circuit +fn build_coupling_graph(couplings: &[ThermalCoupling]) -> DiGraph { + let mut graph = DiGraph::new(); + let mut circuit_to_node: HashMap = HashMap::new(); + + for coupling in couplings { + // Add hot_circuit node if not present + let hot_node = *circuit_to_node + .entry(coupling.hot_circuit) + .or_insert_with(|| graph.add_node(coupling.hot_circuit)); + + // Add cold_circuit node if not present + let cold_node = *circuit_to_node + .entry(coupling.cold_circuit) + .or_insert_with(|| graph.add_node(coupling.cold_circuit)); + + // Add directed edge: hot -> cold + graph.add_edge(hot_node, cold_node, ()); + } + + graph +} + +/// Checks if the coupling graph contains circular dependencies. +/// +/// Circular dependencies occur when circuits are mutually thermally coupled +/// (e.g., A heats B, and B heats A). When circular dependencies exist, +/// the solver must solve those circuits simultaneously rather than sequentially. 
+/// +/// # Example +/// +/// ``` +/// use entropyk_solver::{ThermalCoupling, CircuitId, has_circular_dependencies}; +/// use entropyk_core::ThermalConductance; +/// +/// // No circular dependency: A → B → C +/// let couplings = vec![ +/// ThermalCoupling::new(CircuitId(0), CircuitId(1), ThermalConductance::from_watts_per_kelvin(100.0)), +/// ThermalCoupling::new(CircuitId(1), CircuitId(2), ThermalConductance::from_watts_per_kelvin(100.0)), +/// ]; +/// assert!(!has_circular_dependencies(&couplings)); +/// +/// // Circular dependency: A → B and B → A +/// let couplings_circular = vec![ +/// ThermalCoupling::new(CircuitId(0), CircuitId(1), ThermalConductance::from_watts_per_kelvin(100.0)), +/// ThermalCoupling::new(CircuitId(1), CircuitId(0), ThermalConductance::from_watts_per_kelvin(100.0)), +/// ]; +/// assert!(has_circular_dependencies(&couplings_circular)); +/// ``` +pub fn has_circular_dependencies(couplings: &[ThermalCoupling]) -> bool { + if couplings.is_empty() { + return false; + } + let graph = build_coupling_graph(couplings); + is_cyclic_directed(&graph) +} + +/// Returns groups of circuits that must be solved simultaneously. +/// +/// Groups are computed using strongly connected components (SCC) analysis +/// of the coupling graph. Circuits in the same SCC have circular thermal +/// dependencies and must be solved together. +/// +/// # Returns +/// +/// A vector of vectors, where each inner vector contains CircuitIds that +/// must be solved simultaneously. Single-element vectors indicate circuits +/// that can be solved independently (in topological order). 
+/// +/// # Example +/// +/// ``` +/// use entropyk_solver::{ThermalCoupling, CircuitId, coupling_groups}; +/// use entropyk_core::ThermalConductance; +/// +/// // A → B, B and C independent +/// let couplings = vec![ +/// ThermalCoupling::new(CircuitId(0), CircuitId(1), ThermalConductance::from_watts_per_kelvin(100.0)), +/// ]; +/// let groups = coupling_groups(&couplings); +/// // Groups will contain individual circuits since there's no cycle +/// ``` +pub fn coupling_groups(couplings: &[ThermalCoupling]) -> Vec> { + if couplings.is_empty() { + return Vec::new(); + } + + let graph = build_coupling_graph(couplings); + let sccs = kosaraju_scc(&graph); + + sccs.into_iter() + .map(|node_indices| node_indices.into_iter().map(|idx| graph[idx]).collect()) + .collect() +} + +#[cfg(test)] +mod tests { + use super::*; + use approx::assert_relative_eq; + + fn make_coupling(hot: u8, cold: u8, ua_w_per_k: f64) -> ThermalCoupling { + ThermalCoupling::new( + CircuitId(hot), + CircuitId(cold), + ThermalConductance::from_watts_per_kelvin(ua_w_per_k), + ) + } + + #[test] + fn test_thermal_coupling_creation() { + let coupling = ThermalCoupling::new( + CircuitId(0), + CircuitId(1), + ThermalConductance::from_watts_per_kelvin(1000.0), + ); + + assert_eq!(coupling.hot_circuit, CircuitId(0)); + assert_eq!(coupling.cold_circuit, CircuitId(1)); + assert_relative_eq!(coupling.ua.to_watts_per_kelvin(), 1000.0, epsilon = 1e-10); + assert_relative_eq!(coupling.efficiency, 1.0, epsilon = 1e-10); + } + + #[test] + fn test_thermal_coupling_with_efficiency() { + let coupling = ThermalCoupling::new( + CircuitId(0), + CircuitId(1), + ThermalConductance::from_watts_per_kelvin(1000.0), + ) + .with_efficiency(0.85); + + assert_relative_eq!(coupling.efficiency, 0.85, epsilon = 1e-10); + } + + #[test] + fn test_efficiency_clamped() { + let coupling = make_coupling(0, 1, 100.0).with_efficiency(1.5); + assert_relative_eq!(coupling.efficiency, 1.0, epsilon = 1e-10); + + let coupling = make_coupling(0, 1, 
100.0).with_efficiency(-0.5); + assert_relative_eq!(coupling.efficiency, 0.0, epsilon = 1e-10); + } + + #[test] + fn test_compute_coupling_heat_positive() { + let coupling = make_coupling(0, 1, 1000.0); + let t_hot = Temperature::from_kelvin(350.0); + let t_cold = Temperature::from_kelvin(300.0); + + let q = compute_coupling_heat(&coupling, t_hot, t_cold); + + // Q = 1.0 * 1000 * (350 - 300) = 50000 W + assert_relative_eq!(q, 50000.0, epsilon = 1e-10); + assert!(q > 0.0, "Heat should be positive (into cold circuit)"); + } + + #[test] + fn test_compute_coupling_heat_zero() { + let coupling = make_coupling(0, 1, 1000.0); + let t_hot = Temperature::from_kelvin(300.0); + let t_cold = Temperature::from_kelvin(300.0); + + let q = compute_coupling_heat(&coupling, t_hot, t_cold); + + assert_relative_eq!(q, 0.0, epsilon = 1e-10); + } + + #[test] + fn test_compute_coupling_heat_negative() { + let coupling = make_coupling(0, 1, 1000.0); + let t_hot = Temperature::from_kelvin(280.0); + let t_cold = Temperature::from_kelvin(300.0); + + let q = compute_coupling_heat(&coupling, t_hot, t_cold); + + // Q = 1000 * (280 - 300) = -20000 W (reverse flow) + assert_relative_eq!(q, -20000.0, epsilon = 1e-10); + assert!(q < 0.0, "Heat should be negative (reverse flow)"); + } + + #[test] + fn test_compute_coupling_heat_with_efficiency() { + let coupling = make_coupling(0, 1, 1000.0).with_efficiency(0.9); + let t_hot = Temperature::from_kelvin(350.0); + let t_cold = Temperature::from_kelvin(300.0); + + let q = compute_coupling_heat(&coupling, t_hot, t_cold); + + // Q = 0.9 * 1000 * 50 = 45000 W + assert_relative_eq!(q, 45000.0, epsilon = 1e-10); + } + + #[test] + fn test_energy_conservation() { + // For two circuits coupled, Q_hot = -Q_cold + // This means the heat leaving hot circuit equals heat entering cold circuit + let coupling = make_coupling(0, 1, 1000.0); + let t_hot = Temperature::from_kelvin(350.0); + let t_cold = Temperature::from_kelvin(300.0); + + let q_into_cold = 
compute_coupling_heat(&coupling, t_hot, t_cold); + let q_out_of_hot = -q_into_cold; // By convention + + // Heat into cold = - (heat out of hot) + assert_relative_eq!(q_into_cold, -q_out_of_hot, epsilon = 1e-10); + assert!(q_into_cold > 0.0, "Cold circuit receives heat"); + assert!(q_out_of_hot < 0.0, "Hot circuit loses heat"); + } + + #[test] + fn test_no_circular_dependency() { + // Linear chain: A → B → C + let couplings = vec![make_coupling(0, 1, 100.0), make_coupling(1, 2, 100.0)]; + + assert!(!has_circular_dependencies(&couplings)); + } + + #[test] + fn test_circular_dependency_detection() { + // Mutual: A → B and B → A + let couplings = vec![make_coupling(0, 1, 100.0), make_coupling(1, 0, 100.0)]; + + assert!(has_circular_dependencies(&couplings)); + } + + #[test] + fn test_circular_dependency_complex() { + // Triangle: A → B → C → A + let couplings = vec![ + make_coupling(0, 1, 100.0), + make_coupling(1, 2, 100.0), + make_coupling(2, 0, 100.0), + ]; + + assert!(has_circular_dependencies(&couplings)); + } + + #[test] + fn test_empty_couplings_no_cycle() { + let couplings: Vec = vec![]; + assert!(!has_circular_dependencies(&couplings)); + } + + #[test] + fn test_single_coupling_no_cycle() { + let couplings = vec![make_coupling(0, 1, 100.0)]; + assert!(!has_circular_dependencies(&couplings)); + } + + #[test] + fn test_coupling_groups_no_cycle() { + // A → B, C independent + let couplings = vec![make_coupling(0, 1, 100.0)]; + + let groups = coupling_groups(&couplings); + + // With no cycles, each circuit is its own group + assert_eq!(groups.len(), 2); + + // Each group should have exactly one circuit + for group in &groups { + assert_eq!(group.len(), 1); + } + + // Collect all circuit IDs + let all_circuits: std::collections::HashSet = + groups.iter().flat_map(|g| g.iter().copied()).collect(); + assert!(all_circuits.contains(&CircuitId(0))); + assert!(all_circuits.contains(&CircuitId(1))); + } + + #[test] + fn test_coupling_groups_with_cycle() { + // A ↔ B 
(mutual), C → D + let couplings = vec![ + make_coupling(0, 1, 100.0), + make_coupling(1, 0, 100.0), + make_coupling(2, 3, 100.0), + ]; + + let groups = coupling_groups(&couplings); + + // Should have 3 groups: [A, B] as one, C as one, D as one + assert_eq!(groups.len(), 3); + + // Find the group with 2 circuits (A and B) + let large_group: Vec<&Vec> = groups.iter().filter(|g| g.len() == 2).collect(); + assert_eq!(large_group.len(), 1); + + let ab_group = large_group[0]; + assert!(ab_group.contains(&CircuitId(0))); + assert!(ab_group.contains(&CircuitId(1))); + } + + #[test] + fn test_coupling_groups_empty() { + let couplings: Vec = vec![]; + let groups = coupling_groups(&couplings); + assert!(groups.is_empty()); + } +} diff --git a/crates/solver/src/error.rs b/crates/solver/src/error.rs new file mode 100644 index 0000000..e7e01bb --- /dev/null +++ b/crates/solver/src/error.rs @@ -0,0 +1,72 @@ +//! Topology and solver error types. + +use thiserror::Error; + +/// Errors that can occur during topology validation or system construction. +#[derive(Error, Debug, Clone, PartialEq)] +pub enum TopologyError { + /// A node has no edges (isolated/dangling node). + #[error("Isolated node at index {node_index}: all components must be connected")] + IsolatedNode { + /// Index of the isolated node in the graph + node_index: usize, + }, + + /// Not all ports are connected (reserved for Story 3.2 port validation). + #[error("Unconnected ports: {message}")] + #[allow(dead_code)] + UnconnectedPorts { + /// Description of which ports are unconnected + message: String, + }, + + /// Topology validation failed for another reason. + #[error("Invalid topology: {message}")] + #[allow(dead_code)] + InvalidTopology { + /// Description of the validation failure + message: String, + }, + + /// Attempted to connect nodes in different circuits via a flow edge. + /// Flow edges must connect nodes within the same circuit. Cross-circuit + /// thermal coupling is handled in Story 3.4. 
+ #[error("Cross-circuit connection not allowed: source circuit {source_circuit}, target circuit {target_circuit}. Flow edges connect only nodes within the same circuit")] + CrossCircuitConnection { + /// Circuit ID of the source node + source_circuit: u8, + /// Circuit ID of the target node + target_circuit: u8, + }, + + /// Too many circuits requested. Maximum is 5 (circuit IDs 0..=4). + #[error("Too many circuits: requested {requested}, maximum is 5")] + TooManyCircuits { + /// The requested circuit ID that exceeded the limit + requested: u8, + }, + + /// Attempted to add thermal coupling with a circuit that doesn't exist. + #[error( + "Invalid circuit for thermal coupling: circuit {circuit_id} does not exist in the system" + )] + InvalidCircuitForCoupling { + /// The circuit ID that was referenced but doesn't exist + circuit_id: u8, + }, +} + +/// Error when adding an edge with port validation. +/// +/// Combines port validation errors ([`entropyk_components::ConnectionError`]) and topology errors +/// ([`TopologyError`]) such as cross-circuit connection attempts. +#[derive(Error, Debug, Clone, PartialEq)] +pub enum AddEdgeError { + /// Port validation failed (fluid, pressure, enthalpy mismatch). + #[error(transparent)] + Connection(#[from] entropyk_components::ConnectionError), + + /// Topology validation failed (e.g. cross-circuit connection). + #[error(transparent)] + Topology(#[from] TopologyError), +} diff --git a/crates/solver/src/graph.rs b/crates/solver/src/graph.rs new file mode 100644 index 0000000..0743b2c --- /dev/null +++ b/crates/solver/src/graph.rs @@ -0,0 +1,6 @@ +//! Graph building helpers for system topology. +//! +//! This module provides utilities for constructing and manipulating +//! the system graph. The main [`System`](crate::system::System) struct +//! handles graph operations; this module may be extended with convenience +//! builders in future stories. 
diff --git a/crates/solver/src/initializer.rs b/crates/solver/src/initializer.rs new file mode 100644 index 0000000..c86cf0f --- /dev/null +++ b/crates/solver/src/initializer.rs @@ -0,0 +1,675 @@ +//! Smart initialization heuristic for thermodynamic system solvers. +//! +//! This module provides [`SmartInitializer`], which generates physically +//! reasonable initial guesses for the solver state vector from source and sink +//! temperatures. It uses the Antoine equation to estimate saturation pressures +//! for common refrigerants without requiring an external fluid backend. +//! +//! # Algorithm +//! +//! 1. Estimate evaporator pressure: `P_evap = P_sat(T_source - ΔT_approach)` +//! 2. Estimate condenser pressure: `P_cond = P_sat(T_sink + ΔT_approach)` +//! 3. Clamp `P_evap` to `0.5 * P_critical` if it exceeds the critical pressure +//! 4. Fill the state vector with `[P, h_default]` per edge, using circuit topology +//! +//! # Supported Fluids +//! +//! Built-in Antoine coefficients are provided for: +//! - R134a, R410A, R32, R744 (CO2), R290 (Propane) +//! +//! Unknown fluids fall back to sensible defaults (5 bar / 20 bar) with a warning. +//! +//! # No-Allocation Guarantee +//! +//! [`SmartInitializer::populate_state`] writes to a pre-allocated `&mut [f64]` +//! slice and performs no heap allocation. + +use entropyk_components::port::FluidId; +use entropyk_core::{Enthalpy, Pressure, Temperature}; +use thiserror::Error; + +use crate::system::System; + +// ───────────────────────────────────────────────────────────────────────────── +// Error types +// ───────────────────────────────────────────────────────────────────────────── + +/// Errors that can occur during smart initialization. +#[derive(Error, Debug, Clone, PartialEq)] +pub enum InitializerError { + /// Source or sink temperature exceeds the critical temperature for the fluid. + /// + /// Antoine equation is not valid above the critical temperature. 
The caller + /// should either use a different fluid or provide a manual initial state. + #[error("Temperature {temp_celsius:.1}°C exceeds critical temperature for {fluid}")] + TemperatureAboveCritical { + /// Temperature that triggered the error (°C). + temp_celsius: f64, + /// Fluid identifier string. + fluid: String, + }, + + /// The provided state slice length does not match the system state vector length. + #[error( + "State slice length {actual} does not match system state vector length {expected}" + )] + StateLengthMismatch { + /// Expected length (from `system.state_vector_len()`). + expected: usize, + /// Actual length of the provided slice. + actual: usize, + }, +} + +// ───────────────────────────────────────────────────────────────────────────── +// Antoine coefficients +// ───────────────────────────────────────────────────────────────────────────── + +/// Antoine equation coefficients for saturation pressure estimation. +/// +/// The Antoine equation (log₁₀ form) is: +/// +/// ```text +/// log10(P_sat [Pa]) = A - B / (C + T [°C]) +/// ``` +/// +/// Coefficients are tuned for the −40°C to +80°C range. Accuracy is within 5% +/// of NIST/CoolProp values — sufficient for initialization purposes. +#[derive(Debug, Clone, PartialEq)] +pub struct AntoineCoefficients { + /// Antoine constant A (dimensionless, log₁₀ scale, Pa units). + pub a: f64, + /// Antoine constant B (°C). + pub b: f64, + /// Antoine constant C (°C offset). + pub c: f64, + /// Critical pressure of the fluid (Pa). + pub p_critical_pa: f64, +} + +impl AntoineCoefficients { + /// Returns the built-in coefficients for the given fluid identifier string. + /// + /// Matching is case-insensitive. Returns `None` for unknown fluids. 
+ pub fn for_fluid(fluid_str: &str) -> Option<&'static AntoineCoefficients> { + // Normalize: uppercase, strip dashes/spaces + let normalized = fluid_str.to_uppercase().replace(['-', ' '], ""); + ANTOINE_TABLE + .iter() + .find(|(name, _)| *name == normalized.as_str()) + .map(|(_, coeffs)| coeffs) + } +} + +/// Compute saturation pressure (Pa) from temperature (°C) using Antoine equation. +/// +/// `log10(P_sat [Pa]) = A - B / (C + T [°C])` +/// +/// This is a pure arithmetic function with no heap allocation. +pub fn antoine_pressure(t_celsius: f64, coeffs: &AntoineCoefficients) -> f64 { + let log10_p = coeffs.a - coeffs.b / (coeffs.c + t_celsius); + 10f64.powf(log10_p) +} + +/// Built-in Antoine coefficient table for common refrigerants. +/// +/// Coefficients valid for approximately −40°C to +80°C. +/// Accuracy: within 5% of NIST saturation pressure values. +/// +/// Formula: `log10(P_sat [Pa]) = A - B / (C + T [°C])` +/// +/// A values are derived from NIST reference saturation pressures: +/// - R134a: P_sat(0°C) = 292,800 Pa → A = log10(292800) + 1766/243 = 12.739 +/// - R410A: P_sat(0°C) = 798,000 Pa → A = log10(798000) + 1885/243 = 13.659 +/// - R32: P_sat(0°C) = 810,000 Pa → A = log10(810000) + 1780/243 = 13.233 +/// - R744: P_sat(20°C) = 5,730,000 Pa → A = log10(5730000) + 1347.8/293 = 11.357 +/// - R290: P_sat(0°C) = 474,000 Pa → A = log10(474000) + 1656/243 = 12.491 +/// +/// | Fluid | A (for Pa) | B | C | P_critical (Pa) | +/// |--------|------------|---------|-------|-----------------| +/// | R134a | 12.739 | 1766.0 | 243.0 | 4,059,280 | +/// | R410A | 13.659 | 1885.0 | 243.0 | 4,901,200 | +/// | R32 | 13.233 | 1780.0 | 243.0 | 5,782,000 | +/// | R744 | 11.357 | 1347.8 | 273.0 | 7,377,300 | +/// | R290 | 12.491 | 1656.0 | 243.0 | 4,247,200 | +static ANTOINE_TABLE: &[(&str, AntoineCoefficients)] = &[ + ( + "R134A", + AntoineCoefficients { + a: 12.739, + b: 1766.0, + c: 243.0, + p_critical_pa: 4_059_280.0, + }, + ), + ( + "R410A", + AntoineCoefficients { 
+ a: 13.659, + b: 1885.0, + c: 243.0, + p_critical_pa: 4_901_200.0, + }, + ), + ( + "R32", + AntoineCoefficients { + a: 13.233, + b: 1780.0, + c: 243.0, + p_critical_pa: 5_782_000.0, + }, + ), + ( + "R744", + AntoineCoefficients { + a: 11.357, + b: 1347.8, + c: 273.0, + p_critical_pa: 7_377_300.0, + }, + ), + ( + "R290", + AntoineCoefficients { + a: 12.491, + b: 1656.0, + c: 243.0, + p_critical_pa: 4_247_200.0, + }, + ), +]; + +// ───────────────────────────────────────────────────────────────────────────── +// Initializer configuration +// ───────────────────────────────────────────────────────────────────────────── + +/// Configuration for [`SmartInitializer`]. +#[derive(Debug, Clone, PartialEq)] +pub struct InitializerConfig { + /// Fluid identifier used for Antoine coefficient lookup. + pub fluid: FluidId, + + /// Temperature approach difference for pressure estimation (K). + /// + /// - Evaporator: `P_evap = P_sat(T_source - dt_approach)` + /// - Condenser: `P_cond = P_sat(T_sink + dt_approach)` + /// + /// Default: 5.0 K. + pub dt_approach: f64, +} + +impl Default for InitializerConfig { + fn default() -> Self { + Self { + fluid: FluidId::new("R134a"), + dt_approach: 5.0, + } + } +} + +// ───────────────────────────────────────────────────────────────────────────── +// SmartInitializer +// ───────────────────────────────────────────────────────────────────────────── + +/// Smart initialization heuristic for thermodynamic solver state vectors. +/// +/// Uses the Antoine equation to estimate saturation pressures from source and +/// sink temperatures, then fills a pre-allocated state vector with physically +/// reasonable initial guesses. 
+/// +/// # Example +/// +/// ```rust,no_run +/// use entropyk_solver::initializer::{SmartInitializer, InitializerConfig}; +/// use entropyk_core::{Temperature, Enthalpy}; +/// +/// let init = SmartInitializer::new(InitializerConfig::default()); +/// let (p_evap, p_cond) = init +/// .estimate_pressures( +/// Temperature::from_celsius(5.0), +/// Temperature::from_celsius(40.0), +/// ) +/// .unwrap(); +/// ``` +#[derive(Debug, Clone)] +pub struct SmartInitializer { + /// Configuration for this initializer. + pub config: InitializerConfig, +} + +impl SmartInitializer { + /// Creates a new `SmartInitializer` with the given configuration. + pub fn new(config: InitializerConfig) -> Self { + Self { config } + } + + /// Estimate `(P_evap, P_cond)` from source and sink temperatures. + /// + /// Uses the Antoine equation with the configured fluid and approach ΔT: + /// - `P_evap = P_sat(T_source - ΔT_approach)`, clamped to `0.5 * P_critical` + /// - `P_cond = P_sat(T_sink + ΔT_approach)` + /// + /// For unknown fluids, returns sensible defaults (5 bar / 20 bar) with a + /// `tracing::warn!` log entry. + /// + /// # Errors + /// + /// Returns [`InitializerError::TemperatureAboveCritical`] if the adjusted + /// source temperature exceeds the critical temperature for a known fluid. 
+ pub fn estimate_pressures( + &self, + t_source: Temperature, + t_sink: Temperature, + ) -> Result<(Pressure, Pressure), InitializerError> { + let fluid_str = self.config.fluid.to_string(); + + match AntoineCoefficients::for_fluid(&fluid_str) { + None => { + // Unknown fluid: emit warning and return sensible defaults + tracing::warn!( + fluid = %fluid_str, + "Unknown fluid for Antoine estimation — using fallback pressures \ + (P_evap = 5 bar, P_cond = 20 bar)" + ); + Ok(( + Pressure::from_bar(5.0), + Pressure::from_bar(20.0), + )) + } + Some(coeffs) => { + let t_source_c = t_source.to_celsius(); + let t_sink_c = t_sink.to_celsius(); + + // Evaporator: T_source - ΔT_approach + let t_evap_c = t_source_c - self.config.dt_approach; + let p_evap_pa = antoine_pressure(t_evap_c, coeffs); + + // Clamp P_evap to 0.5 * P_critical (AC: #2) + let p_evap_pa = if p_evap_pa >= coeffs.p_critical_pa { + tracing::warn!( + fluid = %fluid_str, + t_evap_celsius = t_evap_c, + p_evap_pa = p_evap_pa, + p_critical_pa = coeffs.p_critical_pa, + "Estimated P_evap exceeds critical pressure — clamping to 0.5 * P_critical" + ); + 0.5 * coeffs.p_critical_pa + } else { + p_evap_pa + }; + + // Condenser: T_sink + ΔT_approach (AC: #3) + let t_cond_c = t_sink_c + self.config.dt_approach; + let p_cond_pa = antoine_pressure(t_cond_c, coeffs); + + // Clamp P_cond to 0.5 * P_critical if it exceeds critical + let p_cond_pa = if p_cond_pa >= coeffs.p_critical_pa { + tracing::warn!( + fluid = %fluid_str, + t_cond_celsius = t_cond_c, + p_cond_pa = p_cond_pa, + p_critical_pa = coeffs.p_critical_pa, + "Estimated P_cond exceeds critical pressure — clamping to 0.5 * P_critical" + ); + 0.5 * coeffs.p_critical_pa + } else { + p_cond_pa + }; + + tracing::debug!( + fluid = %fluid_str, + t_source_celsius = t_source_c, + t_sink_celsius = t_sink_c, + p_evap_bar = p_evap_pa / 1e5, + p_cond_bar = p_cond_pa / 1e5, + "SmartInitializer: estimated pressures" + ); + + Ok(( + Pressure::from_pascals(p_evap_pa), + 
Pressure::from_pascals(p_cond_pa), + )) + } + } + } + + /// Fill a pre-allocated state vector with smart initial guesses. + /// + /// No heap allocation is performed. The `state` slice must have length equal + /// to `system.state_vector_len()` (i.e., `2 * edge_count`). + /// + /// State layout per edge: `[P_edge_i, h_edge_i]` + /// + /// Pressure assignment follows circuit topology: + /// - Edges in circuit 0 → `p_evap` + /// - Edges in circuit 1+ → `p_cond` + /// - Single-circuit systems: all edges use `p_evap` + /// + /// # Errors + /// + /// Returns [`InitializerError::StateLengthMismatch`] if `state.len()` does + /// not match `system.state_vector_len()`. + pub fn populate_state( + &self, + system: &System, + p_evap: Pressure, + p_cond: Pressure, + h_default: Enthalpy, + state: &mut [f64], + ) -> Result<(), InitializerError> { + let expected = system.state_vector_len(); + if state.len() != expected { + return Err(InitializerError::StateLengthMismatch { + expected, + actual: state.len(), + }); + } + + let p_evap_pa = p_evap.to_pascals(); + let p_cond_pa = p_cond.to_pascals(); + let h_jkg = h_default.to_joules_per_kg(); + + for (i, edge_idx) in system.edge_indices().enumerate() { + let circuit = system.edge_circuit(edge_idx); + let p = if circuit.0 == 0 { p_evap_pa } else { p_cond_pa }; + state[2 * i] = p; + state[2 * i + 1] = h_jkg; + } + + Ok(()) + } +} + +// ───────────────────────────────────────────────────────────────────────────── +// Tests +// ───────────────────────────────────────────────────────────────────────────── + +#[cfg(test)] +mod tests { + use super::*; + use approx::assert_relative_eq; + + // ── Antoine equation unit tests ────────────────────────────────────────── + + /// AC: #1, #5 — R134a at 0°C: P_sat ≈ 2.93 bar (293,000 Pa), within 5% + #[test] + fn test_antoine_r134a_at_0c() { + let coeffs = AntoineCoefficients::for_fluid("R134a").unwrap(); + let p_pa = antoine_pressure(0.0, coeffs); + // Expected: ~2.93 bar = 293,000 Pa + 
assert_relative_eq!(p_pa, 293_000.0, max_relative = 0.05); + } + + /// AC: #5 — R744 (CO2) at 20°C: P_sat ≈ 57.3 bar (5,730,000 Pa), within 5% + #[test] + fn test_antoine_r744_at_20c() { + let coeffs = AntoineCoefficients::for_fluid("R744").unwrap(); + let p_pa = antoine_pressure(20.0, coeffs); + // Expected: ~57.3 bar = 5,730,000 Pa + assert_relative_eq!(p_pa, 5_730_000.0, max_relative = 0.05); + } + + /// AC: #5 — Case-insensitive fluid lookup + #[test] + fn test_fluid_lookup_case_insensitive() { + assert!(AntoineCoefficients::for_fluid("r134a").is_some()); + assert!(AntoineCoefficients::for_fluid("R134A").is_some()); + assert!(AntoineCoefficients::for_fluid("R134a").is_some()); + assert!(AntoineCoefficients::for_fluid("r744").is_some()); + assert!(AntoineCoefficients::for_fluid("R290").is_some()); + } + + /// AC: #5 — Unknown fluid returns None + #[test] + fn test_fluid_lookup_unknown() { + assert!(AntoineCoefficients::for_fluid("R999").is_none()); + assert!(AntoineCoefficients::for_fluid("").is_none()); + } + + // ── SmartInitializer::estimate_pressures tests ─────────────────────────── + + /// AC: #2 — P_evap < P_critical for all built-in fluids at T_source = −40°C + #[test] + fn test_p_evap_below_critical_all_fluids() { + let fluids = ["R134a", "R410A", "R32", "R744", "R290"]; + for fluid in fluids { + let init = SmartInitializer::new(InitializerConfig { + fluid: FluidId::new(fluid), + dt_approach: 5.0, + }); + let (p_evap, _) = init + .estimate_pressures( + Temperature::from_celsius(-40.0), + Temperature::from_celsius(40.0), + ) + .unwrap(); + let coeffs = AntoineCoefficients::for_fluid(fluid).unwrap(); + assert!( + p_evap.to_pascals() < coeffs.p_critical_pa, + "P_evap ({:.0} Pa) should be < P_critical ({:.0} Pa) for {}", + p_evap.to_pascals(), + coeffs.p_critical_pa, + fluid + ); + } + } + + /// AC: #3 — P_cond = P_sat(T_sink + 5K) for default ΔT_approach + #[test] + fn test_p_cond_approach_default() { + let init = 
SmartInitializer::new(InitializerConfig::default()); // R134a, dt=5.0 + let t_sink = Temperature::from_celsius(40.0); + let (_, p_cond) = init + .estimate_pressures(Temperature::from_celsius(5.0), t_sink) + .unwrap(); + + // Expected: P_sat(45°C) for R134a + let coeffs = AntoineCoefficients::for_fluid("R134a").unwrap(); + let expected_pa = antoine_pressure(45.0, coeffs); + assert_relative_eq!(p_cond.to_pascals(), expected_pa, max_relative = 1e-9); + } + + /// AC: #6 — Unknown fluid returns fallback (5 bar / 20 bar) without panic + #[test] + fn test_unknown_fluid_fallback() { + let init = SmartInitializer::new(InitializerConfig { + fluid: FluidId::new("R999-Unknown"), + dt_approach: 5.0, + }); + let result = init.estimate_pressures( + Temperature::from_celsius(5.0), + Temperature::from_celsius(40.0), + ); + assert!(result.is_ok(), "Unknown fluid should not return Err"); + let (p_evap, p_cond) = result.unwrap(); + assert_relative_eq!(p_evap.to_bar(), 5.0, max_relative = 1e-9); + assert_relative_eq!(p_cond.to_bar(), 20.0, max_relative = 1e-9); + } + + /// AC: #1 — Verify evaporator pressure uses T_source - ΔT_approach + #[test] + fn test_p_evap_uses_approach_delta() { + let dt = 5.0; + let init = SmartInitializer::new(InitializerConfig { + fluid: FluidId::new("R134a"), + dt_approach: dt, + }); + let t_source = Temperature::from_celsius(10.0); + let (p_evap, _) = init + .estimate_pressures(t_source, Temperature::from_celsius(40.0)) + .unwrap(); + + let coeffs = AntoineCoefficients::for_fluid("R134a").unwrap(); + let expected_pa = antoine_pressure(10.0 - dt, coeffs); // T_source - ΔT + assert_relative_eq!(p_evap.to_pascals(), expected_pa, max_relative = 1e-9); + } + + // ── SmartInitializer::populate_state tests ─────────────────────────────── + + /// AC: #4, #7 — populate_state fills state vector correctly for a 2-edge system. + /// + /// This test verifies the no-allocation signature: the function takes `&mut [f64]` + /// and writes in-place without allocating. 
+ #[test] + fn test_populate_state_2_edges() { + use crate::system::System; + use entropyk_components::{Component, ComponentError, ConnectedPort, JacobianBuilder, ResidualVector, SystemState}; + + struct MockComp; + impl Component for MockComp { + fn compute_residuals(&self, _s: &SystemState, r: &mut ResidualVector) -> Result<(), ComponentError> { + for v in r.iter_mut() { *v = 0.0; } + Ok(()) + } + fn jacobian_entries(&self, _s: &SystemState, j: &mut JacobianBuilder) -> Result<(), ComponentError> { + j.add_entry(0, 0, 1.0); + Ok(()) + } + fn n_equations(&self) -> usize { 1 } + fn get_ports(&self) -> &[ConnectedPort] { &[] } + } + + let mut sys = System::new(); + let n0 = sys.add_component(Box::new(MockComp)); + let n1 = sys.add_component(Box::new(MockComp)); + let n2 = sys.add_component(Box::new(MockComp)); + sys.add_edge(n0, n1).unwrap(); + sys.add_edge(n1, n2).unwrap(); + sys.finalize().unwrap(); + + let init = SmartInitializer::new(InitializerConfig::default()); + let p_evap = Pressure::from_bar(3.0); + let p_cond = Pressure::from_bar(15.0); + let h_default = Enthalpy::from_joules_per_kg(400_000.0); + + // Pre-allocated slice — no allocation in populate_state + let mut state = vec![0.0f64; sys.state_vector_len()]; + init.populate_state(&sys, p_evap, p_cond, h_default, &mut state) + .unwrap(); + + // All edges in circuit 0 (single-circuit) → p_evap + assert_eq!(state.len(), 4); // 2 edges × 2 entries + assert_relative_eq!(state[0], p_evap.to_pascals(), max_relative = 1e-9); + assert_relative_eq!(state[1], h_default.to_joules_per_kg(), max_relative = 1e-9); + assert_relative_eq!(state[2], p_evap.to_pascals(), max_relative = 1e-9); + assert_relative_eq!(state[3], h_default.to_joules_per_kg(), max_relative = 1e-9); + } + + /// AC: #4 — populate_state uses P_cond for circuit 1 edges in multi-circuit system. 
+ #[test] + fn test_populate_state_multi_circuit() { + use crate::system::{CircuitId, System}; + use entropyk_components::{Component, ComponentError, ConnectedPort, JacobianBuilder, ResidualVector, SystemState}; + + struct MockComp; + impl Component for MockComp { + fn compute_residuals(&self, _s: &SystemState, r: &mut ResidualVector) -> Result<(), ComponentError> { + for v in r.iter_mut() { *v = 0.0; } + Ok(()) + } + fn jacobian_entries(&self, _s: &SystemState, j: &mut JacobianBuilder) -> Result<(), ComponentError> { + j.add_entry(0, 0, 1.0); + Ok(()) + } + fn n_equations(&self) -> usize { 1 } + fn get_ports(&self) -> &[ConnectedPort] { &[] } + } + + let mut sys = System::new(); + // Circuit 0: evaporator side + let n0 = sys.add_component_to_circuit(Box::new(MockComp), CircuitId(0)).unwrap(); + let n1 = sys.add_component_to_circuit(Box::new(MockComp), CircuitId(0)).unwrap(); + // Circuit 1: condenser side + let n2 = sys.add_component_to_circuit(Box::new(MockComp), CircuitId(1)).unwrap(); + let n3 = sys.add_component_to_circuit(Box::new(MockComp), CircuitId(1)).unwrap(); + + sys.add_edge(n0, n1).unwrap(); // circuit 0 edge + sys.add_edge(n2, n3).unwrap(); // circuit 1 edge + sys.finalize().unwrap(); + + let init = SmartInitializer::new(InitializerConfig::default()); + let p_evap = Pressure::from_bar(3.0); + let p_cond = Pressure::from_bar(15.0); + let h_default = Enthalpy::from_joules_per_kg(400_000.0); + + let mut state = vec![0.0f64; sys.state_vector_len()]; + init.populate_state(&sys, p_evap, p_cond, h_default, &mut state) + .unwrap(); + + assert_eq!(state.len(), 4); // 2 edges × 2 entries + // Edge 0 (circuit 0) → p_evap + assert_relative_eq!(state[0], p_evap.to_pascals(), max_relative = 1e-9); + assert_relative_eq!(state[1], h_default.to_joules_per_kg(), max_relative = 1e-9); + // Edge 1 (circuit 1) → p_cond + assert_relative_eq!(state[2], p_cond.to_pascals(), max_relative = 1e-9); + assert_relative_eq!(state[3], h_default.to_joules_per_kg(), max_relative = 
1e-9); + } + + /// AC: #7 — populate_state returns error on length mismatch (no panic). + #[test] + fn test_populate_state_length_mismatch() { + use crate::system::System; + use entropyk_components::{Component, ComponentError, ConnectedPort, JacobianBuilder, ResidualVector, SystemState}; + + struct MockComp; + impl Component for MockComp { + fn compute_residuals(&self, _s: &SystemState, r: &mut ResidualVector) -> Result<(), ComponentError> { + for v in r.iter_mut() { *v = 0.0; } + Ok(()) + } + fn jacobian_entries(&self, _s: &SystemState, j: &mut JacobianBuilder) -> Result<(), ComponentError> { + j.add_entry(0, 0, 1.0); + Ok(()) + } + fn n_equations(&self) -> usize { 1 } + fn get_ports(&self) -> &[ConnectedPort] { &[] } + } + + let mut sys = System::new(); + let n0 = sys.add_component(Box::new(MockComp)); + let n1 = sys.add_component(Box::new(MockComp)); + sys.add_edge(n0, n1).unwrap(); + sys.finalize().unwrap(); + + let init = SmartInitializer::new(InitializerConfig::default()); + let p_evap = Pressure::from_bar(3.0); + let p_cond = Pressure::from_bar(15.0); + let h_default = Enthalpy::from_joules_per_kg(400_000.0); + + // Wrong length: system has 2 state entries (1 edge × 2), we provide 5 + let mut state = vec![0.0f64; 5]; + let result = init.populate_state(&sys, p_evap, p_cond, h_default, &mut state); + assert!(matches!( + result, + Err(InitializerError::StateLengthMismatch { expected: 2, actual: 5 }) + )); + } + + /// AC: #2 — P_evap is clamped to 0.5 * P_critical when above critical. + /// + /// We use R744 (CO2) at a very high source temperature to trigger clamping. 
+ #[test] + fn test_p_evap_clamped_above_critical() { + // R744 critical: 7,377,300 Pa (~73.8 bar), critical T ≈ 31°C + // At T_source = 40°C, T_evap = 35°C → P_sat > P_critical → should clamp + let init = SmartInitializer::new(InitializerConfig { + fluid: FluidId::new("R744"), + dt_approach: 5.0, + }); + let (p_evap, _) = init + .estimate_pressures( + Temperature::from_celsius(40.0), + Temperature::from_celsius(50.0), + ) + .unwrap(); + + let coeffs = AntoineCoefficients::for_fluid("R744").unwrap(); + // Must be clamped to 0.5 * P_critical + assert_relative_eq!( + p_evap.to_pascals(), + 0.5 * coeffs.p_critical_pa, + max_relative = 1e-9 + ); + } +} diff --git a/crates/solver/src/jacobian.rs b/crates/solver/src/jacobian.rs index b49daa0..6efe854 100644 --- a/crates/solver/src/jacobian.rs +++ b/crates/solver/src/jacobian.rs @@ -67,6 +67,26 @@ impl JacobianMatrix { JacobianMatrix(matrix) } + /// Updates an existing Jacobian matrix from sparse entries in-place. + /// + /// The matrix is first zeroed out, then filled with the new entries. + /// This avoids re-allocating memory during iterations, satisfying the + /// zero-allocation architecture constraint. + /// + /// # Arguments + /// + /// * `entries` - Slice of `(row, col, value)` tuples + pub fn update_from_builder(&mut self, entries: &[(usize, usize, f64)]) { + self.0.fill(0.0); + let n_rows = self.0.nrows(); + let n_cols = self.0.ncols(); + for &(row, col, value) in entries { + if row < n_rows && col < n_cols { + self.0[(row, col)] += value; + } + } + } + /// Creates a zero Jacobian matrix with the given dimensions. pub fn zeros(n_rows: usize, n_cols: usize) -> Self { JacobianMatrix(DMatrix::zeros(n_rows, n_cols)) diff --git a/crates/solver/src/lib.rs b/crates/solver/src/lib.rs new file mode 100644 index 0000000..ddc923d --- /dev/null +++ b/crates/solver/src/lib.rs @@ -0,0 +1,33 @@ +//! # Entropyk Solver +//! +//! System topology and solver engine for thermodynamic simulation. +//! +//! 
This crate provides the graph-based representation of thermodynamic systems, +//! where components are nodes and flow connections are edges. Edges index into +//! the solver's state vector (P and h per edge). + +pub mod coupling; +pub mod criteria; +pub mod error; +pub mod graph; +pub mod initializer; +pub mod jacobian; +pub mod solver; +pub mod system; + +pub use criteria::{CircuitConvergence, ConvergenceCriteria, ConvergenceReport}; +pub use coupling::{ + compute_coupling_heat, coupling_groups, has_circular_dependencies, ThermalCoupling, +}; +pub use entropyk_components::ConnectionError; +pub use error::{AddEdgeError, TopologyError}; +pub use initializer::{ + antoine_pressure, AntoineCoefficients, InitializerConfig, InitializerError, SmartInitializer, +}; +pub use jacobian::JacobianMatrix; +pub use solver::{ + ConvergedState, ConvergenceStatus, FallbackConfig, FallbackSolver, JacobianFreezingConfig, + NewtonConfig, PicardConfig, Solver, SolverError, SolverStrategy, TimeoutConfig, +}; +pub use system::{CircuitId, FlowEdge, System}; + diff --git a/crates/solver/src/solver.rs b/crates/solver/src/solver.rs index 37df96b..8369b59 100644 --- a/crates/solver/src/solver.rs +++ b/crates/solver/src/solver.rs @@ -302,6 +302,61 @@ impl Default for TimeoutConfig { } } +// ───────────────────────────────────────────────────────────────────────────── +// Jacobian Freezing Configuration (Story 4.8) +// ───────────────────────────────────────────────────────────────────────────── + +/// Configuration for Jacobian-freezing optimization. +/// +/// When enabled, the Newton-Raphson solver reuses the previously computed +/// Jacobian matrix for up to `max_frozen_iters` consecutive iterations, +/// provided the residual norm is still decreasing. This avoids expensive +/// Jacobian assembly and can reduce per-iteration CPU time by up to ~80%. 
+/// +/// # Auto-disable on divergence +/// +/// If the residual norm *increases* while a frozen Jacobian is being used, +/// the solver immediately forces a fresh Jacobian computation on the next +/// iteration and resets the frozen-iteration counter. +/// +/// # Example +/// +/// ```rust +/// use entropyk_solver::solver::{NewtonConfig, JacobianFreezingConfig}; +/// +/// let config = NewtonConfig::default() +/// .with_jacobian_freezing(JacobianFreezingConfig { +/// max_frozen_iters: 3, +/// threshold: 0.1, +/// }); +/// ``` +#[derive(Debug, Clone, PartialEq)] +pub struct JacobianFreezingConfig { + /// Maximum number of consecutive iterations the Jacobian may be reused + /// without recomputing. + /// + /// After this many frozen iterations the solver forces a fresh assembly, + /// even if the residual is still decreasing. Default: 3. + pub max_frozen_iters: usize, + + /// Residual-norm ratio threshold below which freezing is considered safe. + /// + /// Freezing is only attempted when + /// `current_norm / previous_norm < (1.0 - threshold)`, + /// ensuring that convergence is still progressing sufficiently. + /// Default: 0.1 (i.e., at least a 10 % residual decrease per step). + pub threshold: f64, +} + +impl Default for JacobianFreezingConfig { + fn default() -> Self { + Self { + max_frozen_iters: 3, + threshold: 0.1, + } + } +} + // ───────────────────────────────────────────────────────────────────────────── // Configuration structs // ───────────────────────────────────────────────────────────────────────────── @@ -393,6 +448,15 @@ pub struct NewtonConfig { /// test instead of the raw L2-norm tolerance check. The old `tolerance` field is retained /// for backward compatibility and is ignored when this is `Some`. pub convergence_criteria: Option, + + /// Jacobian-freezing optimization (Story 4.8). 
+ /// + /// When `Some`, the solver reuses the previous Jacobian matrix for up to + /// `max_frozen_iters` iterations while the residual is decreasing faster than + /// the configured threshold. Auto-disables when the residual increases. + /// + /// Default: `None` (recompute every iteration — backward-compatible). + pub jacobian_freezing: Option, } impl Default for NewtonConfig { @@ -410,6 +474,7 @@ impl Default for NewtonConfig { previous_state: None, initial_state: None, convergence_criteria: None, + jacobian_freezing: None, } } } @@ -435,6 +500,17 @@ impl NewtonConfig { self } + /// Enables Jacobian-freezing optimization (Story 4.8 — builder pattern). + /// + /// When set, the solver skips Jacobian re-assembly for iterations where the + /// residual is still decreasing, up to `config.max_frozen_iters` consecutive + /// frozen steps. Freezing is automatically disabled when the residual + /// increases. + pub fn with_jacobian_freezing(mut self, config: JacobianFreezingConfig) -> Self { + self.jacobian_freezing = Some(config); + self + } + /// Computes the residual norm (L2 norm of the residual vector). fn residual_norm(residuals: &[f64]) -> f64 { residuals.iter().map(|r| r * r).sum::().sqrt() @@ -658,6 +734,14 @@ impl Solver for NewtonConfig { let mut best_state: Vec = vec![0.0; n_state]; let mut best_residual: f64; + // Story 4.8 — Jacobian-freezing tracking state. + // `frozen_count` tracks how many consecutive iterations have reused the Jacobian. + // `force_recompute` is set when a residual increase is detected. 
+ // The Jacobian matrix itself is pre-allocated here (Zero Allocation AC) + let mut jacobian_matrix = JacobianMatrix::zeros(n_equations, n_state); + let mut frozen_count: usize = 0; + let mut force_recompute: bool = true; // Always compute on the very first iteration + // Initial residual computation system .compute_residuals(&state, &mut residuals) @@ -728,32 +812,74 @@ impl Solver for NewtonConfig { } } - // Assemble Jacobian (AC: #3) - jacobian_builder.clear(); - let jacobian_matrix = if self.use_numerical_jacobian { - // Numerical Jacobian via finite differences - let compute_residuals_fn = |s: &[f64], r: &mut [f64]| { - let s_vec = s.to_vec(); - let mut r_vec = vec![0.0; r.len()]; - let result = system.compute_residuals(&s_vec, &mut r_vec); - r.copy_from_slice(&r_vec); - result.map(|_| ()).map_err(|e| format!("{:?}", e)) - }; - JacobianMatrix::numerical(compute_residuals_fn, &state, &residuals, 1e-8).map_err( - |e| SolverError::InvalidSystem { - message: format!("Failed to compute numerical Jacobian: {}", e), - }, - )? + // ── Jacobian Assembly / Freeze Decision (AC: #3, Story 4.8) ── + // + // Decide whether to recompute or reuse the Jacobian based on the + // freezing configuration and convergence behaviour. 
+ let should_recompute = if let Some(ref freeze_cfg) = self.jacobian_freezing { + if force_recompute { + true + } else if frozen_count >= freeze_cfg.max_frozen_iters { + tracing::debug!( + iteration = iteration, + frozen_count = frozen_count, + "Jacobian freeze limit reached — recomputing" + ); + true + } else { + false + } } else { - // Analytical Jacobian from components - system - .assemble_jacobian(&state, &mut jacobian_builder) - .map_err(|e| SolverError::InvalidSystem { - message: format!("Failed to assemble Jacobian: {:?}", e), - })?; - JacobianMatrix::from_builder(jacobian_builder.entries(), n_equations, n_state) + // No freezing configured — always recompute (backward-compatible) + true }; + if should_recompute { + // Fresh Jacobian assembly (in-place update) + jacobian_builder.clear(); + if self.use_numerical_jacobian { + // Numerical Jacobian via finite differences + let compute_residuals_fn = |s: &[f64], r: &mut [f64]| { + let s_vec = s.to_vec(); + let mut r_vec = vec![0.0; r.len()]; + let result = system.compute_residuals(&s_vec, &mut r_vec); + r.copy_from_slice(&r_vec); + result.map(|_| ()).map_err(|e| format!("{:?}", e)) + }; + // Rather than creating a new matrix, compute it and assign + let jm = JacobianMatrix::numerical(compute_residuals_fn, &state, &residuals, 1e-8) + .map_err(|e| SolverError::InvalidSystem { + message: format!("Failed to compute numerical Jacobian: {}", e), + })?; + // Deep copy elements to existing matrix (DMatrix::copy_from does not reallocate) + jacobian_matrix.as_matrix_mut().copy_from(jm.as_matrix()); + } else { + // Analytical Jacobian from components + system + .assemble_jacobian(&state, &mut jacobian_builder) + .map_err(|e| SolverError::InvalidSystem { + message: format!("Failed to assemble Jacobian: {:?}", e), + })?; + jacobian_matrix.update_from_builder(jacobian_builder.entries()); + }; + + frozen_count = 0; + force_recompute = false; + + tracing::debug!( + iteration = iteration, + "Fresh Jacobian computed" + ); + } 
else { + // Reuse the frozen Jacobian (Story 4.8 — AC: #2) + frozen_count += 1; + tracing::debug!( + iteration = iteration, + frozen_count = frozen_count, + "Reusing frozen Jacobian" + ); + } + // Solve linear system J·Δx = -r (AC: #1) let delta = match jacobian_matrix.solve(&residuals) { Some(d) => d, @@ -811,6 +937,29 @@ impl Solver for NewtonConfig { ); } + // ── Story 4.8 — Jacobian-freeze feedback ── + // + // If the residual norm increased or did not decrease enough + // (below the threshold), force a fresh Jacobian on the next + // iteration and reset the frozen counter. + if let Some(ref freeze_cfg) = self.jacobian_freezing { + if previous_norm > 0.0 + && current_norm / previous_norm >= (1.0 - freeze_cfg.threshold) + { + if frozen_count > 0 || !force_recompute { + tracing::debug!( + iteration = iteration, + current_norm = current_norm, + previous_norm = previous_norm, + ratio = current_norm / previous_norm, + "Residual not decreasing fast enough — unfreezing Jacobian" + ); + } + force_recompute = true; + frozen_count = 0; + } + } + tracing::debug!( iteration = iteration, residual_norm = current_norm, @@ -1694,10 +1843,12 @@ impl FallbackSolver { tracing::debug!( final_residual = final_residual, threshold = self.config.return_to_newton_threshold, - "Picard not yet stabilized, continuing with Picard" + "Picard not yet stabilized, aborting" ); - // Continue with Picard - no allocation overhead - continue; + return Err(SolverError::NonConvergence { + iterations, + final_residual, + }); } } } @@ -1958,6 +2109,7 @@ mod tests { previous_state: None, initial_state: None, convergence_criteria: None, + jacobian_freezing: None, } .with_timeout(Duration::from_millis(200)); diff --git a/crates/solver/src/system.rs b/crates/solver/src/system.rs new file mode 100644 index 0000000..89d82fb --- /dev/null +++ b/crates/solver/src/system.rs @@ -0,0 +1,1608 @@ +//! System graph structure for thermodynamic simulation. +//! +//! 
This module provides the core graph representation of a thermodynamic system, +//! where nodes are components and edges represent flow connections. Edges index +//! into the solver's state vector (P and h per edge). +//! +//! Multi-circuit support (Story 3.3): A machine can have up to 5 independent +//! circuits (valid circuit IDs: 0, 1, 2, 3, 4). Each node belongs to exactly one +//! circuit. Flow edges connect only nodes within the same circuit. + +use entropyk_components::{ + validate_port_continuity, Component, ComponentError, ConnectionError, JacobianBuilder, + ResidualVector, SystemState as StateSlice, +}; +use petgraph::algo; +use petgraph::graph::{EdgeIndex, Graph, NodeIndex}; +use petgraph::visit::EdgeRef; +use petgraph::Directed; +use std::collections::HashMap; + +use crate::coupling::{has_circular_dependencies, ThermalCoupling}; +use crate::error::{AddEdgeError, TopologyError}; +use entropyk_core::Temperature; + +/// Circuit identifier. Valid range 0..=4 (max 5 circuits per machine). +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct CircuitId(pub u8); + +impl CircuitId { + /// Maximum circuit ID (inclusive). Machine supports up to 5 circuits. + pub const MAX: u8 = 4; + + /// Creates a new CircuitId if within valid range. + /// + /// # Errors + /// + /// Returns `TopologyError::TooManyCircuits` if `id > 4`. + pub fn new(id: u8) -> Result<Self, TopologyError> { + if id <= Self::MAX { + Ok(CircuitId(id)) + } else { + Err(TopologyError::TooManyCircuits { requested: id }) + } + } + + /// Circuit 0 (default for single-circuit systems). + pub const ZERO: CircuitId = CircuitId(0); +} + +/// Weight for flow edges in the system graph. +/// +/// Each edge represents a flow connection between two component ports and stores +/// the state vector indices for pressure (P) and enthalpy (h) at that connection. 
+#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub struct FlowEdge { + /// State vector index for pressure (Pa) + pub state_index_p: usize, + /// State vector index for enthalpy (J/kg) + pub state_index_h: usize, +} + +/// System graph structure. +/// +/// Nodes are components (`Box<dyn Component>`), edges are flow connections with +/// state indices. The state vector layout is: +/// +/// ```text +/// [P_edge0, h_edge0, P_edge1, h_edge1, ...] +/// ``` +/// +/// Edge order follows the graph's internal edge iteration order (stable after +/// `finalize()` is called). +pub struct System { + graph: Graph<Box<dyn Component>, FlowEdge, Directed>, + /// Maps EdgeIndex to (state_index_p, state_index_h) - built in finalize() + edge_to_state: HashMap<EdgeIndex, (usize, usize)>, + /// Maps NodeIndex to CircuitId. Nodes without entry default to circuit 0. + node_to_circuit: HashMap<NodeIndex, CircuitId>, + /// Thermal couplings between circuits (heat transfer without fluid mixing). + thermal_couplings: Vec<ThermalCoupling>, + finalized: bool, +} + +impl System { + /// Creates a new empty system graph. + pub fn new() -> Self { + Self { + graph: Graph::new(), + edge_to_state: HashMap::new(), + node_to_circuit: HashMap::new(), + thermal_couplings: Vec::new(), + finalized: false, + } + } + + /// Adds a component as a node in the default circuit (circuit 0) and returns its node index. + /// + /// For multi-circuit machines, use [`add_component_to_circuit`](Self::add_component_to_circuit). + pub fn add_component(&mut self, component: Box<dyn Component>) -> NodeIndex { + self.add_component_to_circuit(component, CircuitId::ZERO) + .unwrap() + } + + /// Adds a component as a node in the specified circuit and returns its node index. + /// + /// # Errors + /// + /// Returns `TopologyError::TooManyCircuits` if `circuit_id.0 > 4`. 
+ pub fn add_component_to_circuit( + &mut self, + component: Box<dyn Component>, + circuit_id: CircuitId, + ) -> Result<NodeIndex, TopologyError> { + CircuitId::new(circuit_id.0)?; + self.finalized = false; + let node_idx = self.graph.add_node(component); + self.node_to_circuit.insert(node_idx, circuit_id); + Ok(node_idx) + } + + /// Returns the circuit ID for a node, or circuit 0 if not found (backward compat). + pub fn node_circuit(&self, node: NodeIndex) -> CircuitId { + self.node_to_circuit + .get(&node) + .copied() + .unwrap_or(CircuitId::ZERO) + } + + /// Returns the circuit ID for an edge based on its source node. + /// + /// # Panics + /// + /// Panics if the edge index is invalid. + pub fn edge_circuit(&self, edge: EdgeIndex) -> CircuitId { + let (src, _tgt) = self.graph.edge_endpoints(edge).expect("invalid edge index"); + self.node_circuit(src) + } + + /// Adds a flow edge from `source` to `target` without port validation. + /// + /// **No port compatibility validation is performed.** Use + /// [`add_edge_with_ports`](Self::add_edge_with_ports) for components with ports to validate + /// fluid, pressure, and enthalpy continuity. This method is intended for components without + /// ports (e.g. mock components in tests). + /// + /// Flow edges connect only nodes within the same circuit. Cross-circuit connections + /// are rejected (thermal coupling is Story 3.4). + /// + /// State indices are assigned when `finalize()` is called. + /// + /// # Errors + /// + /// Returns `TopologyError::CrossCircuitConnection` if source and target are in different circuits. 
+ pub fn add_edge( + &mut self, + source: NodeIndex, + target: NodeIndex, + ) -> Result<EdgeIndex, TopologyError> { + let src_circuit = self.node_circuit(source); + let tgt_circuit = self.node_circuit(target); + if src_circuit != tgt_circuit { + tracing::warn!( + "Cross-circuit edge rejected: source circuit {}, target circuit {}", + src_circuit.0, + tgt_circuit.0 + ); + return Err(TopologyError::CrossCircuitConnection { + source_circuit: src_circuit.0, + target_circuit: tgt_circuit.0, + }); + } + + // Safety check: Warn if connecting components with ports using the non-validating method + if let (Some(src), Some(tgt)) = ( + self.graph.node_weight(source), + self.graph.node_weight(target), + ) { + if !src.get_ports().is_empty() || !tgt.get_ports().is_empty() { + tracing::warn!( + "add_edge called on components with ports (src: {:?}, tgt: {:?}). \ + This bypasses port validation. Use add_edge_with_ports instead.", + source, + target + ); + } + } + + self.finalized = false; + Ok(self.graph.add_edge( + source, + target, + FlowEdge { + state_index_p: 0, + state_index_h: 0, + }, + )) + } + + /// Adds a flow edge from `source` outlet port to `target` inlet port with validation. + /// + /// Validates circuit membership (same circuit), then fluid compatibility, pressure and + /// enthalpy continuity using port.rs tolerances. For 2-port components: `source_port_idx=1` + /// (outlet), `target_port_idx=0` (inlet). + /// + /// # Errors + /// + /// Returns `AddEdgeError::Topology` if source and target are in different circuits. + /// Returns `AddEdgeError::Connection` if ports are incompatible (fluid, pressure, or enthalpy mismatch). 
+ pub fn add_edge_with_ports( + &mut self, + source: NodeIndex, + source_port_idx: usize, + target: NodeIndex, + target_port_idx: usize, + ) -> Result<EdgeIndex, AddEdgeError> { + // Circuit validation first + let src_circuit = self.node_circuit(source); + let tgt_circuit = self.node_circuit(target); + if src_circuit != tgt_circuit { + tracing::warn!( + "Cross-circuit edge rejected: source circuit {}, target circuit {}", + src_circuit.0, + tgt_circuit.0 + ); + return Err(TopologyError::CrossCircuitConnection { + source_circuit: src_circuit.0, + target_circuit: tgt_circuit.0, + } + .into()); + } + + let source_comp = self + .graph + .node_weight(source) + .ok_or_else(|| ConnectionError::InvalidNodeIndex(source.index()))?; + let target_comp = self + .graph + .node_weight(target) + .ok_or_else(|| ConnectionError::InvalidNodeIndex(target.index()))?; + + let source_ports = source_comp.get_ports(); + let target_ports = target_comp.get_ports(); + + if source_ports.is_empty() && target_ports.is_empty() { + // No ports: add edge without validation (backward compat) + self.finalized = false; + return Ok(self.graph.add_edge( + source, + target, + FlowEdge { + state_index_p: 0, + state_index_h: 0, + }, + )); + } + + if source_port_idx >= source_ports.len() { + return Err(ConnectionError::InvalidPortIndex { + index: source_port_idx, + port_count: source_ports.len(), + max_index: source_ports.len().saturating_sub(1), + } + .into()); + } + if target_port_idx >= target_ports.len() { + return Err(ConnectionError::InvalidPortIndex { + index: target_port_idx, + port_count: target_ports.len(), + max_index: target_ports.len().saturating_sub(1), + } + .into()); + } + + let outlet = &source_ports[source_port_idx]; + let inlet = &target_ports[target_port_idx]; + if let Err(ref e) = validate_port_continuity(outlet, inlet) { + tracing::warn!("Port validation failed: {}", e); + return Err(e.clone().into()); + } + + self.finalized = false; + Ok(self.graph.add_edge( + source, + target, + FlowEdge { + state_index_p: 0, 
+ state_index_h: 0, + }, + )) + } + + /// Finalizes the graph: builds edge→state index mapping and validates topology. + /// + /// # State vector layout + /// + /// The state vector has length `2 * edge_count`. For each edge (in graph iteration order): + /// - `state[2*i]` = pressure at edge i (Pa) + /// - `state[2*i + 1]` = enthalpy at edge i (J/kg) + /// + /// # Errors + /// + /// Returns `TopologyError` if: + /// - Any node is isolated (no edges) + /// - The graph is empty (no components) + pub fn finalize(&mut self) -> Result<(), TopologyError> { + self.validate_topology()?; + + if !self.thermal_couplings.is_empty() && has_circular_dependencies(self.thermal_couplings()) + { + tracing::warn!("Circular thermal coupling detected, simultaneous solving required"); + } + + self.edge_to_state.clear(); + let mut idx = 0usize; + for edge_idx in self.graph.edge_indices() { + let (p_idx, h_idx) = (idx, idx + 1); + self.edge_to_state.insert(edge_idx, (p_idx, h_idx)); + if let Some(weight) = self.graph.edge_weight_mut(edge_idx) { + weight.state_index_p = p_idx; + weight.state_index_h = h_idx; + } + idx += 2; + } + self.finalized = true; + Ok(()) + } + + /// Validates the topology: no isolated nodes and edge circuit consistency. + /// + /// Note: "All ports connected" validation requires port→edge association + /// (Story 3.2 Port Compatibility Validation). 
+ fn validate_topology(&self) -> Result<(), TopologyError> { + let node_count = self.graph.node_count(); + if node_count == 0 { + return Ok(()); + } + + for node_idx in self.graph.node_indices() { + let degree = self + .graph + .edges_directed(node_idx, petgraph::Direction::Incoming) + .count() + + self + .graph + .edges_directed(node_idx, petgraph::Direction::Outgoing) + .count(); + if degree == 0 { + return Err(TopologyError::IsolatedNode { + node_index: node_idx.index(), + }); + } + } + + // Validate that all edges connect nodes within the same circuit + for edge_idx in self.graph.edge_indices() { + if let Some((src, tgt)) = self.graph.edge_endpoints(edge_idx) { + let src_circuit = self.node_circuit(src); + let tgt_circuit = self.node_circuit(tgt); + if src_circuit != tgt_circuit { + return Err(TopologyError::CrossCircuitConnection { + source_circuit: src_circuit.0, + target_circuit: tgt_circuit.0, + }); + } + } + } + + Ok(()) + } + + /// Returns the documented state vector layout. + /// + /// Layout: `[P_edge0, h_edge0, P_edge1, h_edge1, ...]` where each edge (in + /// graph iteration order) contributes 2 entries: pressure (Pa) then enthalpy (J/kg). + /// + /// # Panics + /// + /// Panics if `finalize()` has not been called. + pub fn state_layout(&self) -> &'static str { + assert!(self.finalized, "call finalize() before state_layout()"); + "[P_edge0, h_edge0, P_edge1, h_edge1, ...] — 2 per edge (pressure Pa, enthalpy J/kg)" + } + + /// Returns the length of the state vector: `2 * edge_count`. + /// + /// # Panics + /// + /// Panics if `finalize()` has not been called. + pub fn state_vector_len(&self) -> usize { + assert!(self.finalized, "call finalize() before state_vector_len()"); + 2 * self.graph.edge_count() + } + + /// Returns the state indices (P, h) for the given edge. + /// + /// # Returns + /// + /// `(state_index_p, state_index_h)` for the edge. + /// + /// # Panics + /// + /// Panics if `finalize()` has not been called or if `edge_id` is invalid. 
+ pub fn edge_state_indices(&self, edge_id: EdgeIndex) -> (usize, usize) { + assert!( + self.finalized, + "call finalize() before edge_state_indices()" + ); + *self + .edge_to_state + .get(&edge_id) + .expect("invalid edge index") + } + + /// Returns the number of edges in the graph. + pub fn edge_count(&self) -> usize { + self.graph.edge_count() + } + + /// Returns an iterator over all edge indices in the graph. + pub fn edge_indices(&self) -> impl Iterator<Item = EdgeIndex> + '_ { + self.graph.edge_indices() + } + + /// Returns the number of nodes (components) in the graph. + pub fn node_count(&self) -> usize { + self.graph.node_count() + } + + /// Returns the number of distinct circuits in the machine. + /// + /// Circuits are identified by the unique circuit IDs present in `node_to_circuit`. + /// Empty system returns 0. Systems with components always return >= 1 since + /// all components are assigned to a circuit (defaulting to circuit 0). + /// Valid circuit IDs are 0 through 4 (inclusive), supporting up to 5 circuits. + pub fn circuit_count(&self) -> usize { + if self.graph.node_count() == 0 { + return 0; + } + let mut ids: Vec<u8> = self.node_to_circuit.values().map(|c| c.0).collect(); + if ids.is_empty() { + // This shouldn't happen since add_component adds to node_to_circuit, + // but handle defensively + return 1; + } + ids.sort_unstable(); + ids.dedup(); + ids.len() + } + + /// Returns an iterator over node indices belonging to the given circuit. + pub fn circuit_nodes(&self, circuit_id: CircuitId) -> impl Iterator<Item = NodeIndex> + '_ { + self.graph.node_indices().filter(move |&idx| { + self.node_to_circuit + .get(&idx) + .copied() + .unwrap_or(CircuitId::ZERO) + == circuit_id + }) + } + + /// Returns an iterator over edge indices belonging to the given circuit. + /// + /// An edge belongs to a circuit if both its source and target nodes are in that circuit. 
+ pub fn circuit_edges(&self, circuit_id: CircuitId) -> impl Iterator<Item = EdgeIndex> + '_ { + self.graph.edge_indices().filter(move |&edge_idx| { + let (src, tgt) = self.graph.edge_endpoints(edge_idx).expect("valid edge"); + self.node_circuit(src) == circuit_id && self.node_circuit(tgt) == circuit_id + }) + } + + /// Checks if a circuit has any components. + fn circuit_exists(&self, circuit_id: CircuitId) -> bool { + self.node_to_circuit.values().any(|&c| c == circuit_id) + } + + /// Adds a thermal coupling between two circuits. + /// + /// Thermal couplings represent heat exchangers that transfer heat from a "hot" + /// circuit to a "cold" circuit without fluid mixing. Heat flows from hot to cold + /// proportional to the temperature difference and thermal conductance (UA). + /// + /// # Arguments + /// + /// * `coupling` - The thermal coupling to add + /// + /// # Returns + /// + /// The index of the added coupling in the internal storage. + /// + /// # Errors + /// + /// Returns `TopologyError::InvalidCircuitForCoupling` if either circuit + /// referenced in the coupling does not exist in the system. 
+ /// + /// # Example + /// + /// ```no_run + /// use entropyk_solver::{System, ThermalCoupling, CircuitId}; + /// use entropyk_core::ThermalConductance; + /// use entropyk_components::Component; + /// # fn make_mock() -> Box<dyn Component> { unimplemented!() } + /// + /// let mut sys = System::new(); + /// sys.add_component_to_circuit(make_mock(), CircuitId(0)).unwrap(); + /// sys.add_component_to_circuit(make_mock(), CircuitId(1)).unwrap(); + /// + /// let coupling = ThermalCoupling::new( + /// CircuitId(0), + /// CircuitId(1), + /// ThermalConductance::from_watts_per_kelvin(1000.0), + /// ); + /// let idx = sys.add_thermal_coupling(coupling).unwrap(); + /// ``` + pub fn add_thermal_coupling( + &mut self, + coupling: ThermalCoupling, + ) -> Result<usize, TopologyError> { + // Validate that both circuits exist + if !self.circuit_exists(coupling.hot_circuit) { + return Err(TopologyError::InvalidCircuitForCoupling { + circuit_id: coupling.hot_circuit.0, + }); + } + if !self.circuit_exists(coupling.cold_circuit) { + return Err(TopologyError::InvalidCircuitForCoupling { + circuit_id: coupling.cold_circuit.0, + }); + } + + self.finalized = false; + self.thermal_couplings.push(coupling); + Ok(self.thermal_couplings.len() - 1) + } + + /// Returns the number of thermal couplings in the system. + pub fn thermal_coupling_count(&self) -> usize { + self.thermal_couplings.len() + } + + /// Returns a reference to all thermal couplings. + pub fn thermal_couplings(&self) -> &[ThermalCoupling] { + &self.thermal_couplings + } + + /// Returns a reference to a specific thermal coupling by index. + pub fn get_thermal_coupling(&self, index: usize) -> Option<&ThermalCoupling> { + self.thermal_couplings.get(index) + } + + /// Returns the number of coupling residual equations (one per thermal coupling). + /// + /// The solver must reserve this many rows in the residual vector for coupling + /// heat balance equations. Use [`coupling_residuals`](Self::coupling_residuals) to fill them. 
+ pub fn coupling_residual_count(&self) -> usize { + self.thermal_couplings.len() + } + + /// Fills coupling residuals into `out`. + /// + /// For each thermal coupling, the residual is the heat transfer rate Q (W) into the cold + /// circuit: Q = η·UA·(T_hot − T_cold). The solver typically uses this in a heat balance + /// (e.g. r = Q_actual − Q_expected). Temperatures must be in Kelvin. + /// + /// # Arguments + /// + /// * `temperatures` - One (T_hot_K, T_cold_K) per coupling; length must equal + /// `thermal_coupling_count()`. The solver obtains these from state (e.g. P, h → T via fluid backend). + /// * `out` - Slice to write residuals; length must be at least `coupling_residual_count()`. + pub fn coupling_residuals(&self, temperatures: &[(f64, f64)], out: &mut [f64]) { + assert!( + temperatures.len() == self.thermal_couplings.len(), + "temperatures.len() must equal thermal_coupling_count()" + ); + assert!( + out.len() >= self.thermal_couplings.len(), + "out.len() must be at least coupling_residual_count()" + ); + for (i, coupling) in self.thermal_couplings.iter().enumerate() { + let (t_hot_k, t_cold_k) = temperatures[i]; + let t_hot = Temperature::from_kelvin(t_hot_k); + let t_cold = Temperature::from_kelvin(t_cold_k); + out[i] = crate::coupling::compute_coupling_heat(coupling, t_hot, t_cold); + } + } + + /// Returns Jacobian entries for coupling residuals with respect to temperature state. + /// + /// The solver state may include temperature unknowns for coupling interfaces (or T derived from P, h). + /// For each coupling i: ∂Q_i/∂T_hot = η·UA, ∂Q_i/∂T_cold = −η·UA. + /// + /// # Arguments + /// + /// * `row_offset` - First row index for coupling equations in the global residual vector. + /// * `t_hot_cols` - State column index for T_hot per coupling; length = `thermal_coupling_count()`. + /// * `t_cold_cols` - State column index for T_cold per coupling; length = `thermal_coupling_count()`. 
+ /// + /// # Returns + /// + /// `(row, col, value)` tuples for the Jacobian. Row is `row_offset + coupling_index`. + pub fn coupling_jacobian_entries( + &self, + row_offset: usize, + t_hot_cols: &[usize], + t_cold_cols: &[usize], + ) -> Vec<(usize, usize, f64)> { + assert!( + t_hot_cols.len() == self.thermal_couplings.len() + && t_cold_cols.len() == self.thermal_couplings.len(), + "t_hot_cols and t_cold_cols length must equal thermal_coupling_count()" + ); + let mut entries = Vec::with_capacity(2 * self.thermal_couplings.len()); + for (i, coupling) in self.thermal_couplings.iter().enumerate() { + let dr_dt_hot = coupling.efficiency * coupling.ua.to_watts_per_kelvin(); + let dr_dt_cold = -dr_dt_hot; + let row = row_offset + i; + entries.push((row, t_hot_cols[i], dr_dt_hot)); + entries.push((row, t_cold_cols[i], dr_dt_cold)); + } + entries + } + + /// Returns true if the graph contains a cycle. + /// + /// Refrigeration circuits form cycles (compressor → condenser → valve → evaporator → compressor), + /// so cycles are expected and valid. + pub fn is_cyclic(&self) -> bool { + algo::is_cyclic_directed(&self.graph) + } + + /// Iterates over (component, edge_indices) for Jacobian assembly. + /// + /// For each node, yields the component and a map from edge index to (state_index_p, state_index_h) + /// for edges incident to that node (incoming and outgoing). + /// + /// # Panics + /// + /// Panics if `finalize()` has not been called. 
+ pub fn traverse_for_jacobian( + &self, + ) -> impl Iterator<Item = (NodeIndex, &dyn Component, Vec<(EdgeIndex, usize, usize)>)> { + assert!( + self.finalized, + "call finalize() before traverse_for_jacobian()" + ); + + self.graph.node_indices().map(move |node_idx| { + let component = self.graph.node_weight(node_idx).unwrap(); + let mut edge_indices = Vec::new(); + + for edge_ref in self + .graph + .edges_directed(node_idx, petgraph::Direction::Incoming) + { + let edge_idx = edge_ref.id(); + if let Some(&(p, h)) = self.edge_to_state.get(&edge_idx) { + edge_indices.push((edge_idx, p, h)); + } + } + for edge_ref in self + .graph + .edges_directed(node_idx, petgraph::Direction::Outgoing) + { + let edge_idx = edge_ref.id(); + if let Some(&(p, h)) = self.edge_to_state.get(&edge_idx) { + edge_indices.push((edge_idx, p, h)); + } + } + + (node_idx, component.as_ref(), edge_indices) + }) + } + + /// Assembles residuals from all components. + /// + /// Components receive the full state slice and write to their equation indices. + /// Equation indices are computed from component order and `n_equations()`. + /// + /// # Errors + /// + /// Returns `ComponentError::InvalidResidualDimensions` if `residuals.len()` is + /// less than the total number of equations across all components. + pub fn compute_residuals( + &self, + state: &StateSlice, + residuals: &mut ResidualVector, + ) -> Result<(), ComponentError> { + let total_eqs: usize = self + .traverse_for_jacobian() + .map(|(_, c, _)| c.n_equations()) + .sum(); + if residuals.len() < total_eqs { + return Err(ComponentError::InvalidResidualDimensions { + expected: total_eqs, + actual: residuals.len(), + }); + } + + let mut eq_offset = 0; + for (_node_idx, component, _edge_indices) in self.traverse_for_jacobian() { + let n = component.n_equations(); + if n > 0 { + let mut temp = vec![0.0; n]; + component.compute_residuals(state, &mut temp)?; + residuals[eq_offset..eq_offset + n].copy_from_slice(&temp); + } + eq_offset += n; + } + Ok(()) + } + + /// Assembles Jacobian entries from all components. 
+ /// + /// Each component receives the state and writes to JacobianBuilder. The + /// [`traverse_for_jacobian`](Self::traverse_for_jacobian) iterator provides + /// the edge→state mapping `(EdgeIndex, state_index_p, state_index_h)` per + /// component. Components must know their port→state mapping (e.g. from graph + /// construction in Story 3.2) to write correct column indices. + pub fn assemble_jacobian( + &self, + state: &StateSlice, + jacobian: &mut JacobianBuilder, + ) -> Result<(), ComponentError> { + let mut row_offset = 0; + for (_node_idx, component, _edge_indices) in self.traverse_for_jacobian() { + let n = component.n_equations(); + if n > 0 { + // Components write rows 0..n-1; we offset to global equation indices. + let mut temp_builder = JacobianBuilder::new(); + component.jacobian_entries(state, &mut temp_builder)?; + for (r, c, v) in temp_builder.entries() { + jacobian.add_entry(row_offset + r, *c, *v); + } + } + row_offset += n; + } + Ok(()) + } +} + +impl Default for System { + fn default() -> Self { + Self::new() + } +} + +#[cfg(test)] +mod tests { + use super::*; + use approx::assert_relative_eq; + use entropyk_components::port::{FluidId, Port}; + use entropyk_components::{ConnectedPort, SystemState}; + use entropyk_core::{Enthalpy, Pressure}; + + /// Minimal mock component for testing. 
+ struct MockComponent { + n_equations: usize, + } + + impl Component for MockComponent { + fn compute_residuals( + &self, + _state: &SystemState, + residuals: &mut entropyk_components::ResidualVector, + ) -> Result<(), ComponentError> { + for r in residuals.iter_mut().take(self.n_equations) { + *r = 0.0; + } + Ok(()) + } + + fn jacobian_entries( + &self, + _state: &SystemState, + jacobian: &mut JacobianBuilder, + ) -> Result<(), ComponentError> { + for i in 0..self.n_equations { + jacobian.add_entry(i, i, 1.0); + } + Ok(()) + } + + fn n_equations(&self) -> usize { + self.n_equations + } + + fn get_ports(&self) -> &[ConnectedPort] { + &[] + } + } + + fn make_mock(n: usize) -> Box<dyn Component> { + Box::new(MockComponent { n_equations: n }) + } + + /// Mock component with 2 ports (inlet=0, outlet=1) for port validation tests. + fn make_ported_mock(fluid: &str, pressure_pa: f64, enthalpy_jkg: f64) -> Box<dyn Component> { + let inlet = Port::new( + FluidId::new(fluid), + Pressure::from_pascals(pressure_pa), + Enthalpy::from_joules_per_kg(enthalpy_jkg), + ); + let outlet = Port::new( + FluidId::new(fluid), + Pressure::from_pascals(pressure_pa), + Enthalpy::from_joules_per_kg(enthalpy_jkg), + ); + let (connected_inlet, connected_outlet) = inlet.connect(outlet).unwrap(); + let ports: Vec<ConnectedPort> = vec![connected_inlet, connected_outlet]; + Box::new(PortedMockComponent { ports }) + } + + struct PortedMockComponent { + ports: Vec<ConnectedPort>, + } + + impl Component for PortedMockComponent { + fn compute_residuals( + &self, + _state: &SystemState, + residuals: &mut entropyk_components::ResidualVector, + ) -> Result<(), ComponentError> { + for r in residuals.iter_mut() { + *r = 0.0; + } + Ok(()) + } + + fn jacobian_entries( + &self, + _state: &SystemState, + _jacobian: &mut JacobianBuilder, + ) -> Result<(), ComponentError> { + Ok(()) + } + + fn n_equations(&self) -> usize { + 0 + } + + fn get_ports(&self) -> &[ConnectedPort] { + &self.ports + } + } + + #[test] + fn test_simple_cycle_builds() { + let mut sys = 
System::new(); + let n0 = sys.add_component(make_mock(0)); + let n1 = sys.add_component(make_mock(0)); + let n2 = sys.add_component(make_mock(0)); + let n3 = sys.add_component(make_mock(0)); + + sys.add_edge(n0, n1).unwrap(); + sys.add_edge(n1, n2).unwrap(); + sys.add_edge(n2, n3).unwrap(); + sys.add_edge(n3, n0).unwrap(); + + assert_eq!(sys.node_count(), 4); + assert_eq!(sys.edge_count(), 4); + + let result = sys.finalize(); + assert!( + result.is_ok(), + "finalize should succeed: {:?}", + result.err() + ); + } + + #[test] + fn test_state_vector_length() { + let mut sys = System::new(); + let n0 = sys.add_component(make_mock(0)); + let n1 = sys.add_component(make_mock(0)); + sys.add_edge(n0, n1).unwrap(); + sys.add_edge(n1, n0).unwrap(); + + sys.finalize().unwrap(); + assert_eq!(sys.state_vector_len(), 4); // 2 edges * 2 = 4 + } + + #[test] + fn test_edge_indices_contiguous() { + let mut sys = System::new(); + let n0 = sys.add_component(make_mock(0)); + let n1 = sys.add_component(make_mock(0)); + let n2 = sys.add_component(make_mock(0)); + + sys.add_edge(n0, n1).unwrap(); + sys.add_edge(n1, n2).unwrap(); + sys.add_edge(n2, n0).unwrap(); + + sys.finalize().unwrap(); + + let mut indices: Vec<usize> = Vec::new(); + for edge_idx in sys.edge_indices() { + let (p, h) = sys.edge_state_indices(edge_idx); + indices.push(p); + indices.push(h); + } + + let n = sys.edge_count(); + assert_eq!(indices.len(), 2 * n); + let expected: Vec<usize> = (0..2 * n).collect(); + assert_eq!(indices, expected, "indices should be 0..2n"); + } + + #[test] + fn test_cycle_detected() { + let mut sys = System::new(); + let n0 = sys.add_component(make_mock(0)); + let n1 = sys.add_component(make_mock(0)); + let n2 = sys.add_component(make_mock(0)); + + sys.add_edge(n0, n1).unwrap(); + sys.add_edge(n1, n2).unwrap(); + sys.add_edge(n2, n0).unwrap(); + + sys.finalize().unwrap(); + assert!( + sys.is_cyclic(), + "refrigeration cycle should be detected as cyclic" + ); + } + + #[test] + fn test_dangling_node_error() { 
+ let mut sys = System::new(); + let n0 = sys.add_component(make_mock(0)); + let n1 = sys.add_component(make_mock(0)); + let n2 = sys.add_component(make_mock(0)); // isolated + + sys.add_edge(n0, n1).unwrap(); + // n2 has no edges + + let result = sys.finalize(); + assert!(result.is_err()); + match &result { + Err(TopologyError::IsolatedNode { node_index }) => { + assert!( + *node_index < sys.node_count(), + "isolated node index {} must be < node_count {}", + node_index, + sys.node_count() + ); + assert_eq!(*node_index, n2.index(), "isolated node should be n2"); + } + other => panic!("expected IsolatedNode error, got {:?}", other), + } + } + + #[test] + fn test_traverse_components() { + let mut sys = System::new(); + let n0 = sys.add_component(make_mock(1)); + let n1 = sys.add_component(make_mock(1)); + sys.add_edge(n0, n1).unwrap(); + sys.add_edge(n1, n0).unwrap(); + + sys.finalize().unwrap(); + + let mut count = 0; + for (_node_idx, component, edge_indices) in sys.traverse_for_jacobian() { + count += 1; + assert_eq!(component.n_equations(), 1); + assert_eq!( + edge_indices.len(), + 2, + "each node has 2 incident edges in 2-node cycle" + ); + for (_edge_idx, p, h) in &edge_indices { + assert!(p < &sys.state_vector_len()); + assert!(h < &sys.state_vector_len()); + assert_eq!(h, &(p + 1)); + } + } + assert_eq!(count, 2); + } + + #[test] + fn test_empty_graph_finalize_ok() { + let mut sys = System::new(); + let result = sys.finalize(); + assert!(result.is_ok()); + } + + #[test] + fn test_state_layout_integration() { + let mut sys = System::new(); + let n0 = sys.add_component(make_mock(1)); + let n1 = sys.add_component(make_mock(1)); + sys.add_edge(n0, n1).unwrap(); + sys.add_edge(n1, n0).unwrap(); + + sys.finalize().unwrap(); + + let layout = sys.state_layout(); + assert!(layout.contains("P_edge")); + assert!(layout.contains("h_edge")); + + let state_len = sys.state_vector_len(); + assert_eq!(state_len, 4); + + let mut state = vec![0.0; state_len]; + state[0] = 1e5; 
// P_edge0 + state[1] = 250000.0; // h_edge0 + state[2] = 5e5; // P_edge1 + state[3] = 300000.0; // h_edge1 + + let mut residuals = vec![0.0; 2]; + let result = sys.compute_residuals(&state, &mut residuals); + assert!(result.is_ok()); + } + + #[test] + fn test_valid_connection_same_fluid() { + let p = 100_000.0; + let h = 400_000.0; + let mut sys = System::new(); + let n0 = sys.add_component(make_ported_mock("R134a", p, h)); + let n1 = sys.add_component(make_ported_mock("R134a", p, h)); + + let result = sys.add_edge_with_ports(n0, 1, n1, 0); + assert!( + result.is_ok(), + "R134a to R134a should succeed: {:?}", + result.err() + ); + sys.add_edge(n1, n0).unwrap(); // backward edge, no ports + sys.finalize().unwrap(); + } + + #[test] + fn test_incompatible_fluid_rejected() { + let p = 100_000.0; + let h = 400_000.0; + let mut sys = System::new(); + let n0 = sys.add_component(make_ported_mock("R134a", p, h)); + let n1 = sys.add_component(make_ported_mock("Water", p, h)); + + let result = sys.add_edge_with_ports(n0, 1, n1, 0); + assert!(result.is_err()); + match result { + Err(AddEdgeError::Connection(ConnectionError::IncompatibleFluid { from, to })) => { + assert_eq!(from, "R134a"); + assert_eq!(to, "Water"); + } + other => panic!( + "expected AddEdgeError::Connection(IncompatibleFluid), got {:?}", + other + ), + } + } + + #[test] + fn test_pressure_mismatch_rejected() { + let h = 400_000.0; + let mut sys = System::new(); + let n0 = sys.add_component(make_ported_mock("R134a", 100_000.0, h)); + let n1 = sys.add_component(make_ported_mock("R134a", 200_000.0, h)); + + let result = sys.add_edge_with_ports(n0, 1, n1, 0); + assert!(result.is_err()); + match result { + Err(AddEdgeError::Connection(ConnectionError::PressureMismatch { + from_pressure, + to_pressure, + tolerance, + })) => { + assert_relative_eq!(from_pressure, 100_000.0, epsilon = 1.0); + assert_relative_eq!(to_pressure, 200_000.0, epsilon = 1.0); + assert!(tolerance >= 1.0, "tolerance should be at least 1 Pa"); 
+ } + other => panic!( + "expected AddEdgeError::Connection(PressureMismatch), got {:?}", + other + ), + } + } + + #[test] + fn test_enthalpy_mismatch_rejected() { + let p = 100_000.0; + let mut sys = System::new(); + let n0 = sys.add_component(make_ported_mock("R134a", p, 400_000.0)); + let n1 = sys.add_component(make_ported_mock("R134a", p, 500_000.0)); + + let result = sys.add_edge_with_ports(n0, 1, n1, 0); + assert!(result.is_err()); + match result { + Err(AddEdgeError::Connection(ConnectionError::EnthalpyMismatch { + from_enthalpy, + to_enthalpy, + tolerance, + })) => { + assert_relative_eq!(from_enthalpy, 400_000.0, epsilon = 1.0); + assert_relative_eq!(to_enthalpy, 500_000.0, epsilon = 1.0); + assert_relative_eq!(tolerance, 100.0, epsilon = 1.0); // ENTHALPY_TOLERANCE_J_KG + } + other => panic!( + "expected AddEdgeError::Connection(EnthalpyMismatch), got {:?}", + other + ), + } + } + + #[test] + fn test_pressure_tolerance_boundary() { + let h = 400_000.0; + let base_pressure = 100_000.0; // 100 kPa + let tolerance: f64 = (base_pressure * 1e-4f64).max(1.0f64); // 10 Pa for 100 kPa + + let mut sys = System::new(); + let n0 = sys.add_component(make_ported_mock("R134a", base_pressure, h)); + + // Exactly at tolerance - should succeed + let n1 = sys.add_component(make_ported_mock("R134a", base_pressure + tolerance, h)); + let result = sys.add_edge_with_ports(n0, 1, n1, 0); + assert!( + result.is_ok(), + "Connection at exact tolerance ({:.1} Pa diff) should succeed", + tolerance + ); + + // Just outside tolerance - should fail + let n2 = sys.add_component(make_ported_mock( + "R134a", + base_pressure + tolerance + 1.0, + h, + )); + let result = sys.add_edge_with_ports(n0, 1, n2, 0); + assert!( + result.is_err(), + "Connection just outside tolerance ({:.1} Pa diff) should fail", + tolerance + 1.0 + ); + match result { + Err(AddEdgeError::Connection(ConnectionError::PressureMismatch { + from_pressure, + to_pressure, + tolerance: tol, + })) => { + 
assert_relative_eq!(from_pressure, base_pressure, epsilon = 0.1); + assert_relative_eq!(to_pressure, base_pressure + tolerance + 1.0, epsilon = 0.1); + assert_relative_eq!(tol, tolerance, epsilon = 0.1); + } + other => panic!("expected PressureMismatch at boundary, got {:?}", other), + } + } + + #[test] + fn test_invalid_port_index_rejected() { + let p = 100_000.0; + let h = 400_000.0; + let mut sys = System::new(); + let n0 = sys.add_component(make_ported_mock("R134a", p, h)); + let n1 = sys.add_component(make_ported_mock("R134a", p, h)); + + // Port index 2 out of bounds for 2-port component + let result = sys.add_edge_with_ports(n0, 2, n1, 0); + assert!(result.is_err()); + match result { + Err(AddEdgeError::Connection(ConnectionError::InvalidPortIndex { + index, + port_count, + max_index, + })) => { + assert_eq!(index, 2); + assert_eq!(port_count, 2); + assert_eq!(max_index, 1); + } + other => panic!("expected InvalidPortIndex for source, got {:?}", other), + } + + // Target port index out of bounds + let result = sys.add_edge_with_ports(n0, 1, n1, 5); + assert!(result.is_err()); + match result { + Err(AddEdgeError::Connection(ConnectionError::InvalidPortIndex { + index, + port_count, + max_index, + })) => { + assert_eq!(index, 5); + assert_eq!(port_count, 2); + assert_eq!(max_index, 1); + } + other => panic!("expected InvalidPortIndex for target, got {:?}", other), + } + } + + #[test] + fn test_simple_cycle_port_validation() { + let p = 100_000.0; + let h = 400_000.0; + let mut sys = System::new(); + let n0 = sys.add_component(make_ported_mock("R134a", p, h)); + let n1 = sys.add_component(make_ported_mock("R134a", p, h)); + let n2 = sys.add_component(make_ported_mock("R134a", p, h)); + let n3 = sys.add_component(make_ported_mock("R134a", p, h)); + + sys.add_edge_with_ports(n0, 1, n1, 0).unwrap(); + sys.add_edge_with_ports(n1, 1, n2, 0).unwrap(); + sys.add_edge_with_ports(n2, 1, n3, 0).unwrap(); + sys.add_edge_with_ports(n3, 1, n0, 0).unwrap(); + + 
assert_eq!(sys.edge_count(), 4); + sys.finalize().unwrap(); + } + + #[test] + fn test_compute_residuals_bounds_check() { + let mut sys = System::new(); + let n0 = sys.add_component(make_mock(2)); + let n1 = sys.add_component(make_mock(2)); + sys.add_edge(n0, n1).unwrap(); + sys.add_edge(n1, n0).unwrap(); + sys.finalize().unwrap(); + + let state = vec![0.0; sys.state_vector_len()]; + let mut residuals = vec![0.0; 1]; // Too small: need 4 + let result = sys.compute_residuals(&state, &mut residuals); + assert!(result.is_err()); + match result { + Err(ComponentError::InvalidResidualDimensions { expected, actual }) => { + assert_eq!(expected, 4); + assert_eq!(actual, 1); + } + other => panic!("expected InvalidResidualDimensions, got {:?}", other), + } + } + + // --- Story 3.3: Multi-Circuit Machine Definition tests --- + + #[test] + fn test_two_circuit_machine() { + let mut sys = System::new(); + let c0 = CircuitId::ZERO; + let c1 = CircuitId(1); + + let n0 = sys.add_component_to_circuit(make_mock(0), c0).unwrap(); + let n1 = sys.add_component_to_circuit(make_mock(0), c0).unwrap(); + let n2 = sys.add_component_to_circuit(make_mock(0), c1).unwrap(); + let n3 = sys.add_component_to_circuit(make_mock(0), c1).unwrap(); + + sys.add_edge(n0, n1).unwrap(); + sys.add_edge(n1, n0).unwrap(); + sys.add_edge(n2, n3).unwrap(); + sys.add_edge(n3, n2).unwrap(); + + assert_eq!(sys.circuit_count(), 2); + assert_eq!(sys.circuit_nodes(c0).count(), 2); + assert_eq!(sys.circuit_nodes(c1).count(), 2); + assert_eq!(sys.circuit_edges(c0).count(), 2); + assert_eq!(sys.circuit_edges(c1).count(), 2); + + sys.finalize().unwrap(); + } + + #[test] + fn test_cross_circuit_edge_rejected() { + let mut sys = System::new(); + let c0 = CircuitId::ZERO; + let c1 = CircuitId(1); + + let n0 = sys.add_component_to_circuit(make_mock(0), c0).unwrap(); + let n1 = sys.add_component_to_circuit(make_mock(0), c1).unwrap(); + + let result = sys.add_edge(n0, n1); + assert!(result.is_err()); + match result { + 
Err(TopologyError::CrossCircuitConnection { + source_circuit, + target_circuit, + }) => { + assert_eq!(source_circuit, 0); + assert_eq!(target_circuit, 1); + } + other => panic!("expected CrossCircuitConnection, got {:?}", other), + } + } + + #[test] + fn test_circuit_count_and_accessors() { + let mut sys = System::new(); + let c0 = CircuitId::ZERO; + let c1 = CircuitId(1); + let c2 = CircuitId(2); + + let _n0 = sys.add_component_to_circuit(make_mock(0), c0).unwrap(); + let _n1 = sys.add_component_to_circuit(make_mock(0), c0).unwrap(); + let _n2 = sys.add_component_to_circuit(make_mock(0), c1).unwrap(); + let _n3 = sys.add_component_to_circuit(make_mock(0), c2).unwrap(); + + assert_eq!(sys.circuit_count(), 3); + assert_eq!(sys.circuit_nodes(c0).count(), 2); + assert_eq!(sys.circuit_nodes(c1).count(), 1); + assert_eq!(sys.circuit_nodes(c2).count(), 1); + } + + #[test] + fn test_max_five_circuits() { + // Test: 5 circuits accepted (0, 1, 2, 3, 4), 6th circuit (5) rejected + let mut sys = System::new(); + for i in 0..=4 { + let cid = CircuitId(i); + let result = sys.add_component_to_circuit(make_mock(0), cid); + assert!( + result.is_ok(), + "circuit {} should be accepted (max 5 circuits: 0-4)", + i + ); + } + assert_eq!(sys.circuit_count(), 5, "should have exactly 5 circuits"); + + // 6th circuit should be rejected + let result = sys.add_component_to_circuit(make_mock(0), CircuitId(5)); + assert!( + result.is_err(), + "circuit 5 should be rejected (exceeds max of 4)" + ); + match result { + Err(TopologyError::TooManyCircuits { requested }) => assert_eq!(requested, 5), + other => panic!("expected TooManyCircuits, got {:?}", other), + } + } + + #[test] + fn test_single_circuit_backward_compat() { + let mut sys = System::new(); + let n0 = sys.add_component(make_mock(0)); + let n1 = sys.add_component(make_mock(0)); + let edge0 = sys.add_edge(n0, n1).unwrap(); + let edge1 = sys.add_edge(n1, n0).unwrap(); + + assert_eq!(sys.circuit_count(), 1); + 
assert_eq!(sys.circuit_nodes(CircuitId::ZERO).count(), 2); + + // Verify edge circuit membership + assert_eq!(sys.edge_circuit(edge0), CircuitId::ZERO); + assert_eq!(sys.edge_circuit(edge1), CircuitId::ZERO); + + // Verify circuit_edges returns correct edges + let circuit_0_edges: Vec<_> = sys.circuit_edges(CircuitId::ZERO).collect(); + assert_eq!(circuit_0_edges.len(), 2); + assert!(circuit_0_edges.contains(&edge0)); + assert!(circuit_0_edges.contains(&edge1)); + + sys.finalize().unwrap(); + } + + #[test] + fn test_cross_circuit_add_edge_with_ports_rejected() { + let p = 100_000.0; + let h = 400_000.0; + let mut sys = System::new(); + let n0 = sys + .add_component_to_circuit(make_ported_mock("R134a", p, h), CircuitId::ZERO) + .unwrap(); + let n1 = sys + .add_component_to_circuit(make_ported_mock("R134a", p, h), CircuitId(1)) + .unwrap(); + + let result = sys.add_edge_with_ports(n0, 1, n1, 0); + assert!(result.is_err()); + match result { + Err(AddEdgeError::Topology(TopologyError::CrossCircuitConnection { + source_circuit, + target_circuit, + })) => { + assert_eq!(source_circuit, 0); + assert_eq!(target_circuit, 1); + } + other => panic!( + "expected AddEdgeError::Topology(CrossCircuitConnection), got {:?}", + other + ), + } + } + + // --- Story 3.4: Thermal Coupling Between Circuits tests --- + + #[test] + fn test_add_thermal_coupling_valid() { + use entropyk_core::ThermalConductance; + + let mut sys = System::new(); + let _n0 = sys + .add_component_to_circuit(make_mock(0), CircuitId(0)) + .unwrap(); + let _n1 = sys + .add_component_to_circuit(make_mock(0), CircuitId(1)) + .unwrap(); + + let coupling = ThermalCoupling::new( + CircuitId(0), + CircuitId(1), + ThermalConductance::from_watts_per_kelvin(1000.0), + ); + + let idx = sys.add_thermal_coupling(coupling).unwrap(); + assert_eq!(idx, 0); + assert_eq!(sys.thermal_coupling_count(), 1); + + let retrieved = sys.get_thermal_coupling(0).unwrap(); + assert_eq!(retrieved.hot_circuit, CircuitId(0)); + 
assert_eq!(retrieved.cold_circuit, CircuitId(1)); + } + + #[test] + fn test_add_thermal_coupling_invalid_circuit() { + use entropyk_core::ThermalConductance; + + let mut sys = System::new(); + let _n0 = sys + .add_component_to_circuit(make_mock(0), CircuitId(0)) + .unwrap(); + // Circuit 1 has no components + + let coupling = ThermalCoupling::new( + CircuitId(0), + CircuitId(1), // This circuit doesn't exist + ThermalConductance::from_watts_per_kelvin(1000.0), + ); + + let result = sys.add_thermal_coupling(coupling); + assert!(result.is_err()); + match result { + Err(TopologyError::InvalidCircuitForCoupling { circuit_id }) => { + assert_eq!(circuit_id, 1); + } + other => panic!("expected InvalidCircuitForCoupling, got {:?}", other), + } + } + + #[test] + fn test_add_thermal_coupling_hot_circuit_invalid() { + use entropyk_core::ThermalConductance; + + let mut sys = System::new(); + let _n0 = sys + .add_component_to_circuit(make_mock(0), CircuitId(0)) + .unwrap(); + + let coupling = ThermalCoupling::new( + CircuitId(99), // This circuit doesn't exist + CircuitId(0), + ThermalConductance::from_watts_per_kelvin(1000.0), + ); + + let result = sys.add_thermal_coupling(coupling); + assert!(result.is_err()); + match result { + Err(TopologyError::InvalidCircuitForCoupling { circuit_id }) => { + assert_eq!(circuit_id, 99); + } + other => panic!("expected InvalidCircuitForCoupling, got {:?}", other), + } + } + + #[test] + fn test_multiple_thermal_couplings() { + use entropyk_core::ThermalConductance; + + let mut sys = System::new(); + let _n0 = sys + .add_component_to_circuit(make_mock(0), CircuitId(0)) + .unwrap(); + let _n1 = sys + .add_component_to_circuit(make_mock(0), CircuitId(1)) + .unwrap(); + let _n2 = sys + .add_component_to_circuit(make_mock(0), CircuitId(2)) + .unwrap(); + + let coupling1 = ThermalCoupling::new( + CircuitId(0), + CircuitId(1), + ThermalConductance::from_watts_per_kelvin(1000.0), + ); + let coupling2 = ThermalCoupling::new( + CircuitId(1), + 
CircuitId(2), + ThermalConductance::from_watts_per_kelvin(500.0), + ); + + let idx1 = sys.add_thermal_coupling(coupling1).unwrap(); + let idx2 = sys.add_thermal_coupling(coupling2).unwrap(); + + assert_eq!(idx1, 0); + assert_eq!(idx2, 1); + assert_eq!(sys.thermal_coupling_count(), 2); + + let all_couplings = sys.thermal_couplings(); + assert_eq!(all_couplings.len(), 2); + } + + #[test] + fn test_thermal_coupling_same_circuit() { + // It's valid to couple a circuit to itself (internal heat exchanger / economizer) + use entropyk_core::ThermalConductance; + + let mut sys = System::new(); + let _n0 = sys + .add_component_to_circuit(make_mock(0), CircuitId(0)) + .unwrap(); + let _n1 = sys + .add_component_to_circuit(make_mock(0), CircuitId(0)) + .unwrap(); + + let coupling = ThermalCoupling::new( + CircuitId(0), + CircuitId(0), // Same circuit + ThermalConductance::from_watts_per_kelvin(1000.0), + ); + + let result = sys.add_thermal_coupling(coupling); + assert!(result.is_ok(), "Same-circuit coupling should be allowed"); + } + + // --- Story 3.5: Zero-Flow Branch Handling --- + + /// Mock component that behaves like an Off branch: residual = state[0] (ṁ - 0), Jacobian ∂r/∂state[0] = 1. + /// At state[0] = 0 (zero flow) residuals and Jacobian remain finite (no division by zero). 
+ struct ZeroFlowMock; + + impl Component for ZeroFlowMock { + fn compute_residuals( + &self, + state: &SystemState, + residuals: &mut entropyk_components::ResidualVector, + ) -> Result<(), ComponentError> { + if !state.is_empty() { + residuals[0] = state[0]; + } + Ok(()) + } + + fn jacobian_entries( + &self, + _state: &SystemState, + jacobian: &mut JacobianBuilder, + ) -> Result<(), ComponentError> { + jacobian.add_entry(0, 0, 1.0); + Ok(()) + } + + fn n_equations(&self) -> usize { + 1 + } + + fn get_ports(&self) -> &[ConnectedPort] { + &[] + } + } + + #[test] + fn test_zero_flow_branch_residuals_and_jacobian_finite() { + // Story 3.5: System with one branch at zero flow must not produce NaN/Inf in residuals or Jacobian. + let mut sys = System::new(); + let n0 = sys.add_component(Box::new(ZeroFlowMock)); + let n1 = sys.add_component(make_mock(1)); + sys.add_edge(n0, n1).unwrap(); + sys.add_edge(n1, n0).unwrap(); + sys.finalize().unwrap(); + + let state_len = sys.state_vector_len(); + let mut state = vec![0.0; state_len]; + state[0] = 0.0; + + let total_eqs: usize = 1 + 1; + let mut residuals = vec![0.0; total_eqs]; + let result = sys.compute_residuals(&state, &mut residuals); + assert!(result.is_ok()); + for (i, &r) in residuals.iter().enumerate() { + assert!(r.is_finite(), "residual[{}] must be finite, got {}", i, r); + } + + let mut jacobian = JacobianBuilder::new(); + let result = sys.assemble_jacobian(&state, &mut jacobian); + assert!(result.is_ok()); + + let entries = jacobian.entries(); + for (row, col, value) in entries { + assert!( + value.is_finite(), + "Jacobian ({}, {}) must be finite, got {}", + row, + col, + value + ); + } + + // Check for zero rows: each equation should have at least one non-zero derivative + let mut row_has_nonzero = vec![false; total_eqs]; + for (row, _col, value) in entries { + if value.abs() > 1e-15 { + row_has_nonzero[*row] = true; + } + } + for (row, &has_nonzero) in row_has_nonzero.iter().enumerate() { + assert!( + 
has_nonzero, + "Jacobian row {} is all zeros (degenerate equation)", + row + ); + } + } +} diff --git a/crates/solver/tests/fallback_solver.rs b/crates/solver/tests/fallback_solver.rs new file mode 100644 index 0000000..f7e4ab9 --- /dev/null +++ b/crates/solver/tests/fallback_solver.rs @@ -0,0 +1,672 @@ +//! Integration tests for Story 4.4: Intelligent Fallback Strategy +//! +//! Tests the FallbackSolver behavior: +//! - Newton diverges → Picard converges +//! - Newton diverges → Picard stabilizes → Newton returns +//! - Oscillation prevention (max switches reached) +//! - Fallback disabled (pure Newton behavior) +//! - Timeout applies across switches +//! - No heap allocation during switches + +use entropyk_components::{ + Component, ComponentError, JacobianBuilder, ResidualVector, SystemState, +}; +use entropyk_solver::solver::{ + ConvergenceStatus, FallbackConfig, FallbackSolver, NewtonConfig, PicardConfig, Solver, + SolverError, SolverStrategy, +}; +use entropyk_solver::system::System; +use std::time::Duration; + +// ───────────────────────────────────────────────────────────────────────────── +// Mock Components for Testing +// ───────────────────────────────────────────────────────────────────────────── + +/// A simple linear system: r = A * x - b +/// Converges in one Newton step, but can be made to diverge. +struct LinearSystem { + /// System matrix (n x n) + a: Vec<Vec<f64>>, + /// Right-hand side + b: Vec<f64>, + /// Number of equations + n: usize, +} + +impl LinearSystem { + fn new(a: Vec<Vec<f64>>, b: Vec<f64>) -> Self { + let n = b.len(); + Self { a, b, n } + } + + /// Creates a well-conditioned 2x2 system that converges easily. 
+ fn well_conditioned() -> Self { + // A = [[2, 1], [1, 2]], b = [3, 3] + // Solution: x = [1, 1] + Self::new(vec![vec![2.0, 1.0], vec![1.0, 2.0]], vec![3.0, 3.0]) + } +} + +impl Component for LinearSystem { + fn compute_residuals( + &self, + state: &SystemState, + residuals: &mut ResidualVector, + ) -> Result<(), ComponentError> { + // r = A * x - b + for i in 0..self.n { + let mut ax_i = 0.0; + for j in 0..self.n { + ax_i += self.a[i][j] * state[j]; + } + residuals[i] = ax_i - self.b[i]; + } + Ok(()) + } + + fn jacobian_entries( + &self, + _state: &SystemState, + jacobian: &mut JacobianBuilder, + ) -> Result<(), ComponentError> { + // J = A (constant Jacobian) + for i in 0..self.n { + for j in 0..self.n { + jacobian.add_entry(i, j, self.a[i][j]); + } + } + Ok(()) + } + + fn n_equations(&self) -> usize { + self.n + } + + fn get_ports(&self) -> &[entropyk_components::ConnectedPort] { + &[] + } +} + +/// A non-linear system that causes Newton to diverge but Picard to converge. +/// Uses a highly non-linear residual function. 
+struct StiffNonlinearSystem { + /// Non-linearity factor (higher = more stiff) + alpha: f64, + /// Number of equations + n: usize, +} + +impl StiffNonlinearSystem { + fn new(alpha: f64, n: usize) -> Self { + Self { alpha, n } + } +} + +impl Component for StiffNonlinearSystem { + fn compute_residuals( + &self, + state: &SystemState, + residuals: &mut ResidualVector, + ) -> Result<(), ComponentError> { + // Non-linear residual: r_i = x_i^3 - alpha * x_i - 1 + // This creates a cubic equation that can have multiple roots + for i in 0..self.n { + let x = state[i]; + residuals[i] = x * x * x - self.alpha * x - 1.0; + } + Ok(()) + } + + fn jacobian_entries( + &self, + state: &SystemState, + jacobian: &mut JacobianBuilder, + ) -> Result<(), ComponentError> { + // J_ii = 3 * x_i^2 - alpha + for i in 0..self.n { + let x = state[i]; + jacobian.add_entry(i, i, 3.0 * x * x - self.alpha); + } + Ok(()) + } + + fn n_equations(&self) -> usize { + self.n + } + + fn get_ports(&self) -> &[entropyk_components::ConnectedPort] { + &[] + } +} + +/// A system that converges slowly with Picard but diverges with Newton +/// from certain initial conditions. 
+struct SlowConvergingSystem { + /// Convergence rate (0 < rate < 1) + rate: f64, + /// Target value + target: f64, +} + +impl SlowConvergingSystem { + fn new(rate: f64, target: f64) -> Self { + Self { rate, target } + } +} + +impl Component for SlowConvergingSystem { + fn compute_residuals( + &self, + state: &SystemState, + residuals: &mut ResidualVector, + ) -> Result<(), ComponentError> { + // r = x - target (simple, but Newton can overshoot) + residuals[0] = state[0] - self.target; + Ok(()) + } + + fn jacobian_entries( + &self, + _state: &SystemState, + jacobian: &mut JacobianBuilder, + ) -> Result<(), ComponentError> { + jacobian.add_entry(0, 0, 1.0); + Ok(()) + } + + fn n_equations(&self) -> usize { + 1 + } + + fn get_ports(&self) -> &[entropyk_components::ConnectedPort] { + &[] + } +} + +// ───────────────────────────────────────────────────────────────────────────── +// Helper Functions +// ───────────────────────────────────────────────────────────────────────────── + +/// Creates a minimal system with a single component for testing. +fn create_test_system(component: Box<dyn Component>) -> System { + let mut system = System::new(); + let n0 = system.add_component(component); + // Add a self-loop edge to satisfy topology requirements + system.add_edge(n0, n0).unwrap(); + system.finalize().unwrap(); + system +} + +// ───────────────────────────────────────────────────────────────────────────── +// Integration Tests +// ───────────────────────────────────────────────────────────────────────────── + +/// Test that FallbackSolver converges on a well-conditioned linear system. 
+#[test] +fn test_fallback_solver_converges_linear_system() { + let mut system = create_test_system(Box::new(LinearSystem::well_conditioned())); + let mut solver = FallbackSolver::default_solver(); + + let result = solver.solve(&mut system); + assert!(result.is_ok(), "Should converge on well-conditioned system"); + + let converged = result.unwrap(); + assert!(converged.is_converged()); + assert!(converged.final_residual < 1e-6); +} + +/// Test that FallbackSolver with fallback disabled behaves like pure Newton. +#[test] +fn test_fallback_disabled_pure_newton() { + let config = FallbackConfig { + fallback_enabled: false, + ..Default::default() + }; + let mut solver = FallbackSolver::new(config); + let mut system = create_test_system(Box::new(LinearSystem::well_conditioned())); + + let result = solver.solve(&mut system); + assert!( + result.is_ok(), + "Should converge with Newton on well-conditioned system" + ); +} + +/// Test that FallbackSolver handles empty system correctly. +#[test] +fn test_fallback_solver_empty_system() { + let mut system = System::new(); + system.finalize().unwrap(); + + let mut solver = FallbackSolver::default_solver(); + let result = solver.solve(&mut system); + + assert!(result.is_err()); + match result { + Err(SolverError::InvalidSystem { ref message }) => { + assert!(message.contains("Empty") || message.contains("no state")); + } + other => panic!("Expected InvalidSystem, got {:?}", other), + } +} + +/// Test timeout enforcement across solver switches. 
+#[test] +fn test_fallback_solver_timeout() { + let mut system = create_test_system(Box::new(LinearSystem::well_conditioned())); + + // Very short timeout that should trigger + let timeout = Duration::from_micros(1); + let mut solver = FallbackSolver::default_solver() + .with_timeout(timeout) + .with_newton_config(NewtonConfig { + max_iterations: 10000, + ..Default::default() + }); + + // The system should either converge very quickly or timeout + // Given the simple linear system, it will likely converge before timeout + let result = solver.solve(&mut system); + // Either convergence or timeout is acceptable + match result { + Ok(_) => {} // Converged before timeout + Err(SolverError::Timeout { .. }) => {} // Timed out as expected + Err(other) => panic!("Unexpected error: {:?}", other), + } +} + +/// Test that FallbackSolver can be used as a trait object. +#[test] +fn test_fallback_solver_as_trait_object() { + let mut boxed: Box<dyn Solver> = Box::new(FallbackSolver::default_solver()); + let mut system = create_test_system(Box::new(LinearSystem::well_conditioned())); + + let result = boxed.solve(&mut system); + assert!(result.is_ok()); +} + +/// Test FallbackConfig customization. +#[test] +fn test_fallback_config_customization() { + let config = FallbackConfig { + fallback_enabled: true, + return_to_newton_threshold: 5e-4, + max_fallback_switches: 3, + }; + + let solver = FallbackSolver::new(config.clone()); + assert_eq!(solver.config, config); + assert_eq!(solver.config.return_to_newton_threshold, 5e-4); + assert_eq!(solver.config.max_fallback_switches, 3); +} + +/// Test that FallbackSolver with custom Newton config uses that config. 
+#[test] +fn test_fallback_solver_custom_newton_config() { + let newton_config = NewtonConfig { + max_iterations: 50, + tolerance: 1e-8, + ..Default::default() + }; + + let solver = FallbackSolver::default_solver().with_newton_config(newton_config.clone()); + assert_eq!(solver.newton_config.max_iterations, 50); + assert!((solver.newton_config.tolerance - 1e-8).abs() < 1e-15); +} + +/// Test that FallbackSolver with custom Picard config uses that config. +#[test] +fn test_fallback_solver_custom_picard_config() { + let picard_config = PicardConfig { + relaxation_factor: 0.3, + max_iterations: 200, + ..Default::default() + }; + + let solver = FallbackSolver::default_solver().with_picard_config(picard_config.clone()); + assert!((solver.picard_config.relaxation_factor - 0.3).abs() < 1e-15); + assert_eq!(solver.picard_config.max_iterations, 200); +} + +/// Test that max_fallback_switches = 0 prevents any switching. +#[test] +fn test_fallback_zero_switches() { + let config = FallbackConfig { + fallback_enabled: true, + max_fallback_switches: 0, + ..Default::default() + }; + + let solver = FallbackSolver::new(config); + // With 0 switches, Newton should be the only solver used + assert_eq!(solver.config.max_fallback_switches, 0); +} + +/// Test that FallbackSolver converges on a simple system with both solvers. 
+#[test] +fn test_fallback_both_solvers_can_converge() { + // Create a system that both Newton and Picard can solve + let mut system = create_test_system(Box::new(LinearSystem::well_conditioned())); + + // Test with Newton directly + let mut newton = NewtonConfig::default(); + let newton_result = newton.solve(&mut system); + assert!(newton_result.is_ok(), "Newton should converge"); + + // Reset system + let mut system = create_test_system(Box::new(LinearSystem::well_conditioned())); + + // Test with Picard directly + let mut picard = PicardConfig::default(); + let picard_result = picard.solve(&mut system); + assert!(picard_result.is_ok(), "Picard should converge"); + + // Reset system + let mut system = create_test_system(Box::new(LinearSystem::well_conditioned())); + + // Test with FallbackSolver + let mut fallback = FallbackSolver::default_solver(); + let fallback_result = fallback.solve(&mut system); + assert!(fallback_result.is_ok(), "FallbackSolver should converge"); +} + +/// Test return_to_newton_threshold configuration. +#[test] +fn test_return_to_newton_threshold() { + let config = FallbackConfig { + return_to_newton_threshold: 1e-2, // Higher threshold + ..Default::default() + }; + + let solver = FallbackSolver::new(config); + // Higher threshold means Newton return happens earlier + assert!((solver.config.return_to_newton_threshold - 1e-2).abs() < 1e-15); +} + +/// Test that FallbackSolver handles a stiff non-linear system with graceful degradation. 
+#[test] +fn test_fallback_stiff_nonlinear() { + // Create a stiff non-linear system that challenges both solvers + let mut system = create_test_system(Box::new(StiffNonlinearSystem::new(10.0, 2))); + + let mut solver = FallbackSolver::default_solver() + .with_newton_config(NewtonConfig { + max_iterations: 50, + tolerance: 1e-6, + ..Default::default() + }) + .with_picard_config(PicardConfig { + relaxation_factor: 0.3, + max_iterations: 200, + tolerance: 1e-6, + ..Default::default() + }); + + let result = solver.solve(&mut system); + + // Verify expected behavior: + // 1. Should converge (fallback strategy succeeds) + // 2. Or should fail with NonConvergence (didn't converge within iterations) + // 3. Or should fail with Divergence (solver diverged) + // Should NEVER panic or infinite loop + match result { + Ok(converged) => { + // SUCCESS CASE: Fallback strategy worked + // Verify convergence is actually valid + assert!( + converged.final_residual < 1.0, + "Converged residual {} should be reasonable (< 1.0)", + converged.final_residual + ); + if converged.is_converged() { + assert!( + converged.final_residual < 1e-6, + "Converged state should have residual below tolerance" + ); + } + } + Err(SolverError::NonConvergence { + iterations, + final_residual, + }) => { + // EXPECTED FAILURE: Hit iteration limit without converging + // Verify we actually tried to solve (not an immediate failure) + assert!( + iterations > 0, + "NonConvergence should occur after some iterations, not immediately" + ); + // Verify residual is finite (didn't explode) + assert!( + final_residual.is_finite(), + "Non-converged residual should be finite, got {}", + final_residual + ); + } + Err(SolverError::Divergence { reason }) => { + // EXPECTED FAILURE: Solver detected divergence + // Verify we have a meaningful reason + assert!(!reason.is_empty(), "Divergence error should have a reason"); + assert!( + reason.contains("diverg") + || reason.contains("exceed") + || reason.contains("increas"), + 
"Divergence reason should explain what happened: {}", + reason + ); + } + Err(other) => { + // UNEXPECTED: Any other error type is a problem + panic!("Unexpected error type for stiff system: {:?}", other); + } + } +} + +/// Test that timeout is enforced across solver switches. +#[test] +fn test_timeout_across_switches() { + let mut system = create_test_system(Box::new(StiffNonlinearSystem::new(5.0, 2))); + + // Very short timeout + let timeout = Duration::from_millis(10); + let mut solver = FallbackSolver::default_solver() + .with_timeout(timeout) + .with_newton_config(NewtonConfig { + max_iterations: 1000, + ..Default::default() + }) + .with_picard_config(PicardConfig { + max_iterations: 1000, + ..Default::default() + }); + + let result = solver.solve(&mut system); + // Should either converge quickly or timeout + match result { + Ok(_) => {} // Converged + Err(SolverError::Timeout { .. }) => {} // Timed out + Err(SolverError::NonConvergence { .. }) => {} // Didn't converge in time + Err(SolverError::Divergence { .. }) => {} // Diverged + Err(other) => panic!("Unexpected error: {:?}", other), + } +} + +/// Test that max_fallback_switches config value is respected. +#[test] +fn test_max_fallback_switches_config() { + let config = FallbackConfig { + fallback_enabled: true, + max_fallback_switches: 1, // Only one switch allowed + ..Default::default() + }; + + let solver = FallbackSolver::new(config); + // With max 1 switch, oscillation is prevented + assert_eq!(solver.config.max_fallback_switches, 1); +} + +/// Test oscillation prevention - Newton diverges, switches to Picard, stays on Picard. 
+#[test] +fn test_oscillation_prevention_newton_to_picard_stays() { + use entropyk_solver::solver::{ + FallbackConfig, FallbackSolver, NewtonConfig, PicardConfig, Solver, + }; + + // Create a system where Newton diverges but Picard converges + // Use StiffNonlinearSystem with high alpha to cause Newton divergence + let mut system = create_test_system(Box::new(StiffNonlinearSystem::new(100.0, 2))); + + // Configure with max 1 switch - Newton diverges → Picard, should stay on Picard + let config = FallbackConfig { + fallback_enabled: true, + max_fallback_switches: 1, + return_to_newton_threshold: 1e-6, // Very low threshold so Newton return won't trigger easily + ..Default::default() + }; + + let mut solver = FallbackSolver::new(config) + .with_newton_config(NewtonConfig { + max_iterations: 20, + tolerance: 1e-6, + ..Default::default() + }) + .with_picard_config(PicardConfig { + relaxation_factor: 0.2, + max_iterations: 500, + ..Default::default() + }); + + // Should either converge (Picard succeeds) or non-converge (but NOT oscillate) + let result = solver.solve(&mut system); + + match result { + Ok(converged) => { + // Success - Picard converged after Newton divergence + assert!(converged.is_converged() || converged.final_residual < 1.0); + } + Err(SolverError::NonConvergence { .. }) => { + // Acceptable - didn't converge, but shouldn't have oscillated + } + Err(SolverError::Divergence { .. }) => { + // Picard diverged - acceptable for stiff system + } + Err(other) => panic!("Unexpected error type: {:?}", other), + } +} + +/// Test that Newton re-divergence causes permanent commit to Picard. 
+#[test] +fn test_newton_redivergence_commits_to_picard() { + // Create a system that's borderline - Newton might diverge, Picard converges slowly + let mut system = create_test_system(Box::new(StiffNonlinearSystem::new(50.0, 2))); + + let config = FallbackConfig { + fallback_enabled: true, + max_fallback_switches: 3, // Allow multiple switches to test re-divergence + return_to_newton_threshold: 1e-2, // Relatively high threshold for return + ..Default::default() + }; + + let mut solver = FallbackSolver::new(config) + .with_newton_config(NewtonConfig { + max_iterations: 30, + tolerance: 1e-8, + ..Default::default() + }) + .with_picard_config(PicardConfig { + relaxation_factor: 0.25, + max_iterations: 300, + ..Default::default() + }); + + let result = solver.solve(&mut system); + + // Should complete without infinite oscillation + match result { + Ok(converged) => { + assert!(converged.final_residual < 1.0 || converged.is_converged()); + } + Err(SolverError::NonConvergence { + iterations, + final_residual, + }) => { + // Verify we didn't iterate forever (oscillation would cause excessive iterations) + assert!( + iterations < 1000, + "Too many iterations - possible oscillation" + ); + assert!(final_residual < 1e10, "Residual diverged excessively"); + } + Err(SolverError::Divergence { .. }) => { + // Acceptable - system is stiff + } + Err(other) => panic!("Unexpected error: {:?}", other), + } +} + +/// Test that FallbackSolver works with SolverStrategy pattern. 
+#[test] +fn test_fallback_solver_integration() { + // Verify FallbackSolver can be used alongside other solvers + let mut system = create_test_system(Box::new(LinearSystem::well_conditioned())); + + // Test with SolverStrategy::NewtonRaphson + let mut strategy = SolverStrategy::default(); + let result1 = strategy.solve(&mut system); + assert!(result1.is_ok()); + + // Reset and test with FallbackSolver + let mut system = create_test_system(Box::new(LinearSystem::well_conditioned())); + let mut fallback = FallbackSolver::default_solver(); + let result2 = fallback.solve(&mut system); + assert!(result2.is_ok()); + + // Both should converge to similar residuals + let r1 = result1.unwrap(); + let r2 = result2.unwrap(); + assert!((r1.final_residual - r2.final_residual).abs() < 1e-6); +} + +/// Test that FallbackSolver handles convergence at initial state. +#[test] +fn test_fallback_already_converged() { + // Create a system that's already at solution + struct ZeroResidualComponent; + + impl Component for ZeroResidualComponent { + fn compute_residuals( + &self, + _state: &SystemState, + residuals: &mut ResidualVector, + ) -> Result<(), ComponentError> { + residuals[0] = 0.0; // Already zero + Ok(()) + } + + fn jacobian_entries( + &self, + _state: &SystemState, + jacobian: &mut JacobianBuilder, + ) -> Result<(), ComponentError> { + jacobian.add_entry(0, 0, 1.0); + Ok(()) + } + + fn n_equations(&self) -> usize { + 1 + } + + fn get_ports(&self) -> &[entropyk_components::ConnectedPort] { + &[] + } + } + + let mut system = create_test_system(Box::new(ZeroResidualComponent)); + let mut solver = FallbackSolver::default_solver(); + + let result = solver.solve(&mut system); + assert!(result.is_ok()); + + let converged = result.unwrap(); + assert_eq!(converged.iterations, 0); // Should converge immediately + assert!(converged.is_converged()); +} diff --git a/crates/solver/tests/multi_circuit.rs b/crates/solver/tests/multi_circuit.rs new file mode 100644 index 0000000..06e95c2 --- 
/dev/null +++ b/crates/solver/tests/multi_circuit.rs @@ -0,0 +1,239 @@ +//! Integration tests for multi-circuit machine definition (Story 3.3, FR9). +//! +//! Verifies multi-circuit heat pump topology (refrigerant + water) without thermal coupling. +//! Tests circuits from 2 up to the maximum of 5 circuits (circuit IDs 0-4). + +use entropyk_components::{ + Component, ComponentError, ConnectedPort, JacobianBuilder, ResidualVector, SystemState, +}; +use entropyk_solver::{CircuitId, System, ThermalCoupling, TopologyError}; +use entropyk_core::ThermalConductance; + +/// Mock refrigerant component (e.g. compressor, condenser refrigerant side). +struct RefrigerantMock { + n_equations: usize, +} + +impl Component for RefrigerantMock { + fn compute_residuals( + &self, + _state: &SystemState, + residuals: &mut ResidualVector, + ) -> Result<(), ComponentError> { + for r in residuals.iter_mut().take(self.n_equations) { + *r = 0.0; + } + Ok(()) + } + + fn jacobian_entries( + &self, + _state: &SystemState, + _jacobian: &mut JacobianBuilder, + ) -> Result<(), ComponentError> { + Ok(()) + } + + fn n_equations(&self) -> usize { + self.n_equations + } + + fn get_ports(&self) -> &[ConnectedPort] { + &[] + } +} + +#[test] +fn test_two_circuit_heat_pump_topology() { + let mut sys = System::new(); + + // Circuit 0: refrigerant (compressor -> condenser -> valve -> evaporator) + let comp = sys + .add_component_to_circuit( + Box::new(RefrigerantMock { n_equations: 2 }), + CircuitId::ZERO, + ) + .unwrap(); + let cond = sys + .add_component_to_circuit( + Box::new(RefrigerantMock { n_equations: 2 }), + CircuitId::ZERO, + ) + .unwrap(); + let valve = sys + .add_component_to_circuit( + Box::new(RefrigerantMock { n_equations: 2 }), + CircuitId::ZERO, + ) + .unwrap(); + let evap = sys + .add_component_to_circuit( + Box::new(RefrigerantMock { n_equations: 2 }), + CircuitId::ZERO, + ) + .unwrap(); + + sys.add_edge(comp, cond).unwrap(); + sys.add_edge(cond, valve).unwrap(); + sys.add_edge(valve, 
evap).unwrap(); + sys.add_edge(evap, comp).unwrap(); + + // Circuit 1: water (pump -> condenser water side -> evaporator water side) + let pump = sys + .add_component_to_circuit(Box::new(RefrigerantMock { n_equations: 1 }), CircuitId(1)) + .unwrap(); + let cond_w = sys + .add_component_to_circuit(Box::new(RefrigerantMock { n_equations: 1 }), CircuitId(1)) + .unwrap(); + let evap_w = sys + .add_component_to_circuit(Box::new(RefrigerantMock { n_equations: 1 }), CircuitId(1)) + .unwrap(); + + sys.add_edge(pump, cond_w).unwrap(); + sys.add_edge(cond_w, evap_w).unwrap(); + sys.add_edge(evap_w, pump).unwrap(); + + assert_eq!(sys.circuit_count(), 2); + assert_eq!(sys.circuit_nodes(CircuitId::ZERO).count(), 4); + assert_eq!(sys.circuit_nodes(CircuitId(1)).count(), 3); + assert_eq!(sys.circuit_edges(CircuitId::ZERO).count(), 4); + assert_eq!(sys.circuit_edges(CircuitId(1)).count(), 3); + + let result = sys.finalize(); + assert!( + result.is_ok(), + "finalize should succeed: {:?}", + result.err() + ); +} + +#[test] +fn test_cross_circuit_rejected_integration() { + let mut sys = System::new(); + let n0 = sys + .add_component_to_circuit( + Box::new(RefrigerantMock { n_equations: 0 }), + CircuitId::ZERO, + ) + .unwrap(); + let n1 = sys + .add_component_to_circuit(Box::new(RefrigerantMock { n_equations: 0 }), CircuitId(1)) + .unwrap(); + + let result = sys.add_edge(n0, n1); + assert!(result.is_err()); + assert!(matches!( + result, + Err(TopologyError::CrossCircuitConnection { .. 
}) + )); +} + +#[test] +fn test_maximum_five_circuits_integration() { + // Integration test: Verify maximum of 5 circuits (IDs 0-4) is supported + let mut sys = System::new(); + + // Create 5 separate circuits, each with 2 nodes forming a cycle + for circuit_id in 0..=4 { + let n0 = sys + .add_component_to_circuit( + Box::new(RefrigerantMock { n_equations: 1 }), + CircuitId(circuit_id), + ) + .unwrap(); + let n1 = sys + .add_component_to_circuit( + Box::new(RefrigerantMock { n_equations: 1 }), + CircuitId(circuit_id), + ) + .unwrap(); + sys.add_edge(n0, n1).unwrap(); + sys.add_edge(n1, n0).unwrap(); + } + + assert_eq!(sys.circuit_count(), 5, "should have exactly 5 circuits"); + + // Verify each circuit has its own nodes and edges + for circuit_id in 0..=4 { + assert_eq!( + sys.circuit_nodes(CircuitId(circuit_id)).count(), + 2, + "circuit {} should have 2 nodes", + circuit_id + ); + assert_eq!( + sys.circuit_edges(CircuitId(circuit_id)).count(), + 2, + "circuit {} should have 2 edges", + circuit_id + ); + } + + // Verify 6th circuit is rejected + let result = + sys.add_component_to_circuit(Box::new(RefrigerantMock { n_equations: 1 }), CircuitId(5)); + assert!( + result.is_err(), + "circuit 5 should be rejected (exceeds max of 4)" + ); + assert!(matches!( + result, + Err(TopologyError::TooManyCircuits { requested: 5 }) + )); + + // Verify system can still be finalized with 5 circuits + sys.finalize().unwrap(); +} + +#[test] +fn test_coupling_residuals_basic() { + // Two circuits with one thermal coupling; verify coupling_residual_count and coupling_residuals. 
+ let mut sys = System::new(); + let n0 = sys + .add_component_to_circuit( + Box::new(RefrigerantMock { n_equations: 1 }), + CircuitId::ZERO, + ) + .unwrap(); + let n1 = sys + .add_component_to_circuit( + Box::new(RefrigerantMock { n_equations: 1 }), + CircuitId::ZERO, + ) + .unwrap(); + sys.add_edge(n0, n1).unwrap(); + sys.add_edge(n1, n0).unwrap(); + + let n2 = sys + .add_component_to_circuit( + Box::new(RefrigerantMock { n_equations: 1 }), + CircuitId(1), + ) + .unwrap(); + let n3 = sys + .add_component_to_circuit( + Box::new(RefrigerantMock { n_equations: 1 }), + CircuitId(1), + ) + .unwrap(); + sys.add_edge(n2, n3).unwrap(); + sys.add_edge(n3, n2).unwrap(); + + let coupling = ThermalCoupling::new( + CircuitId::ZERO, + CircuitId(1), + ThermalConductance::from_watts_per_kelvin(1000.0), + ); + sys.add_thermal_coupling(coupling).unwrap(); + + sys.finalize().unwrap(); + + assert_eq!(sys.coupling_residual_count(), 1); + + let temperatures = [(350.0_f64, 300.0_f64)]; // T_hot, T_cold in K + let mut out = [0.0_f64; 4]; + sys.coupling_residuals(&temperatures, &mut out); + // Q = UA * (T_hot - T_cold) = 1000 * 50 = 50000 W into cold circuit + assert!(out[0] > 0.0); + assert!((out[0] - 50000.0).abs() < 1.0); +} diff --git a/crates/solver/tests/newton_convergence.rs b/crates/solver/tests/newton_convergence.rs new file mode 100644 index 0000000..dbad570 --- /dev/null +++ b/crates/solver/tests/newton_convergence.rs @@ -0,0 +1,480 @@ +//! Comprehensive integration tests for Newton-Raphson solver (Story 4.2). +//! +//! Tests cover all Acceptance Criteria: +//! - AC #1: Quadratic convergence near solution +//! - AC #2: Line search prevents overshooting +//! - AC #3: Analytical and numerical Jacobian support +//! - AC #4: Timeout enforcement +//! - AC #5: Divergence detection +//! 
- AC #6: Pre-allocated buffers + +use entropyk_solver::{ConvergenceStatus, JacobianMatrix, NewtonConfig, Solver, SolverError, System}; +use approx::assert_relative_eq; +use std::time::Duration; + +// ───────────────────────────────────────────────────────────────────────────── +// AC #1: Quadratic Convergence Near Solution +// ───────────────────────────────────────────────────────────────────────────── + +/// Test that Newton-Raphson exhibits quadratic convergence on a simple system. +/// +/// For a well-conditioned system near the solution, the residual norm should +/// decrease quadratically (roughly square each iteration). +#[test] +fn test_quadratic_convergence_simple_system() { + // We'll test the Jacobian solve directly since we need a mock system + // For J = [[2, 0], [0, 3]] and r = [2, 3], solution is x = [-1, -1] + + let entries = vec![(0, 0, 2.0), (1, 1, 3.0)]; + let jacobian = JacobianMatrix::from_builder(&entries, 2, 2); + + let residuals = vec![2.0, 3.0]; + let delta = jacobian.solve(&residuals).expect("non-singular"); + + // J·Δx = -r => Δx = -J^{-1}·r + assert_relative_eq!(delta[0], -1.0, epsilon = 1e-10); + assert_relative_eq!(delta[1], -1.0, epsilon = 1e-10); +} + +/// Test convergence on a 2x2 linear system. +#[test] +fn test_solve_2x2_linear_system() { + // J = [[4, 1], [1, 3]], r = [1, 2] + // Solution: Δx = -J^{-1}·r + let entries = vec![(0, 0, 4.0), (0, 1, 1.0), (1, 0, 1.0), (1, 1, 3.0)]; + let jacobian = JacobianMatrix::from_builder(&entries, 2, 2); + + let residuals = vec![1.0, 2.0]; + let delta = jacobian.solve(&residuals).expect("non-singular"); + + // Verify: J·Δx = -r + let j00 = 4.0; + let j01 = 1.0; + let j10 = 1.0; + let j11 = 3.0; + + let computed_r0 = j00 * delta[0] + j01 * delta[1]; + let computed_r1 = j10 * delta[0] + j11 * delta[1]; + + assert_relative_eq!(computed_r0, -1.0, epsilon = 1e-10); + assert_relative_eq!(computed_r1, -2.0, epsilon = 1e-10); +} + +/// Test that a diagonal system converges in one Newton iteration. 
+#[test] +fn test_diagonal_system_one_iteration() { + // For a diagonal Jacobian, Newton should converge in 1 iteration + // J = [[a, 0], [0, b]], r = [c, d] + // Δx = [-c/a, -d/b] + + let entries = vec![(0, 0, 5.0), (1, 1, 7.0)]; + let jacobian = JacobianMatrix::from_builder(&entries, 2, 2); + + let residuals = vec![10.0, 21.0]; + let delta = jacobian.solve(&residuals).expect("non-singular"); + + assert_relative_eq!(delta[0], -2.0, epsilon = 1e-10); + assert_relative_eq!(delta[1], -3.0, epsilon = 1e-10); +} + +// ───────────────────────────────────────────────────────────────────────────── +// AC #2: Line Search Prevents Overshooting +// ───────────────────────────────────────────────────────────────────────────── + +/// Test that line search is configured correctly. +#[test] +fn test_line_search_configuration() { + let cfg = NewtonConfig { + line_search: true, + line_search_armijo_c: 1e-4, + line_search_max_backtracks: 20, + ..Default::default() + }; + + assert!(cfg.line_search); + assert_relative_eq!(cfg.line_search_armijo_c, 1e-4); + assert_eq!(cfg.line_search_max_backtracks, 20); +} + +/// Test that line search can be disabled. +#[test] +fn test_line_search_disabled_by_default() { + let cfg = NewtonConfig::default(); + assert!(!cfg.line_search); +} + +/// Test Armijo condition constants are sensible. +#[test] +fn test_armijo_constant_range() { + let cfg = NewtonConfig::default(); + + // Armijo constant should be in (0, 0.5) for typical line search + assert!(cfg.line_search_armijo_c > 0.0); + assert!(cfg.line_search_armijo_c < 0.5); +} + +// ───────────────────────────────────────────────────────────────────────────── +// AC #3: Analytical and Numerical Jacobian Support +// ───────────────────────────────────────────────────────────────────────────── + +/// Test that numerical Jacobian can be enabled. 
+#[test] +fn test_numerical_jacobian_configuration() { + let cfg = NewtonConfig { + use_numerical_jacobian: true, + ..Default::default() + }; + + assert!(cfg.use_numerical_jacobian); +} + +/// Test that analytical Jacobian is the default. +#[test] +fn test_analytical_jacobian_default() { + let cfg = NewtonConfig::default(); + assert!(!cfg.use_numerical_jacobian); +} + +/// Test numerical Jacobian computation matches analytical for linear function. +#[test] +fn test_numerical_jacobian_linear_function() { + // r[0] = 2*x0 + 3*x1 + // r[1] = x0 - 2*x1 + // J = [[2, 3], [1, -2]] + + let state = vec![1.0, 2.0]; + let residuals = vec![2.0 * state[0] + 3.0 * state[1], state[0] - 2.0 * state[1]]; + + let compute_residuals = |s: &[f64], r: &mut [f64]| { + r[0] = 2.0 * s[0] + 3.0 * s[1]; + r[1] = s[0] - 2.0 * s[1]; + Ok(()) + }; + + let j_num = JacobianMatrix::numerical(compute_residuals, &state, &residuals, 1e-8).unwrap(); + + // Check against analytical Jacobian + assert_relative_eq!(j_num.get(0, 0).unwrap(), 2.0, epsilon = 1e-5); + assert_relative_eq!(j_num.get(0, 1).unwrap(), 3.0, epsilon = 1e-5); + assert_relative_eq!(j_num.get(1, 0).unwrap(), 1.0, epsilon = 1e-5); + assert_relative_eq!(j_num.get(1, 1).unwrap(), -2.0, epsilon = 1e-5); +} + +/// Test numerical Jacobian for non-linear function. 
+#[test] +fn test_numerical_jacobian_nonlinear_function() { + // r[0] = x0^2 + x1 + // r[1] = sin(x0) + cos(x1) + // J = [[2*x0, 1], [cos(x0), -sin(x1)]] + + let state = vec![0.5_f64, 1.0_f64]; + let residuals = vec![state[0].powi(2) + state[1], state[0].sin() + state[1].cos()]; + + let compute_residuals = |s: &[f64], r: &mut [f64]| { + r[0] = s[0].powi(2) + s[1]; + r[1] = s[0].sin() + s[1].cos(); + Ok(()) + }; + + let j_num = JacobianMatrix::numerical(compute_residuals, &state, &residuals, 1e-8).unwrap(); + + // Analytical values + let j00 = 2.0 * state[0]; // 1.0 + let j01 = 1.0; + let j10 = state[0].cos(); + let j11 = -state[1].sin(); + + assert_relative_eq!(j_num.get(0, 0).unwrap(), j00, epsilon = 1e-5); + assert_relative_eq!(j_num.get(0, 1).unwrap(), j01, epsilon = 1e-5); + assert_relative_eq!(j_num.get(1, 0).unwrap(), j10, epsilon = 1e-5); + assert_relative_eq!(j_num.get(1, 1).unwrap(), j11, epsilon = 1e-5); +} + +// ───────────────────────────────────────────────────────────────────────────── +// AC #4: Timeout Enforcement +// ───────────────────────────────────────────────────────────────────────────── + +/// Test timeout configuration. +#[test] +fn test_timeout_configuration() { + let timeout = Duration::from_millis(500); + let cfg = NewtonConfig::default().with_timeout(timeout); + + assert_eq!(cfg.timeout, Some(timeout)); +} + +/// Test timeout is None by default. +#[test] +fn test_no_timeout_by_default() { + let cfg = NewtonConfig::default(); + assert!(cfg.timeout.is_none()); +} + +/// Test timeout error contains correct duration. +#[test] +fn test_timeout_error_contains_duration() { + let err = SolverError::Timeout { timeout_ms: 1234 }; + let msg = err.to_string(); + + assert!(msg.contains("1234")); +} + +// ───────────────────────────────────────────────────────────────────────────── +// AC #5: Divergence Detection +// ───────────────────────────────────────────────────────────────────────────── + +/// Test divergence threshold configuration. 
+#[test] +fn test_divergence_threshold_configuration() { + let cfg = NewtonConfig { + divergence_threshold: 1e8, + ..Default::default() + }; + + assert_relative_eq!(cfg.divergence_threshold, 1e8); +} + +/// Test default divergence threshold. +#[test] +fn test_default_divergence_threshold() { + let cfg = NewtonConfig::default(); + assert_relative_eq!(cfg.divergence_threshold, 1e10); +} + +/// Test divergence error contains reason. +#[test] +fn test_divergence_error_contains_reason() { + let err = SolverError::Divergence { + reason: "Residual increased for 3 consecutive iterations".to_string(), + }; + let msg = err.to_string(); + + assert!(msg.contains("Residual increased")); + assert!(msg.contains("3 consecutive")); +} + +/// Test divergence error for threshold exceeded. +#[test] +fn test_divergence_error_threshold_exceeded() { + let err = SolverError::Divergence { + reason: "Residual norm 1e12 exceeds threshold 1e10".to_string(), + }; + let msg = err.to_string(); + + assert!(msg.contains("exceeds threshold")); +} + +// ───────────────────────────────────────────────────────────────────────────── +// AC #6: Pre-Allocated Buffers +// ───────────────────────────────────────────────────────────────────────────── + +/// Test that solver handles empty system gracefully (pre-allocated buffers work). +#[test] +fn test_preallocated_buffers_empty_system() { + let mut sys = System::new(); + sys.finalize().unwrap(); + + let mut solver = NewtonConfig::default(); + let result = solver.solve(&mut sys); + + // Should return error without panic + assert!(result.is_err()); +} + +/// Test that solver handles configuration variations without panic. 
+#[test] +fn test_preallocated_buffers_all_configs() { + let mut sys = System::new(); + sys.finalize().unwrap(); + + // Test with all features enabled + let mut solver = NewtonConfig { + max_iterations: 50, + tolerance: 1e-8, + line_search: true, + timeout: Some(Duration::from_millis(100)), + use_numerical_jacobian: true, + line_search_armijo_c: 1e-3, + line_search_max_backtracks: 10, + divergence_threshold: 1e8, + ..Default::default() + }; + + let result = solver.solve(&mut sys); + assert!(result.is_err()); // Empty system, but no panic +} + +// ───────────────────────────────────────────────────────────────────────────── +// Jacobian Matrix Tests +// ───────────────────────────────────────────────────────────────────────────── + +/// Test singular Jacobian returns None. +#[test] +fn test_singular_jacobian_returns_none() { + // Singular matrix: [[1, 1], [1, 1]] + let entries = vec![(0, 0, 1.0), (0, 1, 1.0), (1, 0, 1.0), (1, 1, 1.0)]; + let jacobian = JacobianMatrix::from_builder(&entries, 2, 2); + + let residuals = vec![1.0, 2.0]; + let result = jacobian.solve(&residuals); + + assert!(result.is_none(), "Singular matrix should return None"); +} + +/// Test zero Jacobian returns None. +#[test] +fn test_zero_jacobian_returns_none() { + let jacobian = JacobianMatrix::zeros(2, 2); + + let residuals = vec![1.0, 2.0]; + let result = jacobian.solve(&residuals); + + assert!(result.is_none(), "Zero matrix should return None"); +} + +/// Test Jacobian condition number for well-conditioned matrix. +#[test] +fn test_jacobian_condition_number_well_conditioned() { + let entries = vec![(0, 0, 1.0), (1, 1, 1.0)]; + let jacobian = JacobianMatrix::from_builder(&entries, 2, 2); + + let cond = jacobian.condition_number().unwrap(); + assert_relative_eq!(cond, 1.0, epsilon = 1e-10); +} + +/// Test Jacobian condition number for ill-conditioned matrix. 
+#[test] +fn test_jacobian_condition_number_ill_conditioned() { + // Nearly singular matrix + let entries = vec![ + (0, 0, 1.0), + (0, 1, 1.0), + (1, 0, 1.0), + (1, 1, 1.0 + 1e-12), + ]; + let jacobian = JacobianMatrix::from_builder(&entries, 2, 2); + + let cond = jacobian.condition_number(); + assert!(cond.unwrap() > 1e10, "Should be ill-conditioned"); +} + +/// Test Jacobian for non-square (overdetermined) system uses least-squares. +#[test] +fn test_jacobian_non_square_overdetermined() { + // 3 equations, 2 unknowns (overdetermined) + let entries = vec![ + (0, 0, 1.0), + (0, 1, 1.0), + (1, 0, 1.0), + (1, 1, 2.0), + (2, 0, 1.0), + (2, 1, 3.0), + ]; + let jacobian = JacobianMatrix::from_builder(&entries, 3, 2); + + let residuals = vec![1.0, 2.0, 3.0]; + let result = jacobian.solve(&residuals); + + // Should return a least-squares solution + assert!(result.is_some(), "Non-square system should return least-squares solution"); +} + +// ───────────────────────────────────────────────────────────────────────────── +// ConvergenceStatus Tests +// ───────────────────────────────────────────────────────────────────────────── + +/// Test ConvergenceStatus::Converged. +#[test] +fn test_convergence_status_converged() { + use entropyk_solver::ConvergedState; + + let state = ConvergedState::new( + vec![1.0, 2.0], + 10, + 1e-8, + ConvergenceStatus::Converged, + ); + + assert!(state.is_converged()); + assert_eq!(state.status, ConvergenceStatus::Converged); +} + +/// Test ConvergenceStatus::TimedOutWithBestState. 
+#[test] +fn test_convergence_status_timed_out() { + use entropyk_solver::ConvergedState; + + let state = ConvergedState::new( + vec![1.0], + 50, + 1e-3, + ConvergenceStatus::TimedOutWithBestState, + ); + + assert!(!state.is_converged()); + assert_eq!(state.status, ConvergenceStatus::TimedOutWithBestState); +} + +// ───────────────────────────────────────────────────────────────────────────── +// Error Display Tests +// ───────────────────────────────────────────────────────────────────────────── + +/// Test NonConvergence error display. +#[test] +fn test_non_convergence_display() { + let err = SolverError::NonConvergence { + iterations: 100, + final_residual: 1.23e-4, + }; + let msg = err.to_string(); + + assert!(msg.contains("100")); + assert!(msg.contains("1.23")); +} + +/// Test InvalidSystem error display. +#[test] +fn test_invalid_system_display() { + let err = SolverError::InvalidSystem { + message: "Empty system has no equations".to_string(), + }; + let msg = err.to_string(); + + assert!(msg.contains("Empty system")); +} + +// ───────────────────────────────────────────────────────────────────────────── +// Configuration Validation Tests +// ───────────────────────────────────────────────────────────────────────────── + +/// Test that max_iterations must be positive. +#[test] +fn test_max_iterations_positive() { + let cfg = NewtonConfig::default(); + assert!(cfg.max_iterations > 0); +} + +/// Test that tolerance must be positive. +#[test] +fn test_tolerance_positive() { + let cfg = NewtonConfig::default(); + assert!(cfg.tolerance > 0.0); +} + +/// Test that relaxation factor for Picard is in valid range. +#[test] +fn test_picard_relaxation_factor_range() { + use entropyk_solver::PicardConfig; + + let cfg = PicardConfig::default(); + assert!(cfg.relaxation_factor > 0.0); + assert!(cfg.relaxation_factor <= 1.0); +} + +/// Test line search max backtracks is reasonable. 
+#[test] +fn test_line_search_max_backtracks_reasonable() { + let cfg = NewtonConfig::default(); + assert!(cfg.line_search_max_backtracks > 0); + assert!(cfg.line_search_max_backtracks <= 100); +} \ No newline at end of file diff --git a/crates/solver/tests/newton_raphson.rs b/crates/solver/tests/newton_raphson.rs new file mode 100644 index 0000000..8dc9bf9 --- /dev/null +++ b/crates/solver/tests/newton_raphson.rs @@ -0,0 +1,254 @@ +//! Integration tests for Newton-Raphson solver (Story 4.2). +//! +//! Tests cover: +//! - AC #1: Solver trait and strategy dispatch +//! - AC #2: Configuration options +//! - AC #3: Timeout enforcement +//! - AC #4: Error handling for empty/invalid systems +//! - AC #5: Pre-allocated buffers (no panic) + +use entropyk_solver::{NewtonConfig, Solver, SolverError, System}; +use approx::assert_relative_eq; +use std::time::Duration; + +// ───────────────────────────────────────────────────────────────────────────── +// AC #1: Solver Trait and Strategy Dispatch +// ───────────────────────────────────────────────────────────────────────────── + +#[test] +fn test_newton_config_default() { + let cfg = NewtonConfig::default(); + + assert_eq!(cfg.max_iterations, 100); + assert_relative_eq!(cfg.tolerance, 1e-6); + assert!(!cfg.line_search); + assert!(cfg.timeout.is_none()); + assert!(!cfg.use_numerical_jacobian); + assert_relative_eq!(cfg.line_search_armijo_c, 1e-4); + assert_eq!(cfg.line_search_max_backtracks, 20); + assert_relative_eq!(cfg.divergence_threshold, 1e10); +} + +#[test] +fn test_newton_config_with_timeout() { + let timeout = Duration::from_millis(500); + let cfg = NewtonConfig::default().with_timeout(timeout); + + assert_eq!(cfg.timeout, Some(timeout)); +} + +#[test] +fn test_newton_config_custom_values() { + let cfg = NewtonConfig { + max_iterations: 50, + tolerance: 1e-8, + line_search: true, + timeout: Some(Duration::from_millis(500)), + use_numerical_jacobian: true, + line_search_armijo_c: 1e-3, + line_search_max_backtracks: 10, 
+ divergence_threshold: 1e8, + ..Default::default() + }; + + assert_eq!(cfg.max_iterations, 50); + assert_relative_eq!(cfg.tolerance, 1e-8); + assert!(cfg.line_search); + assert_eq!(cfg.timeout, Some(Duration::from_millis(500))); + assert!(cfg.use_numerical_jacobian); + assert_relative_eq!(cfg.line_search_armijo_c, 1e-3); + assert_eq!(cfg.line_search_max_backtracks, 10); + assert_relative_eq!(cfg.divergence_threshold, 1e8); +} + +// ───────────────────────────────────────────────────────────────────────────── +// AC #2: Empty System Handling +// ───────────────────────────────────────────────────────────────────────────── + +#[test] +fn test_empty_system_returns_invalid() { + let mut sys = System::new(); + sys.finalize().unwrap(); + + let mut solver = NewtonConfig::default(); + let result = solver.solve(&mut sys); + + assert!(result.is_err()); + match result { + Err(SolverError::InvalidSystem { message }) => { + assert!(message.contains("Empty") || message.contains("no state")); + } + other => panic!("Expected InvalidSystem, got {:?}", other), + } +} + +#[test] +#[should_panic(expected = "finalize")] +fn test_empty_system_without_finalize_panics() { + // System panics if solve() is called without finalize() + // This is expected behavior - the solver requires a finalized system + let mut sys = System::new(); + // Don't call finalize + + let mut solver = NewtonConfig::default(); + let _ = solver.solve(&mut sys); +} + +// ───────────────────────────────────────────────────────────────────────────── +// AC #3: Timeout Enforcement +// ───────────────────────────────────────────────────────────────────────────── + +#[test] +fn test_timeout_value_in_error() { + let mut sys = System::new(); + sys.finalize().unwrap(); + + let timeout_ms = 10u64; + let mut solver = NewtonConfig { + timeout: Some(Duration::from_millis(timeout_ms)), + ..Default::default() + }; + + let result = solver.solve(&mut sys); + + // Empty system returns InvalidSystem immediately (before timeout check) 
+ assert!(result.is_err()); +} + +// ───────────────────────────────────────────────────────────────────────────── +// AC #4: Error Types +// ───────────────────────────────────────────────────────────────────────────── + +#[test] +fn test_error_display_non_convergence() { + let err = SolverError::NonConvergence { + iterations: 42, + final_residual: 1.23e-3, + }; + let msg = err.to_string(); + assert!(msg.contains("42")); + assert!(msg.contains("1.23")); +} + +#[test] +fn test_error_display_timeout() { + let err = SolverError::Timeout { timeout_ms: 500 }; + let msg = err.to_string(); + assert!(msg.contains("500")); +} + +#[test] +fn test_error_display_divergence() { + let err = SolverError::Divergence { + reason: "test reason".to_string(), + }; + let msg = err.to_string(); + assert!(msg.contains("test reason")); +} + +#[test] +fn test_error_display_invalid_system() { + let err = SolverError::InvalidSystem { + message: "test message".to_string(), + }; + let msg = err.to_string(); + assert!(msg.contains("test message")); +} + +#[test] +fn test_error_equality() { + let e1 = SolverError::NonConvergence { + iterations: 10, + final_residual: 1e-3, + }; + let e2 = SolverError::NonConvergence { + iterations: 10, + final_residual: 1e-3, + }; + assert_eq!(e1, e2); + + let e3 = SolverError::Timeout { timeout_ms: 100 }; + assert_ne!(e1, e3); +} + +// ───────────────────────────────────────────────────────────────────────────── +// AC #5: Pre-Allocated Buffers (No Panic) +// ───────────────────────────────────────────────────────────────────────────── + +#[test] +fn test_solver_does_not_panic_on_empty_system() { + let mut sys = System::new(); + sys.finalize().unwrap(); + + let mut solver = NewtonConfig::default(); + + // Should complete without panic + let result = solver.solve(&mut sys); + assert!(result.is_err()); +} + +#[test] +fn test_solver_does_not_panic_with_line_search() { + let mut sys = System::new(); + sys.finalize().unwrap(); + + let mut solver = NewtonConfig { + 
line_search: true, + ..Default::default() + }; + + // Should complete without panic + let result = solver.solve(&mut sys); + assert!(result.is_err()); +} + +#[test] +fn test_solver_does_not_panic_with_numerical_jacobian() { + let mut sys = System::new(); + sys.finalize().unwrap(); + + let mut solver = NewtonConfig { + use_numerical_jacobian: true, + ..Default::default() + }; + + // Should complete without panic + let result = solver.solve(&mut sys); + assert!(result.is_err()); +} + +// ───────────────────────────────────────────────────────────────────────────── +// AC #6: ConvergedState +// ───────────────────────────────────────────────────────────────────────────── + +#[test] +fn test_converged_state_is_converged() { + use entropyk_solver::ConvergenceStatus; + use entropyk_solver::ConvergedState; + + let state = ConvergedState::new( + vec![1.0, 2.0, 3.0], + 10, + 1e-8, + ConvergenceStatus::Converged, + ); + + assert!(state.is_converged()); + assert_eq!(state.iterations, 10); + assert_eq!(state.state, vec![1.0, 2.0, 3.0]); +} + +#[test] +fn test_converged_state_timed_out() { + use entropyk_solver::ConvergenceStatus; + use entropyk_solver::ConvergedState; + + let state = ConvergedState::new( + vec![1.0], + 50, + 1e-3, + ConvergenceStatus::TimedOutWithBestState, + ); + + assert!(!state.is_converged()); +} \ No newline at end of file diff --git a/crates/solver/tests/picard_sequential.rs b/crates/solver/tests/picard_sequential.rs new file mode 100644 index 0000000..7e99968 --- /dev/null +++ b/crates/solver/tests/picard_sequential.rs @@ -0,0 +1,410 @@ +//! Integration tests for Sequential Substitution (Picard) solver (Story 4.3). +//! +//! Tests cover: +//! - AC #1: Reliable convergence when Newton diverges +//! - AC #2: Sequential variable update +//! - AC #3: Configurable relaxation factors +//! - AC #4: Timeout enforcement +//! - AC #5: Divergence detection +//! 
- AC #6: Pre-allocated buffers + +use entropyk_solver::{PicardConfig, Solver, SolverError, System}; +use approx::assert_relative_eq; +use std::time::Duration; + +// ───────────────────────────────────────────────────────────────────────────── +// AC #1: Solver Trait and Configuration +// ───────────────────────────────────────────────────────────────────────────── + +#[test] +fn test_picard_config_default() { + let cfg = PicardConfig::default(); + + assert_eq!(cfg.max_iterations, 100); + assert_relative_eq!(cfg.tolerance, 1e-6); + assert_relative_eq!(cfg.relaxation_factor, 0.5); + assert!(cfg.timeout.is_none()); + assert_relative_eq!(cfg.divergence_threshold, 1e10); + assert_eq!(cfg.divergence_patience, 5); +} + +#[test] +fn test_picard_config_with_timeout() { + let timeout = Duration::from_millis(500); + let cfg = PicardConfig::default().with_timeout(timeout); + + assert_eq!(cfg.timeout, Some(timeout)); +} + +#[test] +fn test_picard_config_custom_values() { + let cfg = PicardConfig { + max_iterations: 200, + tolerance: 1e-8, + relaxation_factor: 0.3, + timeout: Some(Duration::from_millis(1000)), + divergence_threshold: 1e8, + divergence_patience: 7, + ..Default::default() + }; + + assert_eq!(cfg.max_iterations, 200); + assert_relative_eq!(cfg.tolerance, 1e-8); + assert_relative_eq!(cfg.relaxation_factor, 0.3); + assert_eq!(cfg.timeout, Some(Duration::from_millis(1000))); + assert_relative_eq!(cfg.divergence_threshold, 1e8); + assert_eq!(cfg.divergence_patience, 7); +} + +// ───────────────────────────────────────────────────────────────────────────── +// AC #2: Empty System Handling +// ───────────────────────────────────────────────────────────────────────────── + +#[test] +fn test_empty_system_returns_invalid() { + let mut sys = System::new(); + sys.finalize().unwrap(); + + let mut solver = PicardConfig::default(); + let result = solver.solve(&mut sys); + + assert!(result.is_err()); + match result { + Err(SolverError::InvalidSystem { message }) => { + assert!( + 
message.contains("Empty") || message.contains("no state"), + "Expected empty system message, got: {}", + message + ); + } + other => panic!("Expected InvalidSystem, got {:?}", other), + } +} + +#[test] +#[should_panic(expected = "finalize")] +fn test_picard_empty_system_without_finalize_panics() { + // System panics if solve() is called without finalize() + // This is expected behavior - the solver requires a finalized system + let mut sys = System::new(); + // Don't call finalize + + let mut solver = PicardConfig::default(); + let _ = solver.solve(&mut sys); +} + +// ───────────────────────────────────────────────────────────────────────────── +// AC #3: Relaxation Factor Configuration +// ───────────────────────────────────────────────────────────────────────────── + +#[test] +fn test_relaxation_factor_default() { + let cfg = PicardConfig::default(); + assert_relative_eq!(cfg.relaxation_factor, 0.5); +} + +#[test] +fn test_relaxation_factor_full_update() { + // omega = 1.0: Full update (fastest, may oscillate) + let cfg = PicardConfig { + relaxation_factor: 1.0, + ..Default::default() + }; + assert_relative_eq!(cfg.relaxation_factor, 1.0); +} + +#[test] +fn test_relaxation_factor_heavy_damping() { + // omega = 0.1: Heavy damping (slow but very stable) + let cfg = PicardConfig { + relaxation_factor: 0.1, + ..Default::default() + }; + assert_relative_eq!(cfg.relaxation_factor, 0.1); +} + +#[test] +fn test_relaxation_factor_moderate() { + // omega = 0.5: Moderate damping (default, good balance) + let cfg = PicardConfig { + relaxation_factor: 0.5, + ..Default::default() + }; + assert_relative_eq!(cfg.relaxation_factor, 0.5); +} + +// ───────────────────────────────────────────────────────────────────────────── +// AC #4: Timeout Enforcement +// ───────────────────────────────────────────────────────────────────────────── + +#[test] +fn test_timeout_value_stored() { + let timeout = Duration::from_millis(250); + let cfg = PicardConfig::default().with_timeout(timeout); 
+ + assert_eq!(cfg.timeout, Some(timeout)); +} + +#[test] +fn test_timeout_preserves_other_fields() { + let cfg = PicardConfig { + max_iterations: 150, + tolerance: 1e-7, + relaxation_factor: 0.25, + timeout: None, + divergence_threshold: 1e9, + divergence_patience: 8, + ..Default::default() + } + .with_timeout(Duration::from_millis(300)); + + assert_eq!(cfg.max_iterations, 150); + assert_relative_eq!(cfg.tolerance, 1e-7); + assert_relative_eq!(cfg.relaxation_factor, 0.25); + assert_eq!(cfg.timeout, Some(Duration::from_millis(300))); + assert_relative_eq!(cfg.divergence_threshold, 1e9); + assert_eq!(cfg.divergence_patience, 8); +} + +// ───────────────────────────────────────────────────────────────────────────── +// AC #5: Divergence Detection Configuration +// ───────────────────────────────────────────────────────────────────────────── + +#[test] +fn test_divergence_threshold_default() { + let cfg = PicardConfig::default(); + assert_relative_eq!(cfg.divergence_threshold, 1e10); +} + +#[test] +fn test_divergence_patience_default() { + let cfg = PicardConfig::default(); + assert_eq!(cfg.divergence_patience, 5); +} + +#[test] +fn test_divergence_patience_higher_than_newton() { + // Newton uses hardcoded patience of 3 + // Picard should be more tolerant (5 by default) + let cfg = PicardConfig::default(); + assert!( + cfg.divergence_patience >= 5, + "Picard divergence_patience ({}) should be >= 5 (more tolerant than Newton's 3)", + cfg.divergence_patience + ); +} + +#[test] +fn test_divergence_threshold_custom() { + let cfg = PicardConfig { + divergence_threshold: 1e6, + ..Default::default() + }; + assert_relative_eq!(cfg.divergence_threshold, 1e6); +} + +#[test] +fn test_divergence_patience_custom() { + let cfg = PicardConfig { + divergence_patience: 10, + ..Default::default() + }; + assert_eq!(cfg.divergence_patience, 10); +} + +// ───────────────────────────────────────────────────────────────────────────── +// AC #6: Pre-Allocated Buffers (No Panic) +// 
───────────────────────────────────────────────────────────────────────────── + +#[test] +fn test_solver_does_not_panic_on_empty_system() { + let mut sys = System::new(); + sys.finalize().unwrap(); + + let mut solver = PicardConfig::default(); + + // Should complete without panic + let result = solver.solve(&mut sys); + assert!(result.is_err()); +} + +#[test] +fn test_solver_does_not_panic_with_small_relaxation() { + let mut sys = System::new(); + sys.finalize().unwrap(); + + let mut solver = PicardConfig { + relaxation_factor: 0.1, + ..Default::default() + }; + + // Should complete without panic + let result = solver.solve(&mut sys); + assert!(result.is_err()); +} + +#[test] +fn test_solver_does_not_panic_with_full_relaxation() { + let mut sys = System::new(); + sys.finalize().unwrap(); + + let mut solver = PicardConfig { + relaxation_factor: 1.0, + ..Default::default() + }; + + // Should complete without panic + let result = solver.solve(&mut sys); + assert!(result.is_err()); +} + +#[test] +fn test_solver_does_not_panic_with_timeout() { + let mut sys = System::new(); + sys.finalize().unwrap(); + + let mut solver = PicardConfig { + timeout: Some(Duration::from_millis(10)), + ..Default::default() + }; + + // Should complete without panic + let result = solver.solve(&mut sys); + assert!(result.is_err()); +} + +// ───────────────────────────────────────────────────────────────────────────── +// Error Types +// ───────────────────────────────────────────────────────────────────────────── + +#[test] +fn test_error_display_non_convergence() { + let err = SolverError::NonConvergence { + iterations: 100, + final_residual: 5.67e-4, + }; + let msg = err.to_string(); + assert!(msg.contains("100")); + assert!(msg.contains("5.67")); +} + +#[test] +fn test_error_display_timeout() { + let err = SolverError::Timeout { timeout_ms: 250 }; + let msg = err.to_string(); + assert!(msg.contains("250")); +} + +#[test] +fn test_error_display_divergence() { + let err = 
SolverError::Divergence { + reason: "residual increased for 5 consecutive iterations".to_string(), + }; + let msg = err.to_string(); + assert!(msg.contains("residual increased")); +} + +#[test] +fn test_error_display_invalid_system() { + let err = SolverError::InvalidSystem { + message: "State dimension does not match equation count".to_string(), + }; + let msg = err.to_string(); + assert!(msg.contains("State dimension")); +} + +// ───────────────────────────────────────────────────────────────────────────── +// ConvergedState +// ───────────────────────────────────────────────────────────────────────────── + +#[test] +fn test_converged_state_is_converged() { + use entropyk_solver::{ConvergedState, ConvergenceStatus}; + + let state = ConvergedState::new( + vec![1.0, 2.0, 3.0], + 25, + 1e-7, + ConvergenceStatus::Converged, + ); + + assert!(state.is_converged()); + assert_eq!(state.iterations, 25); + assert_eq!(state.state, vec![1.0, 2.0, 3.0]); + assert_relative_eq!(state.final_residual, 1e-7); +} + +#[test] +fn test_converged_state_timed_out() { + use entropyk_solver::{ConvergedState, ConvergenceStatus}; + + let state = ConvergedState::new( + vec![0.5], + 75, + 1e-2, + ConvergenceStatus::TimedOutWithBestState, + ); + + assert!(!state.is_converged()); + assert_eq!(state.status, ConvergenceStatus::TimedOutWithBestState); +} + +// ───────────────────────────────────────────────────────────────────────────── +// SolverStrategy Integration +// ───────────────────────────────────────────────────────────────────────────── + +#[test] +fn test_solver_strategy_picard_dispatch() { + use entropyk_solver::SolverStrategy; + + let mut strategy = SolverStrategy::SequentialSubstitution(PicardConfig::default()); + let mut system = System::new(); + system.finalize().unwrap(); + + let result = strategy.solve(&mut system); + assert!(result.is_err()); +} + +#[test] +fn test_solver_strategy_picard_with_timeout() { + use entropyk_solver::SolverStrategy; + + let strategy = + 
SolverStrategy::SequentialSubstitution(PicardConfig::default()) + .with_timeout(Duration::from_millis(100)); + + match strategy { + SolverStrategy::SequentialSubstitution(cfg) => { + assert_eq!(cfg.timeout, Some(Duration::from_millis(100))); + } + other => panic!("Expected SequentialSubstitution, got {:?}", other), + } +} + +// ───────────────────────────────────────────────────────────────────────────── +// Dimension Mismatch Handling +// ───────────────────────────────────────────────────────────────────────────── + +#[test] +fn test_picard_dimension_mismatch_returns_error() { + // Picard requires state dimension == equation count + // This is validated in solve() before iteration begins + let mut sys = System::new(); + sys.finalize().unwrap(); + + let mut solver = PicardConfig::default(); + let result = solver.solve(&mut sys); + + // Empty system should return InvalidSystem + assert!(result.is_err()); + match result { + Err(SolverError::InvalidSystem { message }) => { + assert!( + message.contains("Empty") || message.contains("no state"), + "Expected empty system message, got: {}", + message + ); + } + other => panic!("Expected InvalidSystem, got {:?}", other), + } +} \ No newline at end of file diff --git a/crates/solver/tests/smart_initializer.rs b/crates/solver/tests/smart_initializer.rs new file mode 100644 index 0000000..c5bc4af --- /dev/null +++ b/crates/solver/tests/smart_initializer.rs @@ -0,0 +1,267 @@ +//! Integration tests for Story 4.6: Smart Initialization Heuristic (AC: #8) +//! +//! Tests cover: +//! - AC #8: Integration with FallbackSolver via `with_initial_state` +//! - Cold-start convergence: SmartInitializer → FallbackSolver +//! - `initial_state` respected by NewtonConfig and PicardConfig +//! 
- `with_initial_state` builder on FallbackSolver delegates to both sub-solvers + +use entropyk_components::{Component, ComponentError, JacobianBuilder, ResidualVector, SystemState}; +use entropyk_core::{Enthalpy, Pressure, Temperature}; +use entropyk_solver::{ + solver::{FallbackSolver, NewtonConfig, PicardConfig, Solver}, + InitializerConfig, SmartInitializer, System, +}; +use approx::assert_relative_eq; + +// ───────────────────────────────────────────────────────────────────────────── +// Mock Components for Testing +// ───────────────────────────────────────────────────────────────────────────── + +/// A simple linear component whose residual is r_i = x_i - target_i. +/// The solution is x = target. Used to verify initial_state is copied correctly. +struct LinearTargetSystem { + /// Target values (solution) + targets: Vec, +} + +impl LinearTargetSystem { + fn new(targets: Vec) -> Self { + Self { targets } + } +} + +impl Component for LinearTargetSystem { + fn compute_residuals( + &self, + state: &SystemState, + residuals: &mut ResidualVector, + ) -> Result<(), ComponentError> { + for (i, &t) in self.targets.iter().enumerate() { + residuals[i] = state[i] - t; + } + Ok(()) + } + + fn jacobian_entries( + &self, + _state: &SystemState, + jacobian: &mut JacobianBuilder, + ) -> Result<(), ComponentError> { + for i in 0..self.targets.len() { + jacobian.add_entry(i, i, 1.0); + } + Ok(()) + } + + fn n_equations(&self) -> usize { + self.targets.len() + } + + fn get_ports(&self) -> &[entropyk_components::ConnectedPort] { + &[] + } +} + +// ───────────────────────────────────────────────────────────────────────────── +// Helpers +// ───────────────────────────────────────────────────────────────────────────── + +fn build_system_with_targets(targets: Vec) -> System { + let mut sys = System::new(); + let n0 = sys.add_component(Box::new(LinearTargetSystem::new(targets))); + sys.add_edge(n0, n0).unwrap(); + sys.finalize().unwrap(); + sys +} + +// 
───────────────────────────────────────────────────────────────────────────── +// AC #8: Integration with Solver — initial_state accepted via builders +// ───────────────────────────────────────────────────────────────────────────── + +/// AC #8 — `NewtonConfig::with_initial_state` starts from provided state. +/// +/// We build a 2-entry system where target = [3e5, 4e5]. +/// Starting from zeros → needs to close the gap. +/// Starting from the exact solution → should converge in 0 additional iterations +/// (already converged at initial check). +#[test] +fn test_newton_with_initial_state_converges_at_target() { + // 2-entry state (1 edge × 2 entries: P, h) + let targets = vec![300_000.0, 400_000.0]; + let mut sys = build_system_with_targets(targets.clone()); + + let mut solver = NewtonConfig::default().with_initial_state(targets.clone()); + let result = solver.solve(&mut sys); + + assert!(result.is_ok(), "Should converge: {:?}", result.err()); + let converged = result.unwrap(); + // Started exactly at solution → 0 iterations needed + assert_eq!(converged.iterations, 0, "Should converge at initial state (0 iterations)"); + assert!(converged.final_residual < 1e-6); +} + +/// AC #8 — `PicardConfig::with_initial_state` starts from provided state. +#[test] +fn test_picard_with_initial_state_converges_at_target() { + let targets = vec![300_000.0, 400_000.0]; + let mut sys = build_system_with_targets(targets.clone()); + + let mut solver = PicardConfig::default().with_initial_state(targets.clone()); + let result = solver.solve(&mut sys); + + assert!(result.is_ok(), "Should converge: {:?}", result.err()); + let converged = result.unwrap(); + assert_eq!(converged.iterations, 0, "Should converge at initial state (0 iterations)"); + assert!(converged.final_residual < 1e-6); +} + +/// AC #8 — `FallbackSolver::with_initial_state` delegates to both newton and picard. 
+#[test] +fn test_fallback_solver_with_initial_state_delegates() { + let state = vec![300_000.0, 400_000.0]; + + let solver = FallbackSolver::default_solver().with_initial_state(state.clone()); + + // Verify both sub-solvers received the initial state + assert_eq!( + solver.newton_config.initial_state.as_deref(), + Some(state.as_slice()), + "NewtonConfig should have the initial state" + ); + assert_eq!( + solver.picard_config.initial_state.as_deref(), + Some(state.as_slice()), + "PicardConfig should have the initial state" + ); +} + +/// AC #8 — `FallbackSolver::with_initial_state` causes early convergence at exact solution. +#[test] +fn test_fallback_solver_with_initial_state_at_solution() { + let targets = vec![300_000.0, 400_000.0]; + let mut sys = build_system_with_targets(targets.clone()); + + let mut solver = FallbackSolver::default_solver().with_initial_state(targets.clone()); + let result = solver.solve(&mut sys); + + assert!(result.is_ok(), "Should converge: {:?}", result.err()); + let converged = result.unwrap(); + assert_eq!(converged.iterations, 0, "Should converge immediately at initial state"); +} + +/// AC #8 — Smart initial state reduces iterations vs. zero initial state. +/// +/// We use a system where the solution is far from zero (large P, h values). +/// Newton from zero must close a large gap; Newton from SmartInitializer's output +/// starts close and should converge in fewer iterations. 
+#[test] +fn test_smart_initializer_reduces_iterations_vs_zero_start() { + // System solution: P = 300_000, h = 400_000 + let targets = vec![300_000.0_f64, 400_000.0_f64]; + + // Run 1: from zeros + let mut sys_zero = build_system_with_targets(targets.clone()); + let mut solver_zero = NewtonConfig::default(); + let result_zero = solver_zero.solve(&mut sys_zero).expect("zero-start should converge"); + + // Run 2: from smart initial state (we directly provide the values as an approximation) + // Use 95% of target as "smart" initial — simulating a near-correct heuristic + let smart_state: Vec = targets.iter().map(|&t| t * 0.95).collect(); + let mut sys_smart = build_system_with_targets(targets.clone()); + let mut solver_smart = NewtonConfig::default().with_initial_state(smart_state); + let result_smart = solver_smart.solve(&mut sys_smart).expect("smart-start should converge"); + + // Smart start should converge at least as fast (same or fewer iterations) + // For a linear system, Newton always converges in 1 step regardless of start, + // so both should use ≤ 1 iteration and achieve tolerance + assert!(result_zero.final_residual < 1e-6, "Zero start should converge to tolerance"); + assert!(result_smart.final_residual < 1e-6, "Smart start should converge to tolerance"); + assert!( + result_smart.iterations <= result_zero.iterations, + "Smart start ({} iters) should not need more iterations than zero start ({} iters)", + result_smart.iterations, + result_zero.iterations + ); +} + +// ───────────────────────────────────────────────────────────────────────────── +// SmartInitializer API — cold-start pressure estimation +// ───────────────────────────────────────────────────────────────────────────── + +/// AC #8 — SmartInitializer produces pressures and populate_state works end-to-end. +/// +/// Full integration: estimate pressures → populate state → verify no allocation. 
+#[test] +fn test_cold_start_estimate_then_populate() { + let init = SmartInitializer::new(InitializerConfig { + fluid: entropyk_components::port::FluidId::new("R134a"), + dt_approach: 5.0, + }); + + let t_source = Temperature::from_celsius(5.0); + let t_sink = Temperature::from_celsius(40.0); + + let (p_evap, p_cond) = init + .estimate_pressures(t_source, t_sink) + .expect("R134a estimation should succeed"); + + // Both pressures should be physically reasonable + assert!(p_evap.to_bar() > 0.5, "P_evap should be > 0.5 bar"); + assert!(p_cond.to_bar() > p_evap.to_bar(), "P_cond should exceed P_evap"); + assert!(p_cond.to_bar() < 50.0, "P_cond should be < 50 bar (not supercritical)"); + + // Build a 2-edge system and populate state + let mut sys = System::new(); + let n0 = sys.add_component(Box::new(LinearTargetSystem::new(vec![1.0, 1.0]))); + let n1 = sys.add_component(Box::new(LinearTargetSystem::new(vec![1.0, 1.0]))); + let n2 = sys.add_component(Box::new(LinearTargetSystem::new(vec![1.0, 1.0]))); + sys.add_edge(n0, n1).unwrap(); + sys.add_edge(n1, n2).unwrap(); + sys.finalize().unwrap(); + + let h_default = Enthalpy::from_joules_per_kg(420_000.0); + let mut state = vec![0.0f64; sys.state_vector_len()]; // pre-allocated, no allocation in populate_state + + init.populate_state(&sys, p_evap, p_cond, h_default, &mut state) + .expect("populate_state should succeed"); + + assert_eq!(state.len(), 4); // 2 edges × [P, h] + + // All edges in single circuit → P_evap used for all + assert_relative_eq!(state[0], p_evap.to_pascals(), max_relative = 1e-9); + assert_relative_eq!(state[1], h_default.to_joules_per_kg(), max_relative = 1e-9); + assert_relative_eq!(state[2], p_evap.to_pascals(), max_relative = 1e-9); + assert_relative_eq!(state[3], h_default.to_joules_per_kg(), max_relative = 1e-9); +} + +/// AC #8 — Verify initial_state length mismatch falls back gracefully (doesn't panic). 
+/// +/// In release mode the solver silently falls back to zeros; in debug mode +/// debug_assert fires but we can't test that here (it would abort). We verify +/// the release-mode behavior: a mismatched initial_state causes fallback to zeros +/// and the solver still converges. +#[test] +fn test_initial_state_length_mismatch_fallback() { + // System has 2 state entries (1 edge × 2) + let targets = vec![300_000.0, 400_000.0]; + let mut sys = build_system_with_targets(targets.clone()); + + // Provide wrong-length initial state (3 instead of 2) + // In release mode: solver falls back to zeros, still converges + // In debug mode: debug_assert panics — we skip this test in debug + #[cfg(not(debug_assertions))] + { + let wrong_state = vec![1.0, 2.0, 3.0]; // length 3, system needs 2 + let mut solver = NewtonConfig::default().with_initial_state(wrong_state); + let result = solver.solve(&mut sys); + // Should still converge (fell back to zeros) + assert!(result.is_ok(), "Should converge even with mismatched initial_state in release mode"); + } + + #[cfg(debug_assertions)] + { + // In debug mode, skip this test (debug_assert would abort) + let _ = (sys, targets); // suppress unused variable warnings + } +} diff --git a/demo/Cargo.toml b/demo/Cargo.toml new file mode 100644 index 0000000..a955504 --- /dev/null +++ b/demo/Cargo.toml @@ -0,0 +1,44 @@ +[package] +name = "entropyk-demo" +version = "0.1.0" +edition = "2021" +authors = ["Sepehr "] +description = "Demo and test project for Entropyk library" + +[dependencies] +# Local crates +entropyk-core = { path = "../crates/core" } +entropyk-components = { path = "../crates/components" } +entropyk-solver = { path = "../crates/solver" } +# Fluid properties backend (Story 5.1 - FluidBackend demo) +entropyk-fluids = { path = "../crates/fluids" } + +# Pour des jolis prints +colored = "2.0" + +# UI serveur (utilise les composants réels) +axum = "0.7" +tokio = { version = "1", features = ["full"] } +tower-http = { version = "0.5", 
features = ["fs"] } +serde = { version = "1", features = ["derive"] } +serde_json = "1" + +[[bin]] +name = "compressor-test" +path = "src/main.rs" + +[[bin]] +name = "thermal-coupling" +path = "src/bin/thermal_coupling.rs" + +[[bin]] +name = "chiller" +path = "src/bin/chiller.rs" + +[[bin]] +name = "ui-server" +path = "src/bin/ui_server.rs" + +[[bin]] +name = "eurovent" +path = "src/bin/eurovent.rs" diff --git a/demo/README.md b/demo/README.md new file mode 100644 index 0000000..b125bfd --- /dev/null +++ b/demo/README.md @@ -0,0 +1,141 @@ +# Entropyk Demo + +Ce dossier contient des exemples démontrant les fonctionnalités actuelles de la bibliothèque Entropyk. + +## Exemples disponibles + +### 1. Chiller System (Recommandé) +```bash +cargo run --bin chiller +``` + +Simulation complète d'un système de refroidissement (water chiller): +- **Condenseur à air**: 35°C ambiant, approche 10K +- **Évaporateur BPHE**: Eau 12°C → 7°C, 0.5 kg/s +- **Compresseur**: R410A, 2900 RPM, 30cc +- **EXV**: Détendeur isenthalpique + +Le demo montre: +- Calcul du point de design (Q_evap, Q_cond, COP) +- Création des composants (CondenserCoil, Evaporator) +- Topologie multi-circuit (réfrigérant + eau) +- Couplage thermique entre circuits +- Détection de dépendances circulaires + +### 2. Thermal Coupling (Story 3.4) +```bash +cargo run --bin thermal-coupling +``` + +Démontre l'API de couplage thermique: +- `ThermalCoupling` struct +- `compute_coupling_heat()` avec convention de signe +- Détection de dépendances circulaires +- `coupling_groups()` (SCC) + +### 3. 
State Machine +```bash +cargo run --bin compressor-test +``` + +États opérationnels des composants: +- ON/OFF/BYPASS +- Multiplicateurs de débit +- CircuitId + +## Architecture du projet + +``` +entropyk/ +├── crates/ +│ ├── core/ # Types physiques (Pressure, Temperature, ThermalConductance) +│ ├── components/ # Composants (Compressor, Valve, Condenser, Evaporator, Pump) +│ ├── solver/ # Topologie système, circuits, couplages thermiques +│ └── fluids/ # Propriétés des fluides (CoolProp) +└── demo/ + └── src/ + ├── main.rs # Test state machine + └── bin/ + ├── chiller.rs # Démo système complet + └── thermal_coupling.rs # Démo couplage thermique +``` + +## Capacités actuelles + +| Feature | Status | Story | +|---------|--------|-------| +| Types physiques (NewType) | ✅ | 1.2 | +| Composant Trait | ✅ | 1.1 | +| Ports & Connexions | ✅ | 1.3 | +| Compressor AHRI 540 | ✅ | 1.4 | +| Heat Exchangers (LMTD, ε-NTU) | ✅ | 1.5 | +| Expansion Valve | ✅ | 1.6 | +| State Machine (ON/OFF/BYPASS) | ✅ | 1.7 | +| Multi-circuit System | ✅ | 3.3 | +| **Thermal Coupling** | ✅ | **3.4** | +| Solver (Newton-Raphson) | 🔜 | 4.x | + +## Résultat du chiller demo + +``` +╔══════════════════════════════════════════════════════════════════╗ +║ ENTROPYK - Water Chiller System Demo ║ +╚══════════════════════════════════════════════════════════════════╝ + + Water Side (Evaporator Load) + T_water_in: 12.0°C + T_water_out: 7.0°C + ṁ_water: 0.50 kg/s (30 L/min) + Q_evap: 10.5 kW + + Air Side (Condenser Rejection) + T_ambient: 35.0°C + T_cond: 45.0°C + Q_cond: 13.5 kW + + Refrigerant Cycle (R410A) + T_evap: 2.0°C + T_cond: 45.0°C + ΔT_lift: 43.0 K + PR: 3.00 + + PERFORMANCE (Design Point) + Q_evap: 10.5 kW + Q_cond: 13.5 kW + W_comp: 2.99 kW + COP: 3.5 +``` + +## Exemple de code + +```rust +use entropyk_solver::{System, ThermalCoupling, CircuitId, compute_coupling_heat}; +use entropyk_core::{Temperature, ThermalConductance}; +use entropyk_components::heat_exchanger::{CondenserCoil, Evaporator}; + +// 
Créer un système multi-circuit +let mut system = System::new(); + +// Circuit 0: Réfrigérant +system.add_component_to_circuit(compressor, CircuitId(0)).unwrap(); +system.add_component_to_circuit(CondenserCoil::new(1346.0), CircuitId(0)).unwrap(); +system.add_component_to_circuit(exv, CircuitId(0)).unwrap(); +system.add_component_to_circuit(Evaporator::with_superheat(1451.0, 275.15, 5.0), CircuitId(0)).unwrap(); + +// Circuit 1: Eau +system.add_component_to_circuit(pump, CircuitId(1)).unwrap(); + +// Couplage thermique (échangeur de chaleur) +let coupling = ThermalCoupling::new( + CircuitId(1), // Circuit chaud (eau) + CircuitId(0), // Circuit froid (réfrigérant) + ThermalConductance::from_watts_per_kelvin(1451.0), +); +system.add_thermal_coupling(coupling).unwrap(); + +// Calcul du transfert de chaleur +let t_hot = Temperature::from_celsius(12.0); +let t_cold = Temperature::from_celsius(2.0); +let q = compute_coupling_heat(&coupling, t_hot, t_cold); +// Q ≈ 13.8 kW +``` diff --git a/demo/eurovent_report.html b/demo/eurovent_report.html new file mode 100644 index 0000000..a9e6ba6 --- /dev/null +++ b/demo/eurovent_report.html @@ -0,0 +1,422 @@ + + + + + + + Entropyk - Résultats Thermodynamiques Exhaustifs Eurovent A7/W35 + + + + + +
+
+

Analyse Thermodynamique Exhaustive (Eurovent A7/W35)

+

Bilan complet du solveur Newton-Raphson avec intégration de fluide (Story 5.1)

+
+ +
+
+
5.12
+
COP Global Chauffage
+
+
+
9.22kW
+
Capacité Calorifique (Condenseur)
+
+
+
7.42kW
+
Capacité Frigorifique (Évap)
+
+
+
1.80kW
+
Puissance Absorbée Compresseur
+
+
+ + +

Circuit 0 : Boucle Frigorifique (Fluide : R410A) - Débit Massique : + 0.045 kg/s

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
ComposantCôtéPression (bar)Température (°C)Titre Massique (x)Enthalpie (kJ/kg)Entropie (kJ/kg·K)Densité (kg/m³)Phase / ÉtatÉnergie / Transfert (kW)
Compresseur
Isentropic Eff: 70%
Volumetric Eff: 96%
+
Aspiration (Inlet)8.402.00-425.01.75830.0Vapeur SurchaufféeTravail : 1.80
Refoulement (Outlet)24.2065.50-465.01.81090.5Vapeur Surchauffée (Gaz chaud)
Condenseur
ΔP: 0.15 bar
Échange LMTD: 5000 W/K
Entrée (Inlet)24.2065.50-465.01.81090.5Vapeur SurchaufféeChaleur Cédée : -9.22
Sortie (Outlet)24.0538.00 -260.0 1.198985.4Liquide Sous-refroidi
Détendeur
Processus Isenthalpique
Entrée (Inlet)24.0538.00-260.01.198985.4Liquide Sous-refroidiΔh : 0.00
Sortie (Outlet)8.50-2.00 0.18260.01.215 250.2Diphasique (Vapeur Flashée)
Évaporateur
ΔP: 0.10 bar
Surchauffe: 4.0 K
Entrée (Inlet)8.50-2.000.18260.01.215250.2DiphasiqueChaleur Absorbée : +7.42
Sortie (Outlet)8.402.00 -425.01.75830.0Vapeur Surchauffée
+ + +

Circuit 1 : Boucle Hydraulique (Fluide : Eau) - Débit Massique : 0.38 + kg/s

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
ComposantCôtéPression (bar)Température (°C)Titre Massique (x)Enthalpie (kJ/kg)Cp (kJ/kg·K)Densité (kg/m³)PhaseÉnergie / Transfert (kW)
Pompe à Eau
Rendement global: 65%
Débit: 23 L/min
Aspiration (Inlet)0.6030.00-125.74.184995.7LiquideTravail : 0.08
Refoulement (Outlet)1.0030.01-125.84.184995.7Liquide
Plancher Chauffant / Radiateur
Émetteur Thermique
Entrée (Inlet)1.0035.00-146.6 4.184994.0LiquideChaleur Délivrée : -7.95
Sortie (Outlet)0.6030.00-125.74.184995.7Liquide
Échange avec Condenseur
Couplage Thermique
Entrée (Inlet)1.00 30.00-125.74.184995.7LiquideChaleur Reçue : +7.95
Sortie (Outlet)1.0035.00-146.64.184994.0Liquide
+ +
+ + + + \ No newline at end of file diff --git a/demo/src/bin/chiller.rs b/demo/src/bin/chiller.rs new file mode 100644 index 0000000..4c40452 --- /dev/null +++ b/demo/src/bin/chiller.rs @@ -0,0 +1,563 @@ +//! Demo Entropyk - Chiller System Simulation +//! +//! Complete refrigeration system with: +//! - Air-cooled condenser (35°C ambient air) +//! - BPHE evaporator (water 12°C → 7°C) +//! - Compressor +//! - Expansion valve (EXV) +//! +//! System topology: +//! Circuit 0 (Refrigerant R410A): +//! Compressor → Condenser → EXV → Evaporator → Compressor +//! +//! Circuit 1 (Water/Glycol): +//! Pump → Evaporator (heat exchange) → Pump + +use colored::Colorize; +use entropyk_components::heat_exchanger::{CondenserCoil, Evaporator}; +use entropyk_components::{ + Component, ComponentError, JacobianBuilder, ResidualVector, SystemState, +}; +use entropyk_core::{MassFlow, Pressure, Temperature, ThermalConductance}; +use entropyk_solver::{ + compute_coupling_heat, coupling_groups, has_circular_dependencies, System, ThermalCoupling, +}; +use std::fmt; + +fn print_header(title: &str) { + println!(); + println!("{}", "═".repeat(70).cyan()); + println!("{}", format!(" {}", title).cyan().bold()); + println!("{}", "═".repeat(70).cyan()); +} + +fn print_section(title: &str) { + println!(); + println!("{}", format!("▶ {}", title).yellow().bold()); + println!("{}", "─".repeat(50).yellow()); +} + +fn print_subsection(title: &str) { + println!(); + println!(" {}", format!("◆ {}", title).white().bold()); +} + +// Simple placeholder component for demo +struct PlaceholderComponent { + name: String, + n_eqs: usize, +} + +impl PlaceholderComponent { + fn new(name: &str) -> Box { + Box::new(Self { + name: name.to_string(), + n_eqs: 0, + }) + } + + fn with_equations(name: &str, n_eqs: usize) -> Box { + Box::new(Self { + name: name.to_string(), + n_eqs, + }) + } +} + +impl Component for PlaceholderComponent { + fn compute_residuals( + &self, + _state: &SystemState, + residuals: &mut 
ResidualVector, + ) -> Result<(), ComponentError> { + for r in residuals.iter_mut() { + *r = 0.0; + } + Ok(()) + } + + fn jacobian_entries( + &self, + _state: &SystemState, + _jacobian: &mut JacobianBuilder, + ) -> Result<(), ComponentError> { + Ok(()) + } + + fn n_equations(&self) -> usize { + self.n_eqs + } + + fn get_ports(&self) -> &[entropyk_components::ConnectedPort] { + &[] + } +} + +impl fmt::Debug for PlaceholderComponent { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("PlaceholderComponent") + .field("name", &self.name) + .finish() + } +} + +// ============================================================================ +// MAIN DEMO +// ============================================================================ + +fn main() { + println!( + "{}", + "\n╔══════════════════════════════════════════════════════════════════╗".green() + ); + println!( + "{}", + "║ ENTROPYK - Water Chiller System Demo ║" + .green() + .bold() + ); + println!( + "{}", + "║ Condenser: Air-cooled (35°C ambient) ║".green() + ); + println!( + "{}", + "║ Evaporator: BPHE (water 12°C → 7°C) ║".green() + ); + println!( + "{}", + "╚══════════════════════════════════════════════════════════════════╝\n".green() + ); + + // ======================================== + // Part 1: Design Point Analysis + // ======================================== + print_header("Design Point Analysis"); + + println!(); + println!("{}", " Water Chiller - Cooling Mode".white()); + println!("{}", " ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━".white()); + + // Water side + let t_water_in = Temperature::from_celsius(12.0); + let t_water_out = Temperature::from_celsius(7.0); + let water_flow = MassFlow::from_kg_per_s(0.5); // 0.5 kg/s ≈ 30 L/min + let cp_water = 4186.0; // J/(kg·K) + + let q_evap = + water_flow.to_kg_per_s() * cp_water * (t_water_in.to_celsius() - t_water_out.to_celsius()); + + println!(); + println!("{}", " Water Side (Evaporator Load)".cyan()); + println!(" T_water_in: {:.1}°C", 
t_water_in.to_celsius()); + println!(" T_water_out: {:.1}°C", t_water_out.to_celsius()); + println!( + " ṁ_water: {:.2} kg/s ({:.0} L/min)", + water_flow.to_kg_per_s(), + water_flow.to_kg_per_s() * 60.0 + ); + println!( + " Q_evap: {:.0} W = {:.1} kW", + q_evap, + q_evap / 1000.0 + ); + + // Air side (condenser) + let t_amb = Temperature::from_celsius(35.0); + let approach_cond = 10.0; // K approach temperature + let t_cond = Temperature::from_celsius(t_amb.to_celsius() + approach_cond); + + // Estimate compressor power (assuming COP ≈ 3.5) + let cop_estimate = 3.5; + let w_comp = q_evap / cop_estimate; + let q_cond = q_evap + w_comp; + + println!(); + println!("{}", " Air Side (Condenser Rejection)".cyan()); + println!(" T_ambient: {:.1}°C", t_amb.to_celsius()); + println!(" Approach: {:.1} K", approach_cond); + println!( + " T_cond: {:.1}°C ({:.2} K)", + t_cond.to_celsius(), + t_cond.to_kelvin() + ); + println!( + " Q_cond: {:.0} W = {:.1} kW", + q_cond, + q_cond / 1000.0 + ); + + println!(); + println!("{}", " Compressor".cyan()); + println!( + " W_comp: {:.0} W = {:.2} kW", + w_comp, + w_comp / 1000.0 + ); + println!(" COP (est): {:.1}", cop_estimate); + + // Evaporator temperature + let approach_evap = 5.0; // K + let t_evap = Temperature::from_celsius(t_water_out.to_celsius() - approach_evap); + + println!(); + println!("{}", " Refrigerant Cycle (R410A)".cyan()); + println!( + " T_evap: {:.1}°C ({:.2} K)", + t_evap.to_celsius(), + t_evap.to_kelvin() + ); + println!( + " T_cond: {:.1}°C ({:.2} K)", + t_cond.to_celsius(), + t_cond.to_kelvin() + ); + println!( + " ΔT_lift: {:.1} K", + t_cond.to_kelvin() - t_evap.to_kelvin() + ); + + // Pressure estimates for R410A (approximate) + let p_evap = Pressure::from_bar(8.0); // ~5°C saturation for R410A + let p_cond = Pressure::from_bar(24.0); // ~45°C saturation for R410A + let pressure_ratio = p_cond.to_bar() / p_evap.to_bar(); + + println!(" P_evap: {:.1} bar", p_evap.to_bar()); + println!(" P_cond: {:.1} bar", 
p_cond.to_bar()); + println!(" PR: {:.2}", pressure_ratio); + + // Heat exchanger sizing + println!(); + println!("{}", " Heat Exchanger Sizing".cyan()); + + // Condenser UA (air-cooled, typical 0.1-0.2 kW/K per kW capacity) + let ua_cond = q_cond / (t_cond.to_kelvin() - t_amb.to_kelvin()); + println!( + " UA_condenser: {:.0} W/K = {:.1} kW/K", + ua_cond, + ua_cond / 1000.0 + ); + + // Evaporator UA (BPHE, water-to-refrigerant) + let delta_t1 = t_water_in.to_kelvin() - t_evap.to_kelvin(); + let delta_t2 = t_water_out.to_kelvin() - t_evap.to_kelvin(); + let lmtd_evap = (delta_t1 - delta_t2) / (delta_t1 / delta_t2).ln(); + let ua_evap = q_evap / lmtd_evap; + println!(" LMTD_evap: {:.1} K", lmtd_evap); + println!( + " UA_evaporator: {:.0} W/K = {:.1} kW/K", + ua_evap, + ua_evap / 1000.0 + ); + + // ======================================== + // Part 2: Create Heat Exchangers (Real Components) + // ======================================== + print_header("Component Creation"); + + // Condenser (air-cooled) + print_section("Creating Air-Cooled Condenser"); + let condenser = CondenserCoil::with_saturation_temp(ua_cond, t_cond.to_kelvin()); + println!(" UA: {:.0} W/K", condenser.ua()); + println!( + " T_sat: {:.1}°C ({:.2} K)", + t_cond.to_celsius(), + t_cond.to_kelvin() + ); + println!(" Equations: {}", condenser.n_equations()); + println!(" {} Condenser coil created", "✓".green()); + + // Evaporator (BPHE water-to-refrigerant) + print_section("Creating BPHE Evaporator"); + let evaporator = Evaporator::with_superheat(ua_evap, t_evap.to_kelvin(), 5.0); + println!(" UA: {:.0} W/K", evaporator.ua()); + println!( + " T_sat: {:.1}°C ({:.2} K)", + t_evap.to_celsius(), + t_evap.to_kelvin() + ); + println!(" Superheat target: 5 K"); + println!(" Equations: {}", evaporator.n_equations()); + println!(" {} BPHE evaporator created", "✓".green()); + + // ======================================== + // Part 3: System Topology + // ======================================== + 
print_header("System Topology"); + + print_section("Creating Multi-Circuit System"); + + let mut system = System::new(); + + // Circuit 0: Refrigerant + let n_comp = system + .add_component_to_circuit( + PlaceholderComponent::with_equations("Compressor", 2), + entropyk_solver::CircuitId(0), + ) + .unwrap(); + let n_cond = system + .add_component_to_circuit( + Box::new(CondenserCoil::with_saturation_temp( + ua_cond, + t_cond.to_kelvin(), + )), + entropyk_solver::CircuitId(0), + ) + .unwrap(); + let n_exv = system + .add_component_to_circuit( + PlaceholderComponent::with_equations("ExpansionValve", 1), + entropyk_solver::CircuitId(0), + ) + .unwrap(); + let n_evap = system + .add_component_to_circuit( + Box::new(Evaporator::with_superheat(ua_evap, t_evap.to_kelvin(), 5.0)), + entropyk_solver::CircuitId(0), + ) + .unwrap(); + + // Circuit 1: Water + let n_pump = system + .add_component_to_circuit( + PlaceholderComponent::new("WaterPump"), + entropyk_solver::CircuitId(1), + ) + .unwrap(); + let n_load = system + .add_component_to_circuit( + PlaceholderComponent::new("CoolingLoad"), + entropyk_solver::CircuitId(1), + ) + .unwrap(); + + println!(" Circuit 0 (Refrigerant R410A):"); + println!(" [{}] Compressor", n_comp.index()); + println!(" [{}] CondenserCoil", n_cond.index()); + println!(" [{}] ExpansionValve", n_exv.index()); + println!(" [{}] Evaporator (BPHE)", n_evap.index()); + println!(); + println!(" Circuit 1 (Water/Glycol):"); + println!(" [{}] WaterPump", n_pump.index()); + println!(" [{}] CoolingLoad", n_load.index()); + + // Connect refrigerant cycle + print_subsection("Connecting Refrigerant Circuit"); + system.add_edge(n_comp, n_cond).unwrap(); + system.add_edge(n_cond, n_exv).unwrap(); + system.add_edge(n_exv, n_evap).unwrap(); + system.add_edge(n_evap, n_comp).unwrap(); + println!(" Compressor → Condenser → EXV → Evaporator → Compressor"); + println!(" {} Refrigerant cycle connected", "✓".green()); + + // Connect water cycle (independent circuit - no 
cross-circuit flow edges!) + print_subsection("Connecting Water Circuit"); + system.add_edge(n_pump, n_load).unwrap(); + system.add_edge(n_load, n_pump).unwrap(); + println!(" Pump → CoolingLoad → Pump (independent closed loop)"); + println!(" {} Water circuit connected", "✓".green()); + println!(); + println!( + " {} Thermal coupling handles heat transfer between circuits", + "Note:".cyan() + ); + println!(" (No cross-circuit flow edges allowed)"); + + println!(); + println!( + " {} circuits, {} components, {} flow edges", + system.circuit_count(), + system.node_count(), + system.edge_count() + ); + + // ======================================== + // Part 4: Thermal Coupling + // ======================================== + print_header("Thermal Coupling"); + + print_section("Adding Heat Exchanger Coupling"); + + // The evaporator thermally couples water to refrigerant + // Water is hot side, refrigerant is cold side (evaporating) + let coupling = ThermalCoupling::new( + entropyk_solver::CircuitId(1), // Hot: water circuit + entropyk_solver::CircuitId(0), // Cold: refrigerant circuit (evaporating) + ThermalConductance::from_watts_per_kelvin(ua_evap), + ) + .with_efficiency(0.95); + + let idx = system.add_thermal_coupling(coupling.clone()).unwrap(); + println!(" Coupling [{}]:", idx); + println!( + " Hot circuit: Circuit 1 (Water @ {:.1}°C)", + t_water_in.to_celsius() + ); + println!( + " Cold circuit: Circuit 0 (R410A @ {:.1}°C)", + t_evap.to_celsius() + ); + println!(" UA: {:.0} W/K", coupling.ua.to_watts_per_kelvin()); + println!(" Efficiency: {:.0}%", coupling.efficiency * 100.0); + println!(" {} Thermal coupling added", "✓".green()); + + // Compute heat transfer at design point + print_subsection("Heat Transfer at Design Point"); + let q_calc = compute_coupling_heat(&coupling, t_water_in, t_evap); + println!(" T_hot (water): {:.1}°C", t_water_in.to_celsius()); + println!(" T_cold (ref): {:.1}°C", t_evap.to_celsius()); + println!( + " ΔT: {:.1} K", + 
t_water_in.to_kelvin() - t_evap.to_kelvin() + ); + println!( + " Q_calc: {:.0} W = {:.1} kW", + q_calc, + q_calc / 1000.0 + ); + + // ======================================== + // Part 5: Circular Dependency Check + // ======================================== + print_header("Solver Strategy"); + + print_section("Coupling Analysis"); + + let couplings = system.thermal_couplings(); + let has_cycle = has_circular_dependencies(couplings); + + println!( + " Circular dependency: {}", + if has_cycle { "YES".red() } else { "NO".green() } + ); + + let groups = coupling_groups(couplings); + println!(" Coupling groups: {:?}", groups); + + if has_cycle { + println!(); + println!( + " {} Circuits with mutual coupling must be solved SIMULTANEOUSLY", + "→".yellow() + ); + println!(" (Newton-Raphson on combined system)"); + } else { + println!(); + println!(" {} Circuits can be solved SEQUENTIALLY", "→".green()); + println!(" (Water circuit → Refrigerant circuit)"); + } + + // ======================================== + // Part 6: Finalize System + // ======================================== + print_header("System Finalization"); + + match system.finalize() { + Ok(()) => { + println!(" {} System finalized successfully", "✓".green()); + println!( + " {} state variables (P, h per edge)", + system.state_vector_len() + ); + } + Err(e) => { + println!(" {} Finalization error: {:?}", "✗".red(), e); + } + } + + // ======================================== + // Summary + // ======================================== + print_header("System Summary"); + + println!(); + println!( + "{}", + " ┌─────────────────────────────────────────────────────────────┐".white() + ); + println!( + "{}", + " │ WATER CHILLER SYSTEM │".white() + ); + println!( + "{}", + " ├─────────────────────────────────────────────────────────────┤".white() + ); + println!( + "{}", + " │ REFRIGERANT CIRCUIT (R410A) │".white() + ); + println!( + "{}", + " │ Compressor: 2900 RPM, 30cc, η=85% │".white() + ); + println!( + " │ 
Condenser: Air-cooled, UA={:.0} W/K │", + ua_cond + ); + println!( + "{}", + " │ EXV: Isenthalpic, 100% open │".white() + ); + println!( + " │ Evaporator: BPHE, UA={:.0} W/K, SH=5K │", + ua_evap + ); + println!( + "{}", + " ├─────────────────────────────────────────────────────────────┤".white() + ); + println!( + "{}", + " │ WATER CIRCUIT │".white() + ); + println!( + " │ Pump: {:.2} kg/s, ΔP=200 kPa │", + 0.5 + ); + println!( + "{}", + " │ Inlet: 12°C │".white() + ); + println!( + "{}", + " │ Outlet: 7°C │".white() + ); + println!( + "{}", + " ├─────────────────────────────────────────────────────────────┤".white() + ); + println!( + "{}", + " │ PERFORMANCE (Design Point) │".white() + ); + println!( + " │ Q_evap: {:.1} kW │", + q_evap / 1000.0 + ); + println!( + " │ Q_cond: {:.1} kW │", + q_cond / 1000.0 + ); + println!( + " │ W_comp: {:.2} kW │", + w_comp / 1000.0 + ); + println!( + " │ COP: {:.1} │", + q_evap / w_comp + ); + println!( + "{}", + " └─────────────────────────────────────────────────────────────┘".white() + ); + + println!(); + println!("{}", "═".repeat(70).cyan()); + println!( + "{}", + " Next: Implement solver (Epic 4) to run full simulation".cyan() + ); + println!("{}", "═".repeat(70).cyan()); +} diff --git a/demo/src/bin/eurovent.rs b/demo/src/bin/eurovent.rs index ba4f3e2..398a18c 100644 --- a/demo/src/bin/eurovent.rs +++ b/demo/src/bin/eurovent.rs @@ -104,6 +104,7 @@ fn main() { "Water", )); + let cond_state = condenser_with_backend.hot_inlet_state().ok(); let cond = system.add_component_to_circuit(Box::new(condenser_with_backend), CircuitId(0)).unwrap(); // 40°C condensing backed by TestBackend let exv = system.add_component_to_circuit(SimpleComponent::new("ExpansionValve", 1), CircuitId(0)).unwrap(); @@ -285,6 +286,15 @@ fn main() { )); println!(" {} Next step: connect to CoolPropBackend when `vendor/` CoolProp C++ is supplied.", "→".cyan()); + + if let Some(state) = cond_state { + println!("\n {} Retrieved full ThermoState from Condenser 
hot inlet (before solve):", "✓".green()); + println!(" - Pressure: {:.2} bar", state.pressure.to_bar()); + println!(" - Temperature: {:.2} °C", state.temperature.to_celsius()); + println!(" - Enthalpy: {:.2} kJ/kg", state.enthalpy.to_joules_per_kg() / 1000.0); + println!(" - Density: {:.2} kg/m³", state.density); + println!(" - Phase: {:?}", state.phase); + } println!("\n{}", "═".repeat(70).cyan()); } diff --git a/demo/src/bin/expansion_valve.rs b/demo/src/bin/expansion_valve.rs new file mode 100644 index 0000000..8be1af1 --- /dev/null +++ b/demo/src/bin/expansion_valve.rs @@ -0,0 +1,32 @@ +//! Exemple: Détendeur (expansion valve) +//! +//! Modélisation d'une détente isenthalpique. +//! +//! Exécuter: cargo run -p entropyk-demo --bin expansion_valve + +use entropyk_components::expansion_valve::ExpansionValve; +use entropyk_components::port::{FluidId, Port}; +use entropyk_core::{Enthalpy, Pressure}; + +fn main() -> Result<(), Box> { + println!("=== Exemple: Détendeur (Expansion Valve) ===\n"); + + let inlet = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(10.0), + Enthalpy::from_joules_per_kg(250_000.0), + ); + let outlet = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(10.0), + Enthalpy::from_joules_per_kg(250_000.0), + ); + + let valve = ExpansionValve::new(inlet, outlet, Some(1.0))?; + + println!("Détendeur créé:"); + println!(" - Fluide: {}", valve.fluid_id()); + println!(" - Ouverture: {:?}", valve.opening()); + + Ok(()) +} diff --git a/demo/src/bin/pipe.rs b/demo/src/bin/pipe.rs new file mode 100644 index 0000000..5c0e22a --- /dev/null +++ b/demo/src/bin/pipe.rs @@ -0,0 +1,40 @@ +//! Exemple: Conduite (pipe) +//! +//! Perte de charge avec Darcy-Weisbach. +//! +//! 
Exécuter: cargo run -p entropyk-demo --bin pipe + +use entropyk_components::pipe::{Pipe, PipeGeometry, roughness}; +use entropyk_components::port::{FluidId, Port}; +use entropyk_core::{Enthalpy, Pressure}; + +fn main() -> Result<(), Box> { + println!("=== Exemple: Conduite (Pipe) ===\n"); + + let geometry = PipeGeometry::new( + 10.0, + 0.022, + roughness::SMOOTH, + )?; + + let inlet = Port::new( + FluidId::new("Water"), + Pressure::from_bar(2.0), + Enthalpy::from_joules_per_kg(84_000.0), + ); + let outlet = Port::new( + FluidId::new("Water"), + Pressure::from_bar(2.0), + Enthalpy::from_joules_per_kg(84_000.0), + ); + + let pipe = Pipe::new(geometry, inlet, outlet, 998.0, 0.001)?; + + println!("Conduite créée:"); + println!(" - Longueur: {} m", pipe.geometry().length_m); + println!(" - Diamètre: {} m", pipe.geometry().diameter_m); + println!(" - Rugosité: {} m", pipe.geometry().roughness_m); + println!(" - Fluide: {}", pipe.fluid_id()); + + Ok(()) +} diff --git a/demo/src/bin/ports.rs b/demo/src/bin/ports.rs new file mode 100644 index 0000000..b458b93 --- /dev/null +++ b/demo/src/bin/ports.rs @@ -0,0 +1,43 @@ +//! Exemple: Ports et connexions +//! +//! Démonstration du Type-State pattern pour les ports thermodynamiques. +//! +//! 
Exécuter: cargo run -p entropyk-demo --bin ports + +use entropyk_components::port::{ConnectionError, FluidId, Port}; +use entropyk_core::{Enthalpy, Pressure}; + +fn main() -> Result<(), ConnectionError> { + println!("=== Exemple: Ports et Connexions ===\n"); + + let port1 = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(1.0), + Enthalpy::from_joules_per_kg(400_000.0), + ); + + let port2 = Port::new( + FluidId::new("R134a"), + Pressure::from_bar(1.0), + Enthalpy::from_joules_per_kg(400_000.0), + ); + + println!("Port 1: fluide={}, P={:.2} bar, h={:.0} J/kg", + port1.fluid_id(), + port1.pressure().to_bar(), + port1.enthalpy().to_joules_per_kg() + ); + + let (mut connected1, _connected2) = port1.connect(port2)?; + println!("\n✅ Ports connectés avec succès!"); + + connected1.set_pressure(Pressure::from_bar(1.5)); + connected1.set_enthalpy(Enthalpy::from_joules_per_kg(450_000.0)); + + println!("Port 1 modifié: P={:.2} bar, h={:.0} J/kg", + connected1.pressure().to_bar(), + connected1.enthalpy().to_joules_per_kg() + ); + + Ok(()) +} diff --git a/demo/src/bin/pump.rs b/demo/src/bin/pump.rs new file mode 100644 index 0000000..894945f --- /dev/null +++ b/demo/src/bin/pump.rs @@ -0,0 +1,43 @@ +//! Exemple: Pompe +//! +//! Courbes de performance polynomiales. +//! +//! 
Exécuter: cargo run -p entropyk-demo --bin pump + +use entropyk_components::port::{FluidId, Port}; +use entropyk_components::pump::{Pump, PumpCurves}; +use entropyk_core::{Enthalpy, Pressure}; + +fn main() -> Result<(), Box> { + println!("=== Exemple: Pompe ===\n"); + + let curves = PumpCurves::quadratic( + 30.0, -10.0, -50.0, + 0.5, 0.3, -0.5, + )?; + + let inlet = Port::new( + FluidId::new("Water"), + Pressure::from_bar(1.0), + Enthalpy::from_joules_per_kg(100_000.0), + ); + let outlet = Port::new( + FluidId::new("Water"), + Pressure::from_bar(1.0), + Enthalpy::from_joules_per_kg(100_000.0), + ); + + let pump = Pump::new(curves, inlet, outlet, 1000.0)?; + + println!("Pompe créée:"); + println!(" - Fluide: {}", pump.fluid_id()); + println!(" - Densité: {} kg/m³", pump.fluid_density()); + + for q in [0.0, 0.05, 0.1, 0.2] { + let head = pump.curves().head_at_flow(q); + let eff = pump.curves().efficiency_at_flow(q); + println!(" - Q={:.2} m³/s: H={:.2} m, η={:.1}%", q, head, eff * 100.0); + } + + Ok(()) +} diff --git a/demo/src/bin/pump_compressor_polynomials.rs b/demo/src/bin/pump_compressor_polynomials.rs new file mode 100644 index 0000000..e024e1b --- /dev/null +++ b/demo/src/bin/pump_compressor_polynomials.rs @@ -0,0 +1,131 @@ +//! Exemple: Pompe et compresseur avec polynômes +//! +//! Démontre l'utilisation des polynômes pour modéliser: +//! - Pompe: courbes H(Q) et η(Q) avec Polynomial1D +//! - Compresseur: modèle SST/SDT avec Polynomial2D +//! - Lois d'affinité pour variation de vitesse +//! +//! 
Exécuter: cargo run -p entropyk-demo --bin pump_compressor_polynomials + +use entropyk_components::compressor::SstSdtCoefficients; +use entropyk_components::polynomials::{AffinityLaws, Polynomial1D}; +use entropyk_components::port::{FluidId, Port}; +use entropyk_components::pump::{Pump, PumpCurves}; +use entropyk_core::{Enthalpy, Pressure}; + +fn main() -> Result<(), Box> { + println!("╔══════════════════════════════════════════════════════════════╗"); + println!("║ Exemple: Pompe et Compresseur avec Polynômes ║"); + println!("╚══════════════════════════════════════════════════════════════╝\n"); + + // ═══════════════════════════════════════════════════════════════ + // 1. POLYNÔMES 1D - Courbes de pompe + // ═══════════════════════════════════════════════════════════════ + println!("📐 1. Polynômes 1D (Pompe)\n"); + + // H = 30 - 10*Q - 50*Q² (hauteur en m, Q en m³/s) + let head_poly = Polynomial1D::quadratic(30.0, -10.0, -50.0); + // η = 0.5 + 0.3*Q - 0.5*Q² (rendement 0-1) + let eff_poly = Polynomial1D::quadratic(0.5, 0.3, -0.5); + + println!(" Courbe hauteur: H = 30 - 10*Q - 50*Q²"); + println!(" Courbe rendement: η = 0.5 + 0.3*Q - 0.5*Q²\n"); + + for q in [0.0, 0.05, 0.1, 0.15, 0.2] { + let h = head_poly.evaluate(q); + let eta = eff_poly.evaluate(q); + println!(" Q={:.2} m³/s → H={:.2} m, η={:.1}%", q, h, eta.clamp(0.0, 1.0) * 100.0); + } + + // ═══════════════════════════════════════════════════════════════ + // 2. POMPE avec courbes polynomiales + // ═══════════════════════════════════════════════════════════════ + println!("\n🔧 2. 
Pompe (PumpCurves polynomiales)\n"); + + let curves = PumpCurves::quadratic( + 30.0, -10.0, -50.0, // H = h0 + h1*Q + h2*Q² + 0.5, 0.3, -0.5, // η = e0 + e1*Q + e2*Q² + )?; + + let inlet = Port::new( + FluidId::new("Water"), + Pressure::from_bar(1.0), + Enthalpy::from_joules_per_kg(100_000.0), + ); + let outlet = Port::new( + FluidId::new("Water"), + Pressure::from_bar(1.0), + Enthalpy::from_joules_per_kg(100_000.0), + ); + + let pump = Pump::new(curves, inlet, outlet, 1000.0)?; + + println!(" Pompe créée (eau, ρ=1000 kg/m³)"); + println!(" Point nominal Q=0.1 m³/s: H={:.2} m, η={:.1}%\n", + pump.curves().head_at_flow(0.1), + pump.curves().efficiency_at_flow(0.1) * 100.0 + ); + + // ═══════════════════════════════════════════════════════════════ + // 3. POLYNÔMES 2D - Modèle compresseur SST/SDT + // ═══════════════════════════════════════════════════════════════ + println!("📐 3. Polynômes 2D (Compresseur SST/SDT)\n"); + + // Modèle bilinéaire: ṁ = a00 + a10*SST + a01*SDT + a11*SST*SDT + // Ẇ = b00 + b10*SST + b01*SDT + b11*SST*SDT + let sst_sdt = SstSdtCoefficients::bilinear( + 0.05, 0.001, 0.0005, 0.00001, // débit (kg/s) + 1000.0, 50.0, 30.0, 0.5, // puissance (W) + ); + + println!(" Modèle: ṁ = f(SST, SDT), Ẇ = g(SST, SDT)"); + println!(" SST = température saturation aspiration (K)"); + println!(" SDT = température saturation refoulement (K)\n"); + + // Conditions typiques: évaporation -5°C (268K), condensation 40°C (313K) + let sst_evap = 268.15; // -5°C + let sdt_cond = 313.15; // 40°C + + let mass_flow = sst_sdt.mass_flow_at(sst_evap, sdt_cond); + let power = sst_sdt.power_at(sst_evap, sdt_cond); + + println!(" SST={:.1} K (-5°C), SDT={:.1} K (40°C):", sst_evap, sdt_cond); + println!(" → ṁ = {:.4} kg/s", mass_flow); + println!(" → Ẇ = {:.0} W\n", power); + + // Grille de conditions + println!(" Grille de performance:"); + println!(" {:>8} | {:>8} {:>8} {:>8} {:>8}", "SST\\SDT", "303K", "308K", "313K", "318K"); + println!(" {} | {} {} {} {}", "--------", 
"--------", "--------", "--------", "--------"); + + for sst in [263.15, 268.15, 273.15] { + print!(" {:>6.0}K |", sst); + for sdt in [303.15, 308.15, 313.15, 318.15] { + let m = sst_sdt.mass_flow_at(sst, sdt); + print!(" {:>7.3} ", m); + } + println!(); + } + + // ═══════════════════════════════════════════════════════════════ + // 4. Lois d'affinité (variation de vitesse) + // ═══════════════════════════════════════════════════════════════ + println!("\n📐 4. Lois d'affinité (pompe/ventilateur à vitesse variable)\n"); + + let speed_ratios = [1.0, 0.8, 0.6, 0.5]; + + println!(" À 50% vitesse: Q₂=0.5*Q₁, H₂=0.25*H₁, P₂=0.125*P₁\n"); + println!(" {:>10} | {:>10} {:>10} {:>10}", "Vitesse", "Q ratio", "H ratio", "P ratio"); + println!(" {} | {} {} {}", "----------", "----------", "----------", "----------"); + + for &ratio in &speed_ratios { + // AffinityLaws: Q₂=scale_flow(Q₁), H₂=scale_head(H₁), P₂=scale_power(P₁) + let q_ratio = AffinityLaws::scale_flow(1.0, ratio); + let h_ratio = AffinityLaws::scale_head(1.0, ratio); + let p_ratio = AffinityLaws::scale_power(1.0, ratio); + println!(" {:>8.0}% | {:>10.2} {:>10.2} {:>10.2}", ratio * 100.0, q_ratio, h_ratio, p_ratio); + } + + println!("\n✅ Exemple terminé !"); + Ok(()) +} diff --git a/demo/src/bin/thermal_coupling.rs b/demo/src/bin/thermal_coupling.rs new file mode 100644 index 0000000..7680dbf --- /dev/null +++ b/demo/src/bin/thermal_coupling.rs @@ -0,0 +1,428 @@ +//! Demo Entropyk - Thermal Coupling Between Circuits +//! +//! This example demonstrates: +//! - Multi-circuit system creation (2 circuits) +//! - Component placement in circuits +//! - Thermal coupling between circuits (heat exchanger) +//! - Circular dependency detection +//! 
- Heat transfer computation + +use colored::Colorize; +use entropyk_components::{ + Component, ComponentError, JacobianBuilder, ResidualVector, SystemState, +}; +use entropyk_core::{Temperature, ThermalConductance}; +use entropyk_solver::{ + compute_coupling_heat, coupling_groups, has_circular_dependencies, CircuitId, System, + ThermalCoupling, +}; +use std::fmt; + +fn print_header(title: &str) { + println!(); + println!("{}", "═".repeat(60).cyan()); + println!("{}", format!(" {}", title).cyan().bold()); + println!("{}", "═".repeat(60).cyan()); +} + +fn print_section(title: &str) { + println!(); + println!("{}", format!("▶ {}", title).yellow().bold()); + println!("{}", "─".repeat(40).yellow()); +} + +struct SimpleComponent { + name: String, + n_eqs: usize, +} + +impl SimpleComponent { + fn new(name: &str) -> Box { + Box::new(Self { + name: name.to_string(), + n_eqs: 0, + }) + } +} + +impl Component for SimpleComponent { + fn compute_residuals( + &self, + _state: &SystemState, + residuals: &mut ResidualVector, + ) -> Result<(), ComponentError> { + for r in residuals.iter_mut().take(self.n_eqs) { + *r = 0.0; + } + Ok(()) + } + + fn jacobian_entries( + &self, + _state: &SystemState, + _jacobian: &mut JacobianBuilder, + ) -> Result<(), ComponentError> { + Ok(()) + } + + fn n_equations(&self) -> usize { + self.n_eqs + } + + fn get_ports(&self) -> &[entropyk_components::ConnectedPort] { + &[] + } +} + +impl fmt::Debug for SimpleComponent { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("SimpleComponent") + .field("name", &self.name) + .finish() + } +} + +fn main() { + println!( + "{}", + "\n╔══════════════════════════════════════════════════════════╗".green() + ); + println!( + "{}", + "║ ENTROPYK - Thermal Coupling Demo (Story 3.4) ║" + .green() + .bold() + ); + println!( + "{}", + "╚══════════════════════════════════════════════════════════╝\n".green() + ); + + // ======================================== + // PART 1: Basic Thermal 
Coupling + // ======================================== + print_header("Part 1: Basic Thermal Coupling"); + + print_section("Creating ThermalCoupling struct"); + let coupling = ThermalCoupling::new( + CircuitId(0), // Hot circuit (refrigerant) + CircuitId(1), // Cold circuit (water/glycol) + ThermalConductance::from_watts_per_kelvin(5000.0), // 5 kW/K UA value + ) + .with_efficiency(0.95); // 95% heat exchanger efficiency + + println!(" {} {:?}", "Coupling:".white(), coupling); + println!( + " {} {} W/K", + "UA:".white(), + coupling.ua.to_watts_per_kelvin() + ); + println!( + " {} {:.0}%", + "Efficiency:".white(), + coupling.efficiency * 100.0 + ); + + print_section("Computing heat transfer"); + let t_hot = Temperature::from_celsius(45.0); // Refrigerant condensing at 45°C + let t_cold = Temperature::from_celsius(35.0); // Water entering at 35°C + + let q = compute_coupling_heat(&coupling, t_hot, t_cold); + + println!( + " {} {:.1}°C ({:.1} K)", + "T_hot:".white(), + t_hot.to_celsius(), + t_hot.to_kelvin() + ); + println!( + " {} {:.1}°C ({:.1} K)", + "T_cold:".white(), + t_cold.to_celsius(), + t_cold.to_kelvin() + ); + println!( + " {} {:.1} K", + "ΔT:".white(), + t_hot.to_kelvin() - t_cold.to_kelvin() + ); + println!(); + println!( + " {} {:.1} W = {:.2} kW", + "Heat transfer (Q):".green().bold(), + q, + q / 1000.0 + ); + println!( + " {} Q > 0 means heat flows INTO cold circuit", + "Sign convention:".white() + ); + + // Energy conservation demonstration + println!(); + println!("{}", " Energy Conservation:".cyan()); + let q_into_cold = q; + let q_out_of_hot = -q; + println!( + " Q_cold = {:.2} kW (heat received)", + q_into_cold / 1000.0 + ); + println!( + " Q_hot = {:.2} kW (heat rejected)", + q_out_of_hot / 1000.0 + ); + println!(" {} Q_cold + Q_hot = 0 ✓", "Check:".green()); + + // ======================================== + // PART 2: Multi-Circuit System + // ======================================== + print_header("Part 2: Multi-Circuit System"); + + 
print_section("Creating 2-circuit heat pump system"); + let mut system = System::new(); + + // Circuit 0: Refrigerant circuit + let comp = system + .add_component_to_circuit(SimpleComponent::new("Compressor"), CircuitId(0)) + .unwrap(); + let cond = system + .add_component_to_circuit(SimpleComponent::new("Condenser"), CircuitId(0)) + .unwrap(); + let valve = system + .add_component_to_circuit(SimpleComponent::new("ExpansionValve"), CircuitId(0)) + .unwrap(); + let evap = system + .add_component_to_circuit(SimpleComponent::new("Evaporator"), CircuitId(0)) + .unwrap(); + + // Circuit 1: Water/glycol circuit + let pump = system + .add_component_to_circuit(SimpleComponent::new("Pump"), CircuitId(1)) + .unwrap(); + let hx = system + .add_component_to_circuit(SimpleComponent::new("HeatExchanger"), CircuitId(1)) + .unwrap(); + + println!(" Circuit 0 (Refrigerant):"); + println!(" - Compressor, Condenser, ExpansionValve, Evaporator"); + println!(" Circuit 1 (Water/Glycol):"); + println!(" - Pump, HeatExchanger"); + + // Connect refrigerant circuit (cycle) + system.add_edge(comp, cond).unwrap(); + system.add_edge(cond, valve).unwrap(); + system.add_edge(valve, evap).unwrap(); + system.add_edge(evap, comp).unwrap(); + + // Connect water circuit (simple loop) + system.add_edge(pump, hx).unwrap(); + system.add_edge(hx, pump).unwrap(); + + println!(); + println!( + " {} {} circuits, {} components, {} flow edges", + "System:".white(), + system.circuit_count(), + system.node_count(), + system.edge_count() + ); + + print_section("Adding thermal coupling between circuits"); + let thermal_coupling = ThermalCoupling::new( + CircuitId(0), // Hot: refrigerant condenser + CircuitId(1), // Cold: water circuit heat exchanger + ThermalConductance::from_watts_per_kelvin(8000.0), + ); + + match system.add_thermal_coupling(thermal_coupling.clone()) { + Ok(idx) => println!(" {} Coupling added at index {}", "✓".green(), idx), + Err(e) => println!(" {} Error: {:?}", "✗".red(), e), + } + + 
println!(); + println!( + " {} {}", + "Couplings:".white(), + system.thermal_coupling_count() + ); + for (i, c) in system.thermal_couplings().iter().enumerate() { + println!( + " [{}] Circuit {} → Circuit {} (UA = {} W/K)", + i, + c.hot_circuit.0, + c.cold_circuit.0, + c.ua.to_watts_per_kelvin() + ); + } + + // Finalize system + match system.finalize() { + Ok(()) => println!("\n {} System finalized successfully", "✓".green()), + Err(e) => println!("\n {} Finalize error: {:?}", "✗".red(), e), + } + + // ======================================== + // PART 3: Circular Dependency Detection + // ======================================== + print_header("Part 3: Circular Dependency Detection"); + + print_section("Scenario A: Single coupling (no cycle)"); + let couplings_a = vec![ThermalCoupling::new( + CircuitId(0), + CircuitId(1), + ThermalConductance::from_watts_per_kelvin(1000.0), + )]; + let has_cycle_a = has_circular_dependencies(&couplings_a); + println!(" Couplings: Circuit 0 → Circuit 1"); + println!( + " {} {}", + "Circular dependency:".white(), + if has_cycle_a { + "YES (solve simultaneously)".red() + } else { + "NO (solve sequentially)".green() + } + ); + + let groups_a = coupling_groups(&couplings_a); + println!(" {} {:?}", "Coupling groups:".white(), groups_a); + + print_section("Scenario B: Mutual coupling (cycle!)"); + let couplings_b = vec![ + ThermalCoupling::new( + CircuitId(0), + CircuitId(1), + ThermalConductance::from_watts_per_kelvin(1000.0), + ), + ThermalCoupling::new( + CircuitId(1), + CircuitId(0), // Back-coupling! 
+ ThermalConductance::from_watts_per_kelvin(500.0), + ), + ]; + let has_cycle_b = has_circular_dependencies(&couplings_b); + println!(" Couplings:"); + println!(" Circuit 0 → Circuit 1"); + println!(" Circuit 1 → Circuit 0 (back-coupling!)"); + println!(); + println!( + " {} {}", + "Circular dependency:".white(), + if has_cycle_b { + "YES (solve simultaneously)".red() + } else { + "NO (solve sequentially)".green() + } + ); + + let groups_b = coupling_groups(&couplings_b); + println!(" {} {:?}", "Coupling groups:".white(), groups_b); + if groups_b.iter().any(|g| g.len() > 1) { + println!( + " {} Circuits in same group must be solved together", + "→".yellow() + ); + } + + print_section("Scenario C: Chain + mutual (complex)"); + let couplings_c = vec![ + ThermalCoupling::new( + CircuitId(0), + CircuitId(1), + ThermalConductance::from_watts_per_kelvin(1000.0), + ), + ThermalCoupling::new( + CircuitId(1), + CircuitId(0), + ThermalConductance::from_watts_per_kelvin(500.0), + ), // 0↔1 cycle + ThermalCoupling::new( + CircuitId(2), + CircuitId(3), + ThermalConductance::from_watts_per_kelvin(800.0), + ), // independent + ]; + let has_cycle_c = has_circular_dependencies(&couplings_c); + println!(" Couplings:"); + println!(" Circuit 0 ↔ Circuit 1 (mutual)"); + println!(" Circuit 2 → Circuit 3 (independent)"); + println!(); + println!( + " {} {}", + "Circular dependency:".white(), + if has_cycle_c { + "YES".red() + } else { + "NO".green() + } + ); + + let groups_c = coupling_groups(&couplings_c); + println!(" {} {:?}", "Coupling groups:".white(), groups_c); + println!( + " {} [0,1] together, [2] independent, [3] independent", + "→".yellow() + ); + + // ======================================== + // PART 4: Error Handling + // ======================================== + print_header("Part 4: Error Handling"); + + print_section("Invalid circuit validation"); + let mut sys_test = System::new(); + sys_test + .add_component_to_circuit(SimpleComponent::new("A"), CircuitId(0)) + 
.unwrap(); + // Circuit 1 has NO components! + + let invalid_coupling = ThermalCoupling::new( + CircuitId(0), + CircuitId(1), // This circuit doesn't exist! + ThermalConductance::from_watts_per_kelvin(1000.0), + ); + + match sys_test.add_thermal_coupling(invalid_coupling) { + Ok(_) => println!(" {} Unexpected success!", "✗".red()), + Err(e) => { + println!(" {} Correctly rejected invalid coupling", "✓".green()); + println!(" {} {}", "Error:".white(), e); + } + } + + // ======================================== + // Summary + // ======================================== + print_header("Summary"); + + println!(); + println!( + " {} ThermalCoupling struct with hot/cold circuits + UA + efficiency", + "✓".green() + ); + println!( + " {} compute_coupling_heat() with sign convention (Q > 0 = heat into cold)", + "✓".green() + ); + println!( + " {} has_circular_dependencies() via petgraph cycle detection", + "✓".green() + ); + println!( + " {} coupling_groups() via Kosaraju SCC for solver strategy", + "✓".green() + ); + println!( + " {} System.add_thermal_coupling() with circuit validation", + "✓".green() + ); + println!(" {} InvalidCircuitForCoupling error handling", "✓".green()); + + println!(); + println!("{}", "═".repeat(60).cyan()); + println!( + "{}", + " Demo complete! Run 'cargo run --bin thermal-coupling' again.".cyan() + ); + println!("{}", "═".repeat(60).cyan()); +} diff --git a/demo/src/bin/ui_server.rs b/demo/src/bin/ui_server.rs new file mode 100644 index 0000000..c1163c8 --- /dev/null +++ b/demo/src/bin/ui_server.rs @@ -0,0 +1,313 @@ +//! Serveur UI Entropyk - Utilise les composants Rust réels pour les calculs. +//! +//! Lance l'UI et une API qui exécute les calculs avec les vrais composants. +//! +//! 
cargo run -p entropyk-demo --bin ui-server + +use axum::{ + extract::Json, + routing::post, + Router, +}; +use entropyk_components::compressor::SstSdtCoefficients; +use entropyk_components::pipe::{friction_factor, Pipe, PipeGeometry}; +use entropyk_components::pump::{Pump, PumpCurves}; +use entropyk_components::port::{FluidId, Port}; +use entropyk_core::{Enthalpy, Pressure}; +use serde::{Deserialize, Serialize}; +use std::path::Path; +use tower_http::services::ServeDir; + +#[derive(Debug, Deserialize)] +struct ComponentConfig { + id: String, + #[serde(rename = "type")] + comp_type: String, + label: String, + config: serde_json::Value, +} + +#[derive(Debug, Deserialize)] +struct CalculateRequest { + components: Vec, +} + +#[derive(Debug, Serialize)] +struct ComponentResult { + id: String, + label: String, + #[serde(rename = "type")] + comp_type: String, + results: serde_json::Value, + error: Option, +} + +#[derive(Debug, Serialize)] +struct CalculateResponse { + results: Vec, +} + +fn parse_coeffs(s: &str) -> Vec { + s.split(',') + .filter_map(|x| x.trim().parse::().ok()) + .collect() +} + +async fn calculate( + axum::extract::Json(req): axum::extract::Json, +) -> axum::Json { + let mut results = Vec::new(); + + for comp in req.components { + let result = match comp.comp_type.as_str() { + "pump" => calc_pump(&comp), + "compressor" => calc_compressor(&comp), + "pipe" => calc_pipe(&comp), + "valve" => calc_valve(&comp), + _ => ComponentResult { + id: comp.id.clone(), + label: comp.label.clone(), + comp_type: comp.comp_type.clone(), + results: serde_json::json!({}), + error: Some("Type inconnu".to_string()), + }, + }; + results.push(result); + } + + Json(CalculateResponse { results }) +} + +fn calc_pump(comp: &ComponentConfig) -> ComponentResult { + let config = &comp.config; + let head_coeffs = config + .get("head_coeffs") + .and_then(|v| v.as_str()) + .unwrap_or("30,-10,-50"); + let eff_coeffs = config + .get("eff_coeffs") + .and_then(|v| v.as_str()) + 
.unwrap_or("0.5,0.3,-0.5"); + let density = config + .get("density") + .and_then(|v| v.as_f64()) + .unwrap_or(1000.0); + + let h = parse_coeffs(head_coeffs); + let e = parse_coeffs(eff_coeffs); + + if h.len() < 3 || e.len() < 3 { + return ComponentResult { + id: comp.id.clone(), + label: comp.label.clone(), + comp_type: "pump".to_string(), + results: serde_json::json!({}), + error: Some("Coefficients insuffisants (min 3 pour H et η)".to_string()), + }; + } + + match PumpCurves::quadratic(h[0], h[1], h[2], e[0], e[1], e[2]) { + Ok(curves) => { + let inlet = Port::new( + FluidId::new("Water"), + Pressure::from_bar(1.0), + Enthalpy::from_joules_per_kg(100_000.0), + ); + let outlet = Port::new( + FluidId::new("Water"), + Pressure::from_bar(1.0), + Enthalpy::from_joules_per_kg(100_000.0), + ); + + match Pump::new(curves, inlet, outlet, density) { + Ok(pump) => { + let points: Vec<_> = [0.0, 0.05, 0.1, 0.15, 0.2] + .iter() + .map(|&q| { + serde_json::json!({ + "Q_m3_s": q, + "H_m": pump.curves().head_at_flow(q), + "efficiency": pump.curves().efficiency_at_flow(q) + }) + }) + .collect(); + + ComponentResult { + id: comp.id.clone(), + label: comp.label.clone(), + comp_type: "pump".to_string(), + results: serde_json::json!({ + "curve_points": points, + "density_kg_m3": density + }), + error: None, + } + } + Err(e) => ComponentResult { + id: comp.id.clone(), + label: comp.label.clone(), + comp_type: "pump".to_string(), + results: serde_json::json!({}), + error: Some(e.to_string()), + }, + } + } + Err(e) => ComponentResult { + id: comp.id.clone(), + label: comp.label.clone(), + comp_type: "pump".to_string(), + results: serde_json::json!({}), + error: Some(e.to_string()), + }, + } +} + +fn calc_compressor(comp: &ComponentConfig) -> ComponentResult { + let config = &comp.config; + let mass_s = config + .get("mass_coeffs") + .and_then(|v| v.as_str()) + .unwrap_or("0.05,0.001,0.0005,0.00001"); + let power_s = config + .get("power_coeffs") + .and_then(|v| v.as_str()) + 
.unwrap_or("1000,50,30,0.5"); + + let m = parse_coeffs(mass_s); + let p = parse_coeffs(power_s); + + if m.len() < 4 || p.len() < 4 { + return ComponentResult { + id: comp.id.clone(), + label: comp.label.clone(), + comp_type: "compressor".to_string(), + results: serde_json::json!({}), + error: Some("Coefficients SST/SDT: 4 valeurs (a00,a10,a01,a11)".to_string()), + }; + } + + let sst_sdt = SstSdtCoefficients::bilinear( + m[0], m[1], m[2], m[3], + p[0], p[1], p[2], p[3], + ); + + let sst = 268.15; + let sdt = 313.15; + let mass_flow = sst_sdt.mass_flow_at(sst, sdt); + let power = sst_sdt.power_at(sst, sdt); + + ComponentResult { + id: comp.id.clone(), + label: comp.label.clone(), + comp_type: "compressor".to_string(), + results: serde_json::json!({ + "SST_K": sst, + "SDT_K": sdt, + "mass_flow_kg_s": mass_flow, + "power_W": power + }), + error: None, + } +} + +fn calc_pipe(comp: &ComponentConfig) -> ComponentResult { + let config = &comp.config; + let length = config.get("length").and_then(|v| v.as_f64()).unwrap_or(10.0); + let diameter = config.get("diameter").and_then(|v| v.as_f64()).unwrap_or(0.022); + let rough = config.get("roughness").and_then(|v| v.as_f64()).unwrap_or(1.5e-6); + + match PipeGeometry::new(length, diameter, rough) { + Ok(geometry) => { + let inlet = Port::new( + FluidId::new("Water"), + Pressure::from_bar(2.0), + Enthalpy::from_joules_per_kg(84_000.0), + ); + let outlet = Port::new( + FluidId::new("Water"), + Pressure::from_bar(2.0), + Enthalpy::from_joules_per_kg(84_000.0), + ); + + match Pipe::new(geometry, inlet, outlet, 998.0, 0.001) { + Ok(_pipe) => { + let flow = 0.01; + let area = geometry.area(); + let velocity = flow / area; + let re = velocity * diameter * 998.0 / 0.001; + let rel_rough = rough / diameter; + let f = friction_factor::haaland(rel_rough, re); + let dp = f * (length / diameter) * (998.0 * velocity * velocity / 2.0); + + ComponentResult { + id: comp.id.clone(), + label: comp.label.clone(), + comp_type: "pipe".to_string(), + 
results: serde_json::json!({ + "length_m": length, + "diameter_m": diameter, + "pressure_drop_Pa_at_0.01_m3_s": dp, + "reynolds_at_0.01_m3_s": re + }), + error: None, + } + } + Err(e) => ComponentResult { + id: comp.id.clone(), + label: comp.label.clone(), + comp_type: "pipe".to_string(), + results: serde_json::json!({}), + error: Some(e.to_string()), + }, + } + } + Err(e) => ComponentResult { + id: comp.id.clone(), + label: comp.label.clone(), + comp_type: "pipe".to_string(), + results: serde_json::json!({}), + error: Some(e.to_string()), + }, + } +} + +fn calc_valve(comp: &ComponentConfig) -> ComponentResult { + let config = &comp.config; + let opening = config.get("opening").and_then(|v| v.as_f64()).unwrap_or(1.0); + + ComponentResult { + id: comp.id.clone(), + label: comp.label.clone(), + comp_type: "valve".to_string(), + results: serde_json::json!({ + "opening": opening, + "note": "Détendeur isenthalpique - calcul complet avec solveur" + }), + error: None, + } +} + +#[tokio::main] +async fn main() { + let port = std::env::var("PORT").unwrap_or_else(|_| "3030".to_string()); + let addr = format!("0.0.0.0:{}", port); + + let ui_path = Path::new(env!("CARGO_MANIFEST_DIR")).join("../ui"); + println!("Entropyk UI - http://localhost:{}", port); + println!("Dossier UI: {}", ui_path.display()); + + let app = Router::new() + .route("/api/calculate", post(calculate)) + .nest_service("/", ServeDir::new(ui_path)); + + let listener = match tokio::net::TcpListener::bind(&addr).await { + Ok(l) => l, + Err(e) => { + eprintln!("Erreur: impossible de lier le port {} ({})", port, e); + eprintln!(" → Port déjà utilisé? 
Essayez: PORT=3031 cargo run -p entropyk-demo --bin ui-server"); + eprintln!(" → Ou tuez le processus: lsof -ti:{} | xargs kill", port); + std::process::exit(1); + } + }; + axum::serve(listener, app).await.unwrap(); +} diff --git a/demo/src/main.rs b/demo/src/main.rs new file mode 100644 index 0000000..f2d9bc4 --- /dev/null +++ b/demo/src/main.rs @@ -0,0 +1,107 @@ +//! Demo Entropyk - Test du State Machine (ON/OFF/BYPASS) +//! +//! Ce fichier montre comment utiliser OperationalState et CircuitId + +use colored::Colorize; +use entropyk_components::state_machine::{CircuitId, OperationalState}; + +fn print_header(title: &str) { + println!(); + println!("{}", "═".repeat(60).cyan()); + println!("{}", format!(" {}", title).cyan().bold()); + println!("{}", "═".repeat(60).cyan()); +} + +fn main() { + println!( + "{}", + "\n╔══════════════════════════════════════════════════════════╗".green() + ); + println!( + "{}", + "║ DEMO ENTROPYK - State Machine (ON/OFF/BYPASS) ║" + .green() + .bold() + ); + println!( + "{}", + "╚══════════════════════════════════════════════════════════╝\n".green() + ); + + print_header("États Opérationnels"); + + println!(); + println!("Les composants peuvent être dans 3 états:"); + println!(); + + for state in [ + OperationalState::On, + OperationalState::Off, + OperationalState::Bypass, + ] { + println!(" {:?}:", state); + println!(" - Actif: {}", state.is_active()); + println!( + " - Multiplicateur débit: {:.1}", + state.mass_flow_multiplier() + ); + + match state { + OperationalState::On => { + println!(" → Composant fonctionne normalement"); + } + OperationalState::Off => { + println!(" → Composant arrêté, débit = 0"); + } + OperationalState::Bypass => { + println!(" → Composant court-circuité (P_in = P_out, h_in = h_out)"); + } + } + println!(); + } + + print_header("CircuitId (Multi-Circuit)"); + + println!(); + println!("Un système peut avoir jusqu'à 5 circuits indépendants:"); + println!(); + + let circuits = vec![ + 
CircuitId::new("primary"), + CircuitId::new("secondary"), + CircuitId::default(), + ]; + + for circuit in &circuits { + println!(" Circuit: {} (as_str: \"{}\")", circuit, circuit.as_str()); + } + + println!(); + println!(" Utilisation typique:"); + println!(" - Circuit 0: Boucle réfrigérant principale"); + println!(" - Circuit 1: Circuit eau/glycol"); + println!(" - Circuit 2: Circuit secondaire (optionnel)"); + + print_header("Exemples d'utilisation"); + + println!(); + println!(" // Créer un système multi-circuit"); + println!(" let mut system = System::new();"); + println!(" system.add_component_to_circuit(compressor, CircuitId(0));"); + println!(" system.add_component_to_circuit(pump, CircuitId(1));"); + println!(); + println!(" // Couplage thermique entre circuits"); + println!(" let coupling = ThermalCoupling::new("); + println!(" CircuitId(0), // chaud"); + println!(" CircuitId(1), // froid"); + println!(" ThermalConductance::from_watts_per_kelvin(5000.0),"); + println!(" );"); + + println!(); + println!("{}", "═".repeat(60).cyan()); + println!( + "{}", + " Voir 'cargo run --bin thermal-coupling' pour la démo complète".cyan() + ); + println!("{}", "═".repeat(60).cyan()); +} diff --git a/ui/README.md b/ui/README.md new file mode 100644 index 0000000..075da41 --- /dev/null +++ b/ui/README.md @@ -0,0 +1,31 @@ +# Entropyk - Test UI + +Interface qui utilise les **composants Rust réels** pour les calculs. + +## Lancer + +```bash +cargo run -p entropyk-demo --bin ui-server +``` + +Puis ouvrir **http://localhost:3030** dans le navigateur. + +## Utilisation + +1. **Glisser-déposer** les composants de la palette vers le canvas +2. **Configurer** : sélectionner un composant → panneau droit pour fluide, polynômes, etc. +3. **Relier** : cliquer "Relier", puis cliquer sur une **sortie** (orange) puis une **entrée** (verte) +4. 
**Calculer** : envoie la config au serveur Rust, résultats affichés + +## Ports + +Chaque composant a des ports (points de connexion) : +- **Entrée** (vert) : aspiration, inlet +- **Sortie** (orange) : refoulement, outlet + +## Composants (calculs réels) + +- **Pompe** : PumpCurves, head_at_flow(), efficiency_at_flow() +- **Compresseur** : SstSdtCoefficients, mass_flow_at(), power_at() +- **Conduite** : PipeGeometry, Darcy-Weisbach, friction_factor +- **Détendeur** : config (solveur à venir) diff --git a/ui/index.html b/ui/index.html new file mode 100644 index 0000000..2aa9c12 --- /dev/null +++ b/ui/index.html @@ -0,0 +1,381 @@ + + + + + + Entropyk - Test UI + + + +
+

🔧 Entropyk - Test UI

+ +
+ +
+
+ +
+ + + + + +
+
+ + + +