From 27123e29fab84242b9651d8aa81637c995914bf7 Mon Sep 17 00:00:00 2001 From: Vittorio Distefano Date: Sat, 21 Mar 2026 14:43:56 +0100 Subject: [PATCH 1/2] feat(cli): salvage generic scaffold refresh for review --- crates/solverforge-cli/src/commands/new.rs | 27 +- crates/solverforge-cli/src/main.rs | 8 +- .../templates/basic/generic/Cargo.toml.tmpl | 8 +- .../templates/basic/generic/solver.toml | 12 + .../templates/basic/generic/src/api/dto.rs | 48 ++- .../templates/basic/generic/src/api/mod.rs | 1 + .../templates/basic/generic/src/api/routes.rs | 155 ++++++-- .../templates/basic/generic/src/api/sse.rs | 41 +++ .../generic/src/constraints/affinity_match.rs | 26 ++ .../generic/src/constraints/all_assigned.rs | 7 +- .../generic/src/constraints/balanced_load.rs | 16 + .../generic/src/constraints/capacity_limit.rs | 151 ++++++++ .../basic/generic/src/constraints/mod.rs | 10 +- .../templates/basic/generic/src/data/mod.rs | 69 +++- .../templates/basic/generic/src/domain/mod.rs | 2 +- .../basic/generic/src/domain/resource.rs | 13 +- .../basic/generic/src/domain/task.rs | 13 +- .../templates/basic/generic/src/main.rs.tmpl | 23 +- .../basic/generic/src/solver/service.rs | 81 +++- .../templates/basic/generic/static/app.js | 345 ++++++++++++++++++ .../templates/basic/generic/static/index.html | 83 +---- .../basic/generic/static/sf-config.json | 19 + .../templates/list/generic/Cargo.toml.tmpl | 27 ++ .../templates/list/generic/solver.toml | 14 + .../templates/list/generic/src/api/dto.rs | 114 ++++++ .../templates/list/generic/src/api/mod.rs | 5 + .../templates/list/generic/src/api/routes.rs | 225 ++++++++++++ .../templates/list/generic/src/api/sse.rs | 41 +++ .../generic/src/constraints/balanced_load.rs | 21 ++ .../list/generic/src/constraints/mod.rs | 21 ++ .../templates/list/generic/src/data/mod.rs | 49 +++ .../list/generic/src/domain/container.rs | 24 ++ .../templates/list/generic/src/domain/item.rs | 21 ++ .../templates/list/generic/src/domain/mod.rs | 7 + 
.../templates/list/generic/src/domain/plan.rs | 32 ++ .../templates/list/generic/src/lib.rs | 14 + .../templates/list/generic/src/main.rs.tmpl | 34 ++ .../templates/list/generic/src/solver/mod.rs | 4 + .../list/generic/src/solver/service.rs | 138 +++++++ .../templates/list/generic/static/app.js | 175 +++++++++ .../templates/list/generic/static/index.html | 17 + .../list/generic/static/sf-config.json | 13 + crates/solverforge-cli/tests/scaffold_test.rs | 119 ++++-- 43 files changed, 2090 insertions(+), 183 deletions(-) create mode 100644 crates/solverforge-cli/templates/basic/generic/src/api/sse.rs create mode 100644 crates/solverforge-cli/templates/basic/generic/src/constraints/affinity_match.rs create mode 100644 crates/solverforge-cli/templates/basic/generic/src/constraints/balanced_load.rs create mode 100644 crates/solverforge-cli/templates/basic/generic/src/constraints/capacity_limit.rs create mode 100644 crates/solverforge-cli/templates/basic/generic/static/app.js create mode 100644 crates/solverforge-cli/templates/basic/generic/static/sf-config.json create mode 100644 crates/solverforge-cli/templates/list/generic/Cargo.toml.tmpl create mode 100644 crates/solverforge-cli/templates/list/generic/solver.toml create mode 100644 crates/solverforge-cli/templates/list/generic/src/api/dto.rs create mode 100644 crates/solverforge-cli/templates/list/generic/src/api/mod.rs create mode 100644 crates/solverforge-cli/templates/list/generic/src/api/routes.rs create mode 100644 crates/solverforge-cli/templates/list/generic/src/api/sse.rs create mode 100644 crates/solverforge-cli/templates/list/generic/src/constraints/balanced_load.rs create mode 100644 crates/solverforge-cli/templates/list/generic/src/constraints/mod.rs create mode 100644 crates/solverforge-cli/templates/list/generic/src/data/mod.rs create mode 100644 crates/solverforge-cli/templates/list/generic/src/domain/container.rs create mode 100644 crates/solverforge-cli/templates/list/generic/src/domain/item.rs create 
mode 100644 crates/solverforge-cli/templates/list/generic/src/domain/mod.rs create mode 100644 crates/solverforge-cli/templates/list/generic/src/domain/plan.rs create mode 100644 crates/solverforge-cli/templates/list/generic/src/lib.rs create mode 100644 crates/solverforge-cli/templates/list/generic/src/main.rs.tmpl create mode 100644 crates/solverforge-cli/templates/list/generic/src/solver/mod.rs create mode 100644 crates/solverforge-cli/templates/list/generic/src/solver/service.rs create mode 100644 crates/solverforge-cli/templates/list/generic/static/app.js create mode 100644 crates/solverforge-cli/templates/list/generic/static/index.html create mode 100644 crates/solverforge-cli/templates/list/generic/static/sf-config.json diff --git a/crates/solverforge-cli/src/commands/new.rs b/crates/solverforge-cli/src/commands/new.rs index 02426ce9..59831182 100644 --- a/crates/solverforge-cli/src/commands/new.rs +++ b/crates/solverforge-cli/src/commands/new.rs @@ -11,6 +11,8 @@ static BASIC_GENERIC_TEMPLATE: Dir = include_dir!("$CARGO_MANIFEST_DIR/templates static EMPLOYEE_SCHEDULING_TEMPLATE: Dir = include_dir!("$CARGO_MANIFEST_DIR/templates/basic/employee-scheduling"); +static LIST_GENERIC_TEMPLATE: Dir = include_dir!("$CARGO_MANIFEST_DIR/templates/list/generic"); + static VEHICLE_ROUTING_TEMPLATE: Dir = include_dir!("$CARGO_MANIFEST_DIR/templates/list/vehicle-routing"); @@ -20,6 +22,7 @@ const AVAILABLE_TEMPLATES: &str = " --basic=employee-scheduling — assign employees to shifts List Variable (each entity owns an ordered sequence): + --list — generic list-variable skeleton --list=vehicle-routing — capacitated vehicle routing (CVRP)"; pub fn run( @@ -53,6 +56,15 @@ pub fn run( skip_readme, quiet, ), + Template::List => scaffold( + name, + &crate_name, + &LIST_GENERIC_TEMPLATE, + "list", + skip_git, + skip_readme, + quiet, + ), Template::ListVehicleRouting => scaffold( name, &crate_name, @@ -269,6 +281,15 @@ fn print_template_guidance(project_name: &str, label: &str) { 
println!(" solverforge generate constraint all_assigned --unary --hard"); println!(" solverforge server"); } + "list" => { + println!(" solverforge server"); + println!(); + println!(" This template includes:"); + println!(" - 2-phase solver (cheapest insertion + late acceptance)"); + println!(" - Balanced load constraint (soft)"); + println!(" - Sequence view at http://localhost:7860"); + println!(" - REST API with SSE live updates"); + } _ => { println!(" solverforge server"); } @@ -345,6 +366,7 @@ fn generate_readme(project_name: &str, _crate_name: &str, label: &str) -> String pub enum Template { Basic, BasicEmployeeScheduling, + List, ListVehicleRouting, } @@ -353,10 +375,7 @@ impl Template { match (basic, list, specialization) { (true, false, None) => Ok(Template::Basic), (true, false, Some("employee-scheduling")) => Ok(Template::BasicEmployeeScheduling), - (false, true, None) => Err(CliError::with_hint( - "the --list template requires a specialization", - "Use --list=vehicle-routing".to_string(), - )), + (false, true, None) => Ok(Template::List), (false, true, Some("vehicle-routing")) => Ok(Template::ListVehicleRouting), (false, false, None) => Err(CliError::with_hint( "specify a template flag", diff --git a/crates/solverforge-cli/src/main.rs b/crates/solverforge-cli/src/main.rs index e58f7a8b..ab1ebaae 100644 --- a/crates/solverforge-cli/src/main.rs +++ b/crates/solverforge-cli/src/main.rs @@ -13,6 +13,8 @@ use error::CliResult; const EXAMPLES: &str = "\x1b[1mExamples:\x1b[0m solverforge new my-scheduler --basic=employee-scheduling + solverforge new my-planner --basic + solverforge new my-sorter --list solverforge new my-router --list=vehicle-routing solverforge generate entity shift --planning-variable employee_idx solverforge generate constraint no_overlap --pair --hard @@ -56,14 +58,14 @@ enum Command { /// Variable class (required, mutually exclusive): /// /// --basic Standard variable — each entity holds one assigned value - /// --list=... 
List variable — each entity owns an ordered sequence + /// --list List variable — each entity owns an ordered sequence /// /// Specializations (append after the flag with =): /// /// --basic=employee-scheduling /// --list=vehicle-routing #[command( - after_help = "Examples:\n solverforge new my-scheduler --basic=employee-scheduling\n solverforge new my-router --list=vehicle-routing\n solverforge new my-planner --basic" + after_help = "Examples:\n solverforge new my-scheduler --basic=employee-scheduling\n solverforge new my-planner --basic\n solverforge new my-sorter --list\n solverforge new my-router --list=vehicle-routing" )] New { /// Project name (directory that will be created) @@ -73,7 +75,7 @@ enum Command { #[arg(long = "basic", value_name = "SPECIALIZATION", num_args = 0..=1, require_equals = true)] basic: Option>, - /// Scaffold a list-variable project specialization (currently: --list=vehicle-routing) + /// Scaffold a list-variable project (optionally: --list=vehicle-routing) #[arg( long = "list", value_name = "SPECIALIZATION", diff --git a/crates/solverforge-cli/templates/basic/generic/Cargo.toml.tmpl b/crates/solverforge-cli/templates/basic/generic/Cargo.toml.tmpl index d1477001..58473380 100644 --- a/crates/solverforge-cli/templates/basic/generic/Cargo.toml.tmpl +++ b/crates/solverforge-cli/templates/basic/generic/Cargo.toml.tmpl @@ -9,10 +9,12 @@ name = "{{crate_name}}" path = "src/main.rs" [dependencies] -solverforge = { version = "{{solverforge_version}}", features = ["serde"] } +solverforge = { version = "{{solverforge_version}}", features = ["serde", "console", "verbose-logging"] } +solverforge-ui = "{{solverforge_version}}" # Web server axum = "0.8" tokio = { version = "1", features = ["full"] } +tokio-stream = { version = "0.1", features = ["sync"] } tower-http = { version = "0.6", features = ["fs", "cors"] } tower = "0.5" @@ -23,7 +25,3 @@ serde_json = "1" # Utilities uuid = { version = "1", features = ["v4", 
"serde"] } parking_lot = "0.12" -tracing = "0.1" -tracing-subscriber = { version = "0.3", features = ["env-filter"] } -owo-colors = "4" -num-format = "0.4" diff --git a/crates/solverforge-cli/templates/basic/generic/solver.toml b/crates/solverforge-cli/templates/basic/generic/solver.toml index 72c1d025..7d53440b 100644 --- a/crates/solverforge-cli/templates/basic/generic/solver.toml +++ b/crates/solverforge-cli/templates/basic/generic/solver.toml @@ -1,2 +1,14 @@ +[[phases]] +type = "construction_heuristic" +construction_heuristic_type = "allocate_to_value_from_queue" + +[[phases]] +type = "local_search" +[phases.acceptor] +type = "late_acceptance" +late_acceptance_size = 400 +[phases.forager] +accepted_count_limit = 4 + [termination] seconds_spent_limit = 30 diff --git a/crates/solverforge-cli/templates/basic/generic/src/api/dto.rs b/crates/solverforge-cli/templates/basic/generic/src/api/dto.rs index c3e2e0e4..dff7fdae 100644 --- a/crates/solverforge-cli/templates/basic/generic/src/api/dto.rs +++ b/crates/solverforge-cli/templates/basic/generic/src/api/dto.rs @@ -8,17 +8,29 @@ use solverforge::SolverStatus; pub struct ResourceDto { pub index: usize, pub name: String, + pub capacity: i64, + pub affinity_group: String, } impl From<&Resource> for ResourceDto { fn from(r: &Resource) -> Self { - Self { index: r.index, name: r.name.clone() } + Self { + index: r.index, + name: r.name.clone(), + capacity: r.capacity, + affinity_group: r.affinity_group.clone(), + } } } impl ResourceDto { pub fn to_resource(&self) -> Resource { - Resource::new(self.index, &self.name) + Resource::new( + self.index, + &self.name, + self.capacity, + &self.affinity_group, + ) } } @@ -27,6 +39,8 @@ impl ResourceDto { pub struct TaskDto { pub id: String, pub name: String, + pub demand: i64, + pub preferred_group: String, pub resource: Option, } @@ -41,6 +55,32 @@ pub struct PlanDto { pub solver_status: Option, } +/// Constraint analysis result. 
+#[derive(Debug, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct ConstraintAnalysisDto { + pub name: String, + #[serde(rename = "type")] + pub constraint_type: String, + pub weight: String, + pub score: String, + pub matches: Vec, +} + +#[derive(Debug, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct ConstraintMatchDto { + pub score: String, + pub justification: String, +} + +#[derive(Debug, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct AnalyzeResponse { + pub score: String, + pub constraints: Vec, +} + impl PlanDto { pub fn from_plan(plan: &Plan, status: Option) -> Self { let resources: Vec = plan.resources.iter().map(ResourceDto::from).collect(); @@ -50,6 +90,8 @@ impl PlanDto { .map(|t| TaskDto { id: t.id.clone(), name: t.name.clone(), + demand: t.demand, + preferred_group: t.preferred_group.clone(), resource: t .resource_idx .and_then(|idx| plan.resources.get(idx)) @@ -75,6 +117,8 @@ impl PlanDto { .map(|t| Task { id: t.id.clone(), name: t.name.clone(), + demand: t.demand, + preferred_group: t.preferred_group.clone(), resource_idx: t .resource .as_ref() diff --git a/crates/solverforge-cli/templates/basic/generic/src/api/mod.rs b/crates/solverforge-cli/templates/basic/generic/src/api/mod.rs index deb063aa..97fe2445 100644 --- a/crates/solverforge-cli/templates/basic/generic/src/api/mod.rs +++ b/crates/solverforge-cli/templates/basic/generic/src/api/mod.rs @@ -1,4 +1,5 @@ mod dto; mod routes; +mod sse; pub use routes::{router, AppState}; diff --git a/crates/solverforge-cli/templates/basic/generic/src/api/routes.rs b/crates/solverforge-cli/templates/basic/generic/src/api/routes.rs index 10e6cdf3..ee957c45 100644 --- a/crates/solverforge-cli/templates/basic/generic/src/api/routes.rs +++ b/crates/solverforge-cli/templates/basic/generic/src/api/routes.rs @@ -1,16 +1,19 @@ use axum::{ extract::{Path, State}, http::StatusCode, - routing::{delete, get, post}, + routing::{delete, get, post, put}, Json, Router, }; +use 
serde::Serialize; use std::sync::Arc; use uuid::Uuid; -use super::dto::PlanDto; -use crate::data::demo_plan; +use super::dto::{AnalyzeResponse, ConstraintAnalysisDto, ConstraintMatchDto, PlanDto}; +use super::sse; +use crate::data::{generate, DemoData}; use crate::solver::{SolverService, SolverStatus}; +/// Shared application state. pub struct AppState { pub solver: SolverService, } @@ -22,32 +25,65 @@ impl AppState { } impl Default for AppState { - fn default() -> Self { - Self::new() - } + fn default() -> Self { Self::new() } } +/// Creates the API router. pub fn router(state: Arc) -> Router { Router::new() .route("/health", get(health)) - .route("/demo-data", get(get_demo_data)) - .route("/plans", post(create_plan)) - .route("/plans", get(list_plans)) - .route("/plans/{id}", get(get_plan)) - .route("/plans/{id}/status", get(get_plan_status)) - .route("/plans/{id}", delete(stop_solving)) + .route("/info", get(info)) + .route("/demo-data", get(list_demo_data)) + .route("/demo-data/{id}", get(get_demo_data)) + .route("/schedules", post(create_schedule)) + .route("/schedules", get(list_schedules)) + .route("/schedules/analyze", put(analyze_schedule)) + .route("/schedules/{id}", get(get_schedule)) + .route("/schedules/{id}/status", get(get_schedule_status)) + .route("/schedules/{id}/events", get(sse::events)) + .route("/schedules/{id}/analyze", get(analyze_by_id)) + .route("/schedules/{id}", delete(stop_solving)) .with_state(state) } -async fn health() -> &'static str { - "OK" +// ============================================================================ +// Handlers +// ============================================================================ + +#[derive(Serialize)] +struct HealthResponse { status: &'static str } + +async fn health() -> Json { + Json(HealthResponse { status: "UP" }) +} + +#[derive(Serialize)] +#[serde(rename_all = "camelCase")] +struct InfoResponse { + name: &'static str, + version: &'static str, + solver_engine: &'static str, +} + +async fn 
info() -> Json { + Json(InfoResponse { + name: env!("CARGO_PKG_NAME"), + version: env!("CARGO_PKG_VERSION"), + solver_engine: "SolverForge", + }) +} + +async fn list_demo_data() -> Json> { + Json(vec!["STANDARD", "SMALL"]) } -async fn get_demo_data() -> Json { - Json(PlanDto::from_plan(&demo_plan(), None)) +async fn get_demo_data(Path(id): Path) -> Result, StatusCode> { + let demo = id.parse::().map_err(|_| StatusCode::NOT_FOUND)?; + let plan = generate(demo); + Ok(Json(PlanDto::from_plan(&plan, None))) } -async fn create_plan( +async fn create_schedule( State(state): State>, Json(dto): Json, ) -> String { @@ -57,11 +93,11 @@ async fn create_plan( id } -async fn list_plans(State(state): State>) -> Json> { +async fn list_schedules(State(state): State>) -> Json> { Json(state.solver.list_jobs()) } -async fn get_plan( +async fn get_schedule( State(state): State>, Path(id): Path, ) -> Result, StatusCode> { @@ -76,14 +112,14 @@ async fn get_plan( } } -#[derive(serde::Serialize)] +#[derive(Serialize)] #[serde(rename_all = "camelCase")] struct StatusResponse { score: Option, solver_status: SolverStatus, } -async fn get_plan_status( +async fn get_schedule_status( State(state): State>, Path(id): Path, ) -> Result, StatusCode> { @@ -110,3 +146,80 @@ async fn stop_solving( StatusCode::NOT_FOUND } } + +async fn analyze_schedule(Json(dto): Json) -> Json { + use crate::constraints::create_constraints; + use solverforge::ConstraintSet; + use solverforge::ScoreDirector; + + let plan = dto.to_domain(); + let constraints = create_constraints(); + let mut director = ScoreDirector::new(plan, constraints); + let score = director.calculate_score(); + let analyses = director.constraints().evaluate_detailed(director.working_solution()); + + let constraints_dto: Vec = analyses + .into_iter() + .map(|a| ConstraintAnalysisDto { + name: a.constraint_ref.name.clone(), + constraint_type: if a.is_hard { "hard" } else { "soft" }.to_string(), + weight: format!("{}", a.weight), + score: 
format!("{}", a.score), + matches: a + .matches + .iter() + .map(|m| ConstraintMatchDto { + score: format!("{}", m.score), + justification: m.justification.description.clone(), + }) + .collect(), + }) + .collect(); + + Json(AnalyzeResponse { + score: format!("{}", score), + constraints: constraints_dto, + }) +} + +async fn analyze_by_id( + State(state): State>, + Path(id): Path, +) -> Result, StatusCode> { + use crate::constraints::create_constraints; + use solverforge::ConstraintSet; + use solverforge::ScoreDirector; + + let plan = state + .solver + .with_snapshot(&id, |plan, _score, _status| plan.clone()) + .ok_or(StatusCode::NOT_FOUND)?; + + let constraints = create_constraints(); + let mut director = ScoreDirector::new(plan, constraints); + let score = director.calculate_score(); + let analyses = director.constraints().evaluate_detailed(director.working_solution()); + + let constraints_dto: Vec = analyses + .into_iter() + .map(|a| ConstraintAnalysisDto { + name: a.constraint_ref.name.clone(), + constraint_type: if a.is_hard { "hard" } else { "soft" }.to_string(), + weight: format!("{}", a.weight), + score: format!("{}", a.score), + matches: a + .matches + .iter() + .map(|m| ConstraintMatchDto { + score: format!("{}", m.score), + justification: m.justification.description.clone(), + }) + .collect(), + }) + .collect(); + + Ok(Json(AnalyzeResponse { + score: format!("{}", score), + constraints: constraints_dto, + })) +} diff --git a/crates/solverforge-cli/templates/basic/generic/src/api/sse.rs b/crates/solverforge-cli/templates/basic/generic/src/api/sse.rs new file mode 100644 index 00000000..ef9f2889 --- /dev/null +++ b/crates/solverforge-cli/templates/basic/generic/src/api/sse.rs @@ -0,0 +1,41 @@ +use axum::{ + body::Body, + extract::{Path, State}, + http::{header, StatusCode}, + response::Response, +}; +use std::sync::Arc; +use tokio_stream::wrappers::BroadcastStream; +use tokio_stream::StreamExt; + +use super::routes::AppState; + +pub async fn events( + 
State(state): State>, + Path(id): Path, +) -> Result, StatusCode> { + let rx = state.solver.subscribe(&id).ok_or(StatusCode::NOT_FOUND)?; + + let bootstrap_json = state.solver.sse_snapshot(&id).unwrap_or_else(|| + r#"{"solverStatus":"SOLVING"}"#.to_string() + ); + let bootstrap = tokio_stream::iter(std::iter::once(Ok::<_, std::convert::Infallible>( + format!("data: {}\n\n", bootstrap_json).into_bytes(), + ))); + + let live = BroadcastStream::new(rx).filter_map(|msg| match msg { + Ok(json) => Some(Ok::<_, std::convert::Infallible>( + format!("data: {}\n\n", json).into_bytes(), + )), + Err(_) => None, // Lagged — skip missed messages + }); + + let stream = bootstrap.chain(live); + + Ok(Response::builder() + .header(header::CONTENT_TYPE, "text/event-stream") + .header(header::CACHE_CONTROL, "no-cache") + .header("X-Accel-Buffering", "no") + .body(Body::from_stream(stream)) + .unwrap()) +} diff --git a/crates/solverforge-cli/templates/basic/generic/src/constraints/affinity_match.rs b/crates/solverforge-cli/templates/basic/generic/src/constraints/affinity_match.rs new file mode 100644 index 00000000..a39b4985 --- /dev/null +++ b/crates/solverforge-cli/templates/basic/generic/src/constraints/affinity_match.rs @@ -0,0 +1,26 @@ +use crate::domain::{Plan, Task}; +use solverforge::prelude::*; +use solverforge::stream::joiner::equal_bi; +use solverforge::stream::vec; +use solverforge::IncrementalConstraint; + +/// SOFT: Prefer assignments whose affinity group matches the task preference. 
+pub fn constraint() -> impl IncrementalConstraint { + ConstraintFactory::::new() + .for_each(vec(|p: &Plan| &p.tasks)) + .join(( + vec(|p: &Plan| &p.resources), + equal_bi( + |task: &Task| task.resource_idx, + |resource: &crate::domain::Resource| Some(resource.index), + ), + )) + .penalize_with(|task: &Task, resource: &crate::domain::Resource| { + if task.preferred_group == resource.affinity_group { + HardSoftScore::ZERO + } else { + HardSoftScore::of(0, task.demand) + } + }) + .named("Affinity match") +} diff --git a/crates/solverforge-cli/templates/basic/generic/src/constraints/all_assigned.rs b/crates/solverforge-cli/templates/basic/generic/src/constraints/all_assigned.rs index 35d08b4b..b606d99e 100644 --- a/crates/solverforge-cli/templates/basic/generic/src/constraints/all_assigned.rs +++ b/crates/solverforge-cli/templates/basic/generic/src/constraints/all_assigned.rs @@ -1,6 +1,7 @@ -use crate::domain::{Plan, PlanConstraintStreams, Task}; +use crate::domain::{Plan, Task}; use solverforge::prelude::*; use solverforge::IncrementalConstraint; +use solverforge::stream::vec; /// HARD: Every task must be assigned to a resource. /// @@ -8,8 +9,8 @@ use solverforge::IncrementalConstraint; /// Common additions: capacity limits, skill matching, conflict avoidance, fairness. 
pub fn constraint() -> impl IncrementalConstraint { ConstraintFactory::::new() - .tasks() + .for_each(vec(|p: &Plan| &p.tasks)) .filter(|t: &Task| t.resource_idx.is_none()) - .penalize_hard() + .penalize(HardSoftScore::ONE_HARD) .named("All tasks assigned") } diff --git a/crates/solverforge-cli/templates/basic/generic/src/constraints/balanced_load.rs b/crates/solverforge-cli/templates/basic/generic/src/constraints/balanced_load.rs new file mode 100644 index 00000000..d87ef743 --- /dev/null +++ b/crates/solverforge-cli/templates/basic/generic/src/constraints/balanced_load.rs @@ -0,0 +1,16 @@ +use crate::domain::{Plan, Task}; +use solverforge::prelude::*; +use solverforge::IncrementalConstraint; +use solverforge::stream::vec; + +/// SOFT: Minimize variance in resource load (balanced assignment). +/// +/// Penalizes uneven task distribution across resources. This keeps the default +/// standard-variable template generic while giving local search a visible goal. +pub fn constraint() -> impl IncrementalConstraint { + ConstraintFactory::::new() + .for_each(vec(|p: &Plan| &p.tasks)) + .balance(|t: &Task| t.resource_idx) + .penalize(HardSoftScore::ONE_SOFT) + .named("Balanced load") +} diff --git a/crates/solverforge-cli/templates/basic/generic/src/constraints/capacity_limit.rs b/crates/solverforge-cli/templates/basic/generic/src/constraints/capacity_limit.rs new file mode 100644 index 00000000..4e70b746 --- /dev/null +++ b/crates/solverforge-cli/templates/basic/generic/src/constraints/capacity_limit.rs @@ -0,0 +1,151 @@ +use crate::domain::Plan; +use solverforge::prelude::*; +use solverforge::IncrementalConstraint; + +/// HARD: Assigned demand must not exceed resource capacity. +/// +/// This is implemented directly as an incremental constraint so the default +/// scaffold can express exact per-resource capacity limits without requiring +/// domain-specific shadow variables. 
+pub struct CapacityLimitConstraint { + loads: Vec, +} + +impl CapacityLimitConstraint { + pub fn new() -> Self { + Self { loads: Vec::new() } + } + + fn ensure_shape(&mut self, solution: &Plan) { + if self.loads.len() != solution.resources.len() { + self.loads = vec![0; solution.resources.len()]; + } + } + + fn overload(load: i64, capacity: i64) -> i64 { + (load - capacity).max(0) + } + + fn total_penalty(solution: &Plan, loads: &[i64]) -> i64 { + solution + .resources + .iter() + .zip(loads.iter().copied()) + .map(|(resource, load)| Self::overload(load, resource.capacity)) + .sum() + } + + fn update_load( + &mut self, + solution: &Plan, + entity_index: usize, + delta: i64, + ) -> HardSoftScore { + self.ensure_shape(solution); + let Some(task) = solution.tasks.get(entity_index) else { + return HardSoftScore::ZERO; + }; + let Some(resource_idx) = task.resource_idx else { + return HardSoftScore::ZERO; + }; + let Some(resource) = solution.resources.get(resource_idx) else { + return HardSoftScore::ZERO; + }; + + let before = Self::overload(self.loads[resource_idx], resource.capacity); + self.loads[resource_idx] += delta; + let after = Self::overload(self.loads[resource_idx], resource.capacity); + + HardSoftScore::of(-(after - before), 0) + } +} + +impl IncrementalConstraint for CapacityLimitConstraint { + fn evaluate(&self, solution: &Plan) -> HardSoftScore { + let mut loads = vec![0; solution.resources.len()]; + for task in &solution.tasks { + if let Some(resource_idx) = task.resource_idx { + if resource_idx < loads.len() { + loads[resource_idx] += task.demand; + } + } + } + HardSoftScore::of(-Self::total_penalty(solution, &loads), 0) + } + + fn match_count(&self, solution: &Plan) -> usize { + let mut loads = vec![0; solution.resources.len()]; + for task in &solution.tasks { + if let Some(resource_idx) = task.resource_idx { + if resource_idx < loads.len() { + loads[resource_idx] += task.demand; + } + } + } + solution + .resources + .iter() + 
.zip(loads.iter().copied()) + .filter(|(resource, load)| *load > resource.capacity) + .count() + } + + fn initialize(&mut self, solution: &Plan) -> HardSoftScore { + self.ensure_shape(solution); + for load in &mut self.loads { + *load = 0; + } + for task in &solution.tasks { + if let Some(resource_idx) = task.resource_idx { + if resource_idx < self.loads.len() { + self.loads[resource_idx] += task.demand; + } + } + } + HardSoftScore::of(-Self::total_penalty(solution, &self.loads), 0) + } + + fn on_insert( + &mut self, + solution: &Plan, + entity_index: usize, + descriptor_index: usize, + ) -> HardSoftScore { + if descriptor_index != 0 { + return HardSoftScore::ZERO; + } + self.update_load(solution, entity_index, solution.tasks[entity_index].demand) + } + + fn on_retract( + &mut self, + solution: &Plan, + entity_index: usize, + descriptor_index: usize, + ) -> HardSoftScore { + if descriptor_index != 0 { + return HardSoftScore::ZERO; + } + self.update_load(solution, entity_index, -solution.tasks[entity_index].demand) + } + + fn reset(&mut self) { + self.loads.clear(); + } + + fn name(&self) -> &str { + "Capacity limit" + } + + fn is_hard(&self) -> bool { + true + } + + fn weight(&self) -> HardSoftScore { + HardSoftScore::ONE_HARD + } +} + +pub fn constraint() -> impl IncrementalConstraint { + CapacityLimitConstraint::new() +} diff --git a/crates/solverforge-cli/templates/basic/generic/src/constraints/mod.rs b/crates/solverforge-cli/templates/basic/generic/src/constraints/mod.rs index 75f10d87..b716286b 100644 --- a/crates/solverforge-cli/templates/basic/generic/src/constraints/mod.rs +++ b/crates/solverforge-cli/templates/basic/generic/src/constraints/mod.rs @@ -7,6 +7,9 @@ use nested tuples for more. 
*/ mod all_assigned; +mod affinity_match; +mod balanced_load; +mod capacity_limit; pub use self::assemble::create_constraints; @@ -16,6 +19,11 @@ mod assemble { use solverforge::prelude::*; pub fn create_constraints() -> impl ConstraintSet { - (all_assigned::constraint(),) + ( + all_assigned::constraint(), + capacity_limit::constraint(), + affinity_match::constraint(), + balanced_load::constraint(), + ) } } diff --git a/crates/solverforge-cli/templates/basic/generic/src/data/mod.rs b/crates/solverforge-cli/templates/basic/generic/src/data/mod.rs index 0de233cd..0588e4fa 100644 --- a/crates/solverforge-cli/templates/basic/generic/src/data/mod.rs +++ b/crates/solverforge-cli/templates/basic/generic/src/data/mod.rs @@ -2,17 +2,68 @@ Replace this with your own data loading (CSV, JSON, database, …). */ +use std::str::FromStr; + use crate::domain::{Plan, Resource, Task}; -/// Returns a small demo plan so the app runs out of the box. -pub fn demo_plan() -> Plan { - let resources = vec![ - Resource::new(0, "Resource A"), - Resource::new(1, "Resource B"), - Resource::new(2, "Resource C"), - ]; - let tasks = (0..9) - .map(|i| Task::new(i.to_string(), format!("Task {}", i + 1))) +/// Available demo datasets. +#[derive(Debug, Clone, Copy)] +pub enum DemoData { + Small, + Standard, +} + +impl FromStr for DemoData { + type Err = (); + + fn from_str(s: &str) -> Result { + match s.to_uppercase().as_str() { + "SMALL" => Ok(DemoData::Small), + "STANDARD" => Ok(DemoData::Standard), + _ => Err(()), + } + } +} + +/// Generates a demo plan for the given dataset. 
+pub fn generate(demo: DemoData) -> Plan { + match demo { + DemoData::Small => generate_plan(6, 24), + DemoData::Standard => generate_plan(12, 84), + } +} + +fn generate_plan(n_resources: usize, n_tasks: usize) -> Plan { + let groups = ["Amber", "Blue", "Cyan", "Jade"]; + let resources: Vec = (0..n_resources) + .map(|i| { + let name = format!("Resource {}", (b'A' + i as u8) as char); + let capacity = 14 + ((i % 3) as i64 * 2); + let affinity_group = groups[i % groups.len()]; + Resource::new(i, name, capacity, affinity_group) + }) .collect(); + + let tasks: Vec = (0..n_tasks) + .map(|i| { + let demand = 1 + (i % 3) as i64; + let preferred_group = if i < n_tasks * 36 / 100 { + groups[0] + } else if i < n_tasks * 59 / 100 { + groups[1] + } else if i < n_tasks * 80 / 100 { + groups[2] + } else { + groups[3] + }; + Task::new( + i.to_string(), + format!("Task {}", i + 1), + demand, + preferred_group, + ) + }) + .collect(); + Plan::new(resources, tasks) } diff --git a/crates/solverforge-cli/templates/basic/generic/src/domain/mod.rs b/crates/solverforge-cli/templates/basic/generic/src/domain/mod.rs index a5f92a86..bb3751c1 100644 --- a/crates/solverforge-cli/templates/basic/generic/src/domain/mod.rs +++ b/crates/solverforge-cli/templates/basic/generic/src/domain/mod.rs @@ -2,6 +2,6 @@ mod plan; mod resource; mod task; -pub use plan::{Plan, PlanConstraintStreams}; +pub use plan::Plan; pub use resource::Resource; pub use task::Task; diff --git a/crates/solverforge-cli/templates/basic/generic/src/domain/resource.rs b/crates/solverforge-cli/templates/basic/generic/src/domain/resource.rs index 86f2978c..f87d052e 100644 --- a/crates/solverforge-cli/templates/basic/generic/src/domain/resource.rs +++ b/crates/solverforge-cli/templates/basic/generic/src/domain/resource.rs @@ -11,13 +11,24 @@ pub struct Resource { /// Index into `Plan.resources` — used for O(1) joins in constraints. pub index: usize, pub name: String, + /// Generic capacity budget consumed by assigned entities. 
+ pub capacity: i64, + /// Generic affinity label for assignment preferences. + pub affinity_group: String, } impl Resource { - pub fn new(index: usize, name: impl Into) -> Self { + pub fn new( + index: usize, + name: impl Into, + capacity: i64, + affinity_group: impl Into, + ) -> Self { Self { index, name: name.into(), + capacity, + affinity_group: affinity_group.into(), } } diff --git a/crates/solverforge-cli/templates/basic/generic/src/domain/task.rs b/crates/solverforge-cli/templates/basic/generic/src/domain/task.rs index f497741a..4af67642 100644 --- a/crates/solverforge-cli/templates/basic/generic/src/domain/task.rs +++ b/crates/solverforge-cli/templates/basic/generic/src/domain/task.rs @@ -11,6 +11,10 @@ pub struct Task { #[planning_id] pub id: String, pub name: String, + /// Generic load contribution applied to the assigned resource. + pub demand: i64, + /// Generic affinity label for preferred assignments. + pub preferred_group: String, /// Index into `Plan.resources`. `None` means unassigned. /// /// This is the planning variable the solver optimizes. 
@@ -20,10 +24,17 @@ pub struct Task { } impl Task { - pub fn new(id: impl Into, name: impl Into) -> Self { + pub fn new( + id: impl Into, + name: impl Into, + demand: i64, + preferred_group: impl Into, + ) -> Self { Self { id: id.into(), name: name.into(), + demand, + preferred_group: preferred_group.into(), resource_idx: None, } } diff --git a/crates/solverforge-cli/templates/basic/generic/src/main.rs.tmpl b/crates/solverforge-cli/templates/basic/generic/src/main.rs.tmpl index 2336762d..72abb362 100644 --- a/crates/solverforge-cli/templates/basic/generic/src/main.rs.tmpl +++ b/crates/solverforge-cli/templates/basic/generic/src/main.rs.tmpl @@ -4,21 +4,14 @@ use {{crate_name}}::api; -use owo_colors::OwoColorize; use std::net::SocketAddr; use std::sync::Arc; use tower_http::cors::{Any, CorsLayer}; use tower_http::services::ServeDir; -use tracing_subscriber::EnvFilter; #[tokio::main] async fn main() { - tracing_subscriber::fmt() - .with_env_filter( - EnvFilter::from_default_env() - .add_directive("{{crate_name}}=info".parse().unwrap()), - ) - .init(); + solverforge::init_console(); let state = Arc::new(api::AppState::new()); @@ -28,21 +21,13 @@ async fn main() { .allow_headers(Any); let app = api::router(state) + .merge(solverforge_ui::routes()) .fallback_service(ServeDir::new("static")) .layer(cors); let addr = SocketAddr::from(([0, 0, 0, 0], 7860)); - println!( - "{} {} listening on {}", - "▸".bright_green(), - "{{project_name}}".bright_white().bold(), - format!("http://{}", addr).bright_cyan().underline() - ); - println!( - "{} Open {} in your browser\n", - "▸".bright_green(), - "http://localhost:7860".bright_cyan().underline() - ); + println!("▸ {{project_name}} listening on http://{}", addr); + println!("▸ Open http://localhost:7860 in your browser\n"); let listener = tokio::net::TcpListener::bind(addr).await.unwrap(); axum::serve(listener, app).await.unwrap(); diff --git a/crates/solverforge-cli/templates/basic/generic/src/solver/service.rs 
b/crates/solverforge-cli/templates/basic/generic/src/solver/service.rs index 994d533a..4edd8da5 100644 --- a/crates/solverforge-cli/templates/basic/generic/src/solver/service.rs +++ b/crates/solverforge-cli/templates/basic/generic/src/solver/service.rs @@ -1,6 +1,7 @@ -use parking_lot::RwLock; use std::collections::HashMap; -use tokio::sync::mpsc; +use std::sync::Arc; +use parking_lot::RwLock; +use tokio::sync::{broadcast, mpsc}; use solverforge::{HardSoftScore, SolverManager, SolverStatus}; @@ -9,52 +10,74 @@ use crate::domain::Plan; // Static manager — must be 'static for SolverManager::solve. static MANAGER: SolverManager = SolverManager::new(); +fn sse_payload(score: Option, status: SolverStatus, mps: u64) -> String { + let score_str = score.map(|s| format!("{}", s)); + let status_str = match status { + SolverStatus::Solving => "SOLVING", + SolverStatus::NotSolving => "NOT_SOLVING", + }; + match score_str { + Some(s) => format!(r#"{{"score":"{}","solverStatus":"{}","movesPerSecond":{}}}"#, s, status_str, mps), + None => format!(r#"{{"score":null,"solverStatus":"{}","movesPerSecond":{}}}"#, status_str, mps), + } +} + struct JobState { slot_id: usize, latest: Option, score: Option, - receiver: mpsc::UnboundedReceiver<(Plan, HardSoftScore)>, status: SolverStatus, + sse_tx: broadcast::Sender, } /// Manages solving jobs using the framework SolverManager. 
pub struct SolverService { - jobs: RwLock>, + jobs: Arc>>, } impl SolverService { pub fn new() -> Self { - Self { jobs: RwLock::new(HashMap::new()) } + Self { jobs: Arc::new(RwLock::new(HashMap::new())) } } pub fn start_solving(&self, id: String, plan: Plan) { let (slot_id, receiver) = MANAGER.solve(plan); + let (sse_tx, _) = broadcast::channel(64); let state = JobState { slot_id, latest: None, score: None, - receiver, status: SolverStatus::Solving, + sse_tx: sse_tx.clone(), }; - self.jobs.write().insert(id, state); + self.jobs.write().insert(id.clone(), state); + + let jobs = Arc::clone(&self.jobs); + tokio::spawn(async move { + drain_receiver(jobs, id, slot_id, sse_tx, receiver).await; + }); } - // Polls the channel and calls `f` with the latest plan. pub fn with_snapshot( &self, id: &str, f: impl FnOnce(&Plan, Option, SolverStatus) -> R, ) -> Option { - let mut jobs = self.jobs.write(); - let state = jobs.get_mut(id)?; - while let Ok((solution, score)) = state.receiver.try_recv() { - state.latest = Some(solution); - state.score = Some(score); - } - state.status = MANAGER.get_status(state.slot_id); + let jobs = self.jobs.read(); + let state = jobs.get(id)?; Some(f(state.latest.as_ref()?, state.score, state.status)) } + pub fn subscribe(&self, id: &str) -> Option> { + self.jobs.read().get(id).map(|s| s.sse_tx.subscribe()) + } + + pub fn sse_snapshot(&self, id: &str) -> Option { + let jobs = self.jobs.read(); + let state = jobs.get(id)?; + Some(sse_payload(state.score, state.status, 0)) + } + pub fn has_job(&self, id: &str) -> bool { self.jobs.read().contains_key(id) } @@ -80,6 +103,34 @@ impl SolverService { } } +async fn drain_receiver( + jobs: Arc>>, + id: String, + slot_id: usize, + sse_tx: broadcast::Sender, + mut receiver: mpsc::UnboundedReceiver<(Plan, HardSoftScore, u64)>, +) { + let mut last_mps = 0u64; + while let Some((solution, score, mps)) = receiver.recv().await { + last_mps = mps; + let _ = sse_tx.send(sse_payload(Some(score), SolverStatus::Solving, 
mps)); + let mut jobs = jobs.write(); + if let Some(state) = jobs.get_mut(&id) { + state.latest = Some(solution); + state.score = Some(score); + } + } + let _ = sse_tx.send(sse_payload( + jobs.read().get(&id).and_then(|s| s.score), + SolverStatus::NotSolving, + last_mps, + )); + let mut jobs = jobs.write(); + if let Some(state) = jobs.get_mut(&id) { + state.status = MANAGER.get_status(slot_id); + } +} + impl Default for SolverService { fn default() -> Self { Self::new() diff --git a/crates/solverforge-cli/templates/basic/generic/static/app.js b/crates/solverforge-cli/templates/basic/generic/static/app.js new file mode 100644 index 00000000..7feb0c5f --- /dev/null +++ b/crates/solverforge-cli/templates/basic/generic/static/app.js @@ -0,0 +1,345 @@ +/* app.js — {{project_name}} SolverForge UI */ + +(async function () { + 'use strict'; + + var config = await fetch('/sf-config.json').then(function (r) { return r.json(); }); + + var app = document.getElementById('sf-app'); + + // Backend and solver + var backend = SF.createBackend({ baseUrl: '' }); + var statusBar = SF.createStatusBar({ constraints: config.constraints }); + var solver = SF.createSolver({ + backend: backend, + statusBar: statusBar, + onUpdate: function (data) { renderHero(data); renderTables(data); }, + onComplete: function (data) { renderHero(data); renderTables(data); }, + }); + + // Header + var header = SF.createHeader({ + logo: '/sf/img/solverforge-horizontal.svg', + title: config.title, + subtitle: config.subtitle, + tabs: [ + { id: 'hero', label: heroLabel(), icon: heroIcon(), active: true }, + { id: 'data', label: 'Data', icon: 'fa-table' }, + { id: 'api', label: 'REST API', icon: 'fa-book' }, + ], + actions: { + onSolve: function () { loadAndSolve(); }, + onStop: function () { solver.stop(); }, + onAnalyze: function () { openAnalysis(); }, + }, + onTabChange: function (tab) { + heroPanel.style.display = tab === 'hero' ? '' : 'none'; + dataPanel.style.display = tab === 'data' ? 
'' : 'none'; + apiPanel.style.display = tab === 'api' ? '' : 'none'; + }, + }); + app.appendChild(header); + app.appendChild(statusBar.el); + + // Hero panel + var heroPanel = SF.el('div', { className: 'sf-content' }); + var heroContainer = SF.el('div', { id: 'sf-hero' }); + heroPanel.appendChild(heroContainer); + app.appendChild(heroPanel); + + // Data panel + var dataPanel = SF.el('div', { className: 'sf-content', style: { display: 'none' } }); + var tablesContainer = SF.el('div', { id: 'sf-tables' }); + dataPanel.appendChild(tablesContainer); + app.appendChild(dataPanel); + + // API panel + var apiPanel = SF.el('div', { className: 'sf-content', style: { display: 'none' } }); + var guide = SF.createApiGuide({ + endpoints: [ + { method: 'GET', path: '/demo-data/STANDARD', description: 'Fetch demo data', curl: 'curl http://localhost:7860/demo-data/STANDARD' }, + { method: 'POST', path: '/schedules', description: 'Submit a plan for solving', curl: 'curl -X POST -H "Content-Type: application/json" http://localhost:7860/schedules -d @plan.json' }, + { method: 'GET', path: '/schedules/{id}', description: 'Get current best solution', curl: 'curl http://localhost:7860/schedules/{id}' }, + { method: 'GET', path: '/schedules/{id}/events', description: 'Stream solver updates (SSE)', curl: 'curl -N http://localhost:7860/schedules/{id}/events' }, + { method: 'GET', path: '/schedules/{id}/analyze', description: 'Get constraint analysis', curl: 'curl http://localhost:7860/schedules/{id}/analyze' }, + { method: 'DELETE', path: '/schedules/{id}', description: 'Stop solving and remove job', curl: 'curl -X DELETE http://localhost:7860/schedules/{id}' }, + ], + }); + apiPanel.appendChild(guide); + app.appendChild(apiPanel); + + // Footer + var footer = SF.createFooter({ + links: [ + { label: 'SolverForge', url: 'https://www.solverforge.org' }, + { label: 'Docs', url: 'https://www.solverforge.org/docs' }, + ], + }); + app.appendChild(footer); + + // Analysis modal + var analysisModal 
= SF.createModal({ title: 'Score Analysis', width: '700px' }); + + // Load demo data on startup + fetch('/demo-data/STANDARD') + .then(function (r) { return r.json(); }) + .then(function (data) { renderHero(data); renderTables(data); }) + .catch(function () {}); + + function loadAndSolve() { + fetch('/demo-data/STANDARD') + .then(function (r) { return r.json(); }) + .then(function (data) { solver.start(data); }) + .catch(function (err) { console.error('Demo load failed:', err); }); + } + + function openAnalysis() { + var id = solver.getJobId(); + if (!id) return; + backend.analyze(id) + .then(function (analysis) { + analysisModal.setBody(buildAnalysisHtml(analysis)); + analysisModal.open(); + }) + .catch(function () {}); + } + + function buildAnalysisHtml(analysis) { + if (!analysis || !analysis.constraints) return '

No analysis available.

'; + var html = '

Score: ' + SF.escHtml(analysis.score) + '

'; + html += ''; + analysis.constraints.forEach(function (c) { + html += ''; + }); + html += '
ConstraintTypeScoreMatches
' + SF.escHtml(c.name) + '' + SF.escHtml(c.type) + '' + SF.escHtml(c.score) + '' + (c.matches ? c.matches.length : 0) + '
'; + return html; + } + + function heroLabel() { + return isTimetableView() ? 'Timetable' : 'Assignments'; + } + + function heroIcon() { + return isTimetableView() ? 'fa-calendar-days' : 'fa-table-cells-large'; + } + + function isTimetableView() { + return config.view && config.view.type === 'timetable'; + } + + function renderHero(data) { + if (isTimetableView()) { + renderTimetable(data); + } else { + renderAssignmentBoard(data); + } + } + + function renderTimetable(data) { + heroContainer.innerHTML = ''; + var resources = data.resources || []; + var tasks = data.tasks || []; + if (!resources.length) return; + + var fields = config.view && config.view.fields ? config.view.fields : {}; + var startField = fields.start; + var endField = fields.end; + var labelField = fields.label || 'name'; + var positionedTasks = tasks.filter(function (task) { + return typeof task[startField] === 'number' && typeof task[endField] === 'number'; + }); + var maxEnd = positionedTasks.reduce(function (maxValue, task) { + return Math.max(maxValue, task[endField]); + }, 0); + var numSlots = Math.max(maxEnd, 1); + + var hdr = SF.rail.createHeader({ + label: config.facts[0] ? 
config.facts[0].label : 'Resource', + labelWidth: 160, + columns: Array.from({ length: numSlots }, function (_, i) { return 'Slot ' + (i + 1); }), + }); + heroContainer.appendChild(hdr); + + resources.forEach(function (res) { + var assigned = tasks.filter(function (t) { + return t.resource && t.resource.name === res.name; + }); + var card = SF.rail.createCard({ + id: 'res-' + res.index, + name: res.name, + labelWidth: 160, + columns: numSlots, + stats: [{ label: 'Tasks', value: assigned.length }], + }); + assigned.forEach(function (task) { + if (typeof task[startField] !== 'number' || typeof task[endField] !== 'number') return; + card.addBlock({ + label: String(task[labelField] || task.name || task.id || 'Item'), + start: task[startField], + end: task[endField], + horizon: numSlots, + color: SF.colors.pick(String(task[labelField] || task.name || task.id || 'Item')), + }); + }); + heroContainer.appendChild(card.el); + }); + } + + function renderAssignmentBoard(data) { + heroContainer.innerHTML = ''; + var resources = data.resources || []; + var tasks = data.tasks || []; + var assignedByResource = {}; + var totalDemand = 0; + var assignedDemand = 0; + var affinityMatches = 0; + + resources.forEach(function (res) { + assignedByResource[res.name] = []; + }); + + tasks.forEach(function (task) { + totalDemand += Number(task.demand || 0); + var resourceName = task.resource && task.resource.name; + if (resourceName && assignedByResource[resourceName]) { + assignedByResource[resourceName].push(task); + assignedDemand += Number(task.demand || 0); + if (task.resource.affinityGroup === task.preferredGroup) affinityMatches += 1; + } + }); + + var totalCapacity = resources.reduce(function (sum, resource) { + return sum + Number(resource.capacity || 0); + }, 0); + + var summary = SF.el('div', { className: 'sf-section' }); + summary.appendChild(SF.el('h3', null, 'Assignment Overview')); + summary.appendChild(SF.createTable({ + columns: ['Metric', 'Value'], + rows: [ + 
['Resources', String(resources.length)], + ['Tasks', String(tasks.length)], + ['Total capacity', String(totalCapacity)], + ['Total demand', String(totalDemand)], + ['Assigned', String(tasks.filter(function (task) { return !!task.resource; }).length)], + ['Assigned demand', String(assignedDemand)], + ['Affinity matches', String(affinityMatches)], + ['Unassigned', String(tasks.filter(function (task) { return !task.resource; }).length)], + ], + })); + heroContainer.appendChild(summary); + + resources + .slice() + .sort(function (a, b) { + return resourceLoad(assignedByResource[b.name]) - resourceLoad(assignedByResource[a.name]); + }) + .forEach(function (res) { + heroContainer.appendChild(buildAssignmentSection( + res, + assignedByResource[res.name], + 'Tasks' + )); + }); + + var unassigned = tasks.filter(function (task) { return !task.resource; }); + if (unassigned.length) { + heroContainer.appendChild(buildAssignmentSection({ + name: 'Unassigned', + capacity: 0, + affinityGroup: '—', + }, unassigned, 'Tasks')); + } + } + + function resourceLoad(tasks) { + return tasks.reduce(function (sum, task) { + return sum + Number(task.demand || 0); + }, 0); + } + + function buildAssignmentSection(resource, tasks, statLabel) { + var section = SF.el('div', { className: 'sf-section' }); + var load = resourceLoad(tasks); + var title = resource.name; + if (resource.capacity) { + title += ' (' + load + '/' + resource.capacity + ' load)'; + } else { + title += ' (' + tasks.length + ')'; + } + section.appendChild(SF.el('h3', null, title)); + if (!tasks.length) { + section.appendChild(SF.el('p', null, 'No assigned entities.')); + return section; + } + var matches = tasks.filter(function (task) { + return task.preferredGroup === resource.affinityGroup; + }).length; + section.appendChild(SF.createTable({ + columns: ['Affinity group', 'Capacity', 'Load', 'Preference matches'], + rows: [[ + resource.affinityGroup || '—', + String(resource.capacity || 0), + String(load), + String(matches), 
+ ]], + })); + section.appendChild(SF.createTable({ + columns: ['Entity', 'Id', 'Demand', 'Preferred group', statLabel], + rows: tasks + .slice() + .sort(function (a, b) { return Number(b.demand || 0) - Number(a.demand || 0); }) + .map(function (task, index) { + return [ + task.name || 'Unnamed', + task.id || '—', + String(task.demand || 0), + task.preferredGroup || '—', + String(index + 1), + ]; + }), + })); + return section; + } + + function renderTables(data) { + tablesContainer.innerHTML = ''; + + config.entities.forEach(function (entity) { + var items = data[entity.plural] || data[entity.name + 's'] || []; + if (!items.length) return; + var cols = Object.keys(items[0]); + var rows = items.map(function (item) { + return cols.map(function (k) { + var v = item[k]; + if (v === null || v === undefined) return '—'; + if (typeof v === 'object') return JSON.stringify(v); + return String(v); + }); + }); + var section = SF.el('div', { className: 'sf-section' }); + section.appendChild(SF.el('h3', null, entity.label)); + section.appendChild(SF.createTable({ columns: cols, rows: rows })); + tablesContainer.appendChild(section); + }); + + config.facts.forEach(function (fact) { + var items = data[fact.plural] || data[fact.name + 's'] || []; + if (!items.length) return; + var cols = Object.keys(items[0]); + var rows = items.map(function (item) { + return cols.map(function (k) { + var v = item[k]; + if (v === null || v === undefined) return '—'; + if (typeof v === 'object') return JSON.stringify(v); + return String(v); + }); + }); + var section = SF.el('div', { className: 'sf-section' }); + section.appendChild(SF.el('h3', null, fact.label)); + section.appendChild(SF.createTable({ columns: cols, rows: rows })); + tablesContainer.appendChild(section); + }); + } + +})(); diff --git a/crates/solverforge-cli/templates/basic/generic/static/index.html b/crates/solverforge-cli/templates/basic/generic/static/index.html index 0ae6b636..a808a71d 100644 --- 
a/crates/solverforge-cli/templates/basic/generic/static/index.html +++ b/crates/solverforge-cli/templates/basic/generic/static/index.html @@ -3,82 +3,15 @@ - SolverForge — Standard Variable - + {{project_name}} — SolverForge + + + + -

SolverForge — Standard Variable

-

This is the generic standard variable skeleton. Replace Resource, Task, and Plan with your domain types.

- - - - - -
- - +
+ + diff --git a/crates/solverforge-cli/templates/basic/generic/static/sf-config.json b/crates/solverforge-cli/templates/basic/generic/static/sf-config.json new file mode 100644 index 00000000..bf20c008 --- /dev/null +++ b/crates/solverforge-cli/templates/basic/generic/static/sf-config.json @@ -0,0 +1,19 @@ +{ + "title": "{{project_name}}", + "subtitle": "Constraint Optimizer", + "constraints": ["all_assigned", "capacity_limit", "affinity_match", "balanced_load"], + "entities": [ + {"name": "task", "label": "Tasks", "plural": "tasks"} + ], + "facts": [ + {"name": "resource", "label": "Resources", "plural": "resources"} + ], + "view": { + "type": "assignment_board", + "resource": "resource", + "task": "task", + "fields": { + "label": "name" + } + } +} diff --git a/crates/solverforge-cli/templates/list/generic/Cargo.toml.tmpl b/crates/solverforge-cli/templates/list/generic/Cargo.toml.tmpl new file mode 100644 index 00000000..58473380 --- /dev/null +++ b/crates/solverforge-cli/templates/list/generic/Cargo.toml.tmpl @@ -0,0 +1,27 @@ +[package] +name = "{{project_name}}" +version = "0.1.0" +edition = "2021" +description = "Constraint optimizer built with SolverForge" + +[[bin]] +name = "{{crate_name}}" +path = "src/main.rs" + +[dependencies] +solverforge = { version = "{{solverforge_version}}", features = ["serde", "console", "verbose-logging"] } +solverforge-ui = { version = "{{solverforge_version}}" } +# Web server +axum = "0.8" +tokio = { version = "1", features = ["full"] } +tokio-stream = { version = "0.1", features = ["sync"] } +tower-http = { version = "0.6", features = ["fs", "cors"] } +tower = "0.5" + +# Serialization +serde = { version = "1", features = ["derive"] } +serde_json = "1" + +# Utilities +uuid = { version = "1", features = ["v4", "serde"] } +parking_lot = "0.12" diff --git a/crates/solverforge-cli/templates/list/generic/solver.toml b/crates/solverforge-cli/templates/list/generic/solver.toml new file mode 100644 index 00000000..88c3b870
--- /dev/null +++ b/crates/solverforge-cli/templates/list/generic/solver.toml @@ -0,0 +1,14 @@ +[[phases]] +type = "construction_heuristic" +construction_heuristic_type = "list_cheapest_insertion" + +[[phases]] +type = "local_search" +[phases.acceptor] +type = "late_acceptance" +late_acceptance_size = 400 +[phases.forager] +accepted_count_limit = 4 + +[termination] +seconds_spent_limit = 30 diff --git a/crates/solverforge-cli/templates/list/generic/src/api/dto.rs b/crates/solverforge-cli/templates/list/generic/src/api/dto.rs new file mode 100644 index 00000000..17ece430 --- /dev/null +++ b/crates/solverforge-cli/templates/list/generic/src/api/dto.rs @@ -0,0 +1,114 @@ +use serde::{Deserialize, Serialize}; + +use crate::domain::{Container, Item, Plan}; +use solverforge::SolverStatus; + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct ItemDto { + pub index: usize, + pub name: String, +} + +impl From<&Item> for ItemDto { + fn from(i: &Item) -> Self { + Self { index: i.index, name: i.name.clone() } + } +} + +impl ItemDto { + pub fn to_item(&self) -> Item { + Item::new(self.index, &self.name) + } +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct ContainerDto { + pub id: usize, + pub name: String, + /// Item names in sequence order. + pub items: Vec, +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct PlanDto { + pub items: Vec, + pub containers: Vec, + #[serde(default)] + pub score: Option, + #[serde(default)] + pub solver_status: Option, +} + +/// Constraint analysis result. 
+#[derive(Debug, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct ConstraintAnalysisDto { + pub name: String, + #[serde(rename = "type")] + pub constraint_type: String, + pub weight: String, + pub score: String, + pub matches: Vec, +} + +#[derive(Debug, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct ConstraintMatchDto { + pub score: String, + pub justification: String, +} + +#[derive(Debug, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct AnalyzeResponse { + pub score: String, + pub constraints: Vec, +} + +impl PlanDto { + pub fn from_plan(plan: &Plan, status: Option) -> Self { + let items: Vec = plan.item_facts.iter().map(ItemDto::from).collect(); + let containers: Vec = plan + .containers + .iter() + .map(|c| ContainerDto { + id: c.id, + name: c.name.clone(), + items: c + .items + .iter() + .filter_map(|&idx| plan.item_facts.get(idx)) + .map(|item| item.name.clone()) + .collect(), + }) + .collect(); + Self { + items, + containers, + score: plan.score.map(|s| s.to_string()), + solver_status: status, + } + } + + pub fn to_domain(&self) -> Plan { + let item_facts: Vec = self.items.iter().map(ItemDto::to_item).collect(); + let name_to_idx: std::collections::HashMap<&str, usize> = + item_facts.iter().map(|i| (i.name.as_str(), i.index)).collect(); + let containers: Vec = self + .containers + .iter() + .map(|c| { + let items: Vec = c + .items + .iter() + .filter_map(|name| name_to_idx.get(name.as_str()).copied()) + .collect(); + Container { id: c.id, name: c.name.clone(), items } + }) + .collect(); + Plan::new(item_facts, containers) + } +} diff --git a/crates/solverforge-cli/templates/list/generic/src/api/mod.rs b/crates/solverforge-cli/templates/list/generic/src/api/mod.rs new file mode 100644 index 00000000..97fe2445 --- /dev/null +++ b/crates/solverforge-cli/templates/list/generic/src/api/mod.rs @@ -0,0 +1,5 @@ +mod dto; +mod routes; +mod sse; + +pub use routes::{router, AppState}; diff --git 
a/crates/solverforge-cli/templates/list/generic/src/api/routes.rs b/crates/solverforge-cli/templates/list/generic/src/api/routes.rs new file mode 100644 index 00000000..ee957c45 --- /dev/null +++ b/crates/solverforge-cli/templates/list/generic/src/api/routes.rs @@ -0,0 +1,225 @@ +use axum::{ + extract::{Path, State}, + http::StatusCode, + routing::{delete, get, post, put}, + Json, Router, +}; +use serde::Serialize; +use std::sync::Arc; +use uuid::Uuid; + +use super::dto::{AnalyzeResponse, ConstraintAnalysisDto, ConstraintMatchDto, PlanDto}; +use super::sse; +use crate::data::{generate, DemoData}; +use crate::solver::{SolverService, SolverStatus}; + +/// Shared application state. +pub struct AppState { + pub solver: SolverService, +} + +impl AppState { + pub fn new() -> Self { + Self { solver: SolverService::new() } + } +} + +impl Default for AppState { + fn default() -> Self { Self::new() } +} + +/// Creates the API router. +pub fn router(state: Arc) -> Router { + Router::new() + .route("/health", get(health)) + .route("/info", get(info)) + .route("/demo-data", get(list_demo_data)) + .route("/demo-data/{id}", get(get_demo_data)) + .route("/schedules", post(create_schedule)) + .route("/schedules", get(list_schedules)) + .route("/schedules/analyze", put(analyze_schedule)) + .route("/schedules/{id}", get(get_schedule)) + .route("/schedules/{id}/status", get(get_schedule_status)) + .route("/schedules/{id}/events", get(sse::events)) + .route("/schedules/{id}/analyze", get(analyze_by_id)) + .route("/schedules/{id}", delete(stop_solving)) + .with_state(state) +} + +// ============================================================================ +// Handlers +// ============================================================================ + +#[derive(Serialize)] +struct HealthResponse { status: &'static str } + +async fn health() -> Json { + Json(HealthResponse { status: "UP" }) +} + +#[derive(Serialize)] +#[serde(rename_all = "camelCase")] +struct InfoResponse { + name: 
&'static str, + version: &'static str, + solver_engine: &'static str, +} + +async fn info() -> Json { + Json(InfoResponse { + name: env!("CARGO_PKG_NAME"), + version: env!("CARGO_PKG_VERSION"), + solver_engine: "SolverForge", + }) +} + +async fn list_demo_data() -> Json> { + Json(vec!["STANDARD", "SMALL"]) +} + +async fn get_demo_data(Path(id): Path) -> Result, StatusCode> { + let demo = id.parse::().map_err(|_| StatusCode::NOT_FOUND)?; + let plan = generate(demo); + Ok(Json(PlanDto::from_plan(&plan, None))) +} + +async fn create_schedule( + State(state): State>, + Json(dto): Json, +) -> String { + let id = Uuid::new_v4().to_string(); + let plan = dto.to_domain(); + state.solver.start_solving(id.clone(), plan); + id +} + +async fn list_schedules(State(state): State>) -> Json> { + Json(state.solver.list_jobs()) +} + +async fn get_schedule( + State(state): State>, + Path(id): Path, +) -> Result, StatusCode> { + if !state.solver.has_job(&id) { + return Err(StatusCode::NOT_FOUND); + } + match state.solver.with_snapshot(&id, |plan, _score, status| { + PlanDto::from_plan(plan, Some(status)) + }) { + Some(dto) => Ok(Json(dto)), + None => Err(StatusCode::NOT_FOUND), + } +} + +#[derive(Serialize)] +#[serde(rename_all = "camelCase")] +struct StatusResponse { + score: Option, + solver_status: SolverStatus, +} + +async fn get_schedule_status( + State(state): State>, + Path(id): Path, +) -> Result, StatusCode> { + if !state.solver.has_job(&id) { + return Err(StatusCode::NOT_FOUND); + } + match state.solver.with_snapshot(&id, |plan, _score, status| StatusResponse { + score: plan.score.map(|s| s.to_string()), + solver_status: status, + }) { + Some(resp) => Ok(Json(resp)), + None => Err(StatusCode::NOT_FOUND), + } +} + +async fn stop_solving( + State(state): State>, + Path(id): Path, +) -> StatusCode { + state.solver.stop_solving(&id); + if state.solver.remove_job(&id) { + StatusCode::NO_CONTENT + } else { + StatusCode::NOT_FOUND + } +} + +async fn analyze_schedule(Json(dto): 
Json) -> Json { + use crate::constraints::create_constraints; + use solverforge::ConstraintSet; + use solverforge::ScoreDirector; + + let plan = dto.to_domain(); + let constraints = create_constraints(); + let mut director = ScoreDirector::new(plan, constraints); + let score = director.calculate_score(); + let analyses = director.constraints().evaluate_detailed(director.working_solution()); + + let constraints_dto: Vec = analyses + .into_iter() + .map(|a| ConstraintAnalysisDto { + name: a.constraint_ref.name.clone(), + constraint_type: if a.is_hard { "hard" } else { "soft" }.to_string(), + weight: format!("{}", a.weight), + score: format!("{}", a.score), + matches: a + .matches + .iter() + .map(|m| ConstraintMatchDto { + score: format!("{}", m.score), + justification: m.justification.description.clone(), + }) + .collect(), + }) + .collect(); + + Json(AnalyzeResponse { + score: format!("{}", score), + constraints: constraints_dto, + }) +} + +async fn analyze_by_id( + State(state): State>, + Path(id): Path, +) -> Result, StatusCode> { + use crate::constraints::create_constraints; + use solverforge::ConstraintSet; + use solverforge::ScoreDirector; + + let plan = state + .solver + .with_snapshot(&id, |plan, _score, _status| plan.clone()) + .ok_or(StatusCode::NOT_FOUND)?; + + let constraints = create_constraints(); + let mut director = ScoreDirector::new(plan, constraints); + let score = director.calculate_score(); + let analyses = director.constraints().evaluate_detailed(director.working_solution()); + + let constraints_dto: Vec = analyses + .into_iter() + .map(|a| ConstraintAnalysisDto { + name: a.constraint_ref.name.clone(), + constraint_type: if a.is_hard { "hard" } else { "soft" }.to_string(), + weight: format!("{}", a.weight), + score: format!("{}", a.score), + matches: a + .matches + .iter() + .map(|m| ConstraintMatchDto { + score: format!("{}", m.score), + justification: m.justification.description.clone(), + }) + .collect(), + }) + .collect(); + + 
Ok(Json(AnalyzeResponse { + score: format!("{}", score), + constraints: constraints_dto, + })) +} diff --git a/crates/solverforge-cli/templates/list/generic/src/api/sse.rs b/crates/solverforge-cli/templates/list/generic/src/api/sse.rs new file mode 100644 index 00000000..ef9f2889 --- /dev/null +++ b/crates/solverforge-cli/templates/list/generic/src/api/sse.rs @@ -0,0 +1,41 @@ +use axum::{ + body::Body, + extract::{Path, State}, + http::{header, StatusCode}, + response::Response, +}; +use std::sync::Arc; +use tokio_stream::wrappers::BroadcastStream; +use tokio_stream::StreamExt; + +use super::routes::AppState; + +pub async fn events( + State(state): State>, + Path(id): Path, +) -> Result, StatusCode> { + let rx = state.solver.subscribe(&id).ok_or(StatusCode::NOT_FOUND)?; + + let bootstrap_json = state.solver.sse_snapshot(&id).unwrap_or_else(|| + r#"{"solverStatus":"SOLVING"}"#.to_string() + ); + let bootstrap = tokio_stream::iter(std::iter::once(Ok::<_, std::convert::Infallible>( + format!("data: {}\n\n", bootstrap_json).into_bytes(), + ))); + + let live = BroadcastStream::new(rx).filter_map(|msg| match msg { + Ok(json) => Some(Ok::<_, std::convert::Infallible>( + format!("data: {}\n\n", json).into_bytes(), + )), + Err(_) => None, // Lagged — skip missed messages + }); + + let stream = bootstrap.chain(live); + + Ok(Response::builder() + .header(header::CONTENT_TYPE, "text/event-stream") + .header(header::CACHE_CONTROL, "no-cache") + .header("X-Accel-Buffering", "no") + .body(Body::from_stream(stream)) + .unwrap()) +} diff --git a/crates/solverforge-cli/templates/list/generic/src/constraints/balanced_load.rs b/crates/solverforge-cli/templates/list/generic/src/constraints/balanced_load.rs new file mode 100644 index 00000000..e931060a --- /dev/null +++ b/crates/solverforge-cli/templates/list/generic/src/constraints/balanced_load.rs @@ -0,0 +1,21 @@ +use crate::domain::{Container, Plan}; +use solverforge::prelude::*; +use solverforge::IncrementalConstraint; +use 
solverforge::stream::vec; + +/// SOFT: Minimize variance in container load (balanced distribution). +/// +/// Penalizes the square of each container's item count — the solver minimizes +/// the sum, which is equivalent to minimizing variance across containers. +/// +/// Replace or extend this with constraints that reflect your problem's rules. +/// Common additions: capacity limits, ordering requirements, conflict avoidance. +pub fn constraint() -> impl IncrementalConstraint { + ConstraintFactory::::new() + .for_each(vec(|p: &Plan| &p.containers)) + .penalize_with(|c: &Container| { + let load = c.items.len() as i64; + HardSoftScore::of(0, load * load) + }) + .named("Balanced load") +} diff --git a/crates/solverforge-cli/templates/list/generic/src/constraints/mod.rs b/crates/solverforge-cli/templates/list/generic/src/constraints/mod.rs new file mode 100644 index 00000000..53820df0 --- /dev/null +++ b/crates/solverforge-cli/templates/list/generic/src/constraints/mod.rs @@ -0,0 +1,21 @@ +/* Constraint definitions. + + Each constraint is a separate function returning an `IncrementalConstraint`. + Assemble them into a tuple — the solver scores the solution against all of them. + + Add as many constraints as your problem needs. The tuple supports up to 12 elements; + use nested tuples for more. */ + +mod balanced_load; + +pub use self::assemble::create_constraints; + +mod assemble { + use super::*; + use crate::domain::Plan; + use solverforge::prelude::*; + + pub fn create_constraints() -> impl ConstraintSet { + (balanced_load::constraint(),) + } +} diff --git a/crates/solverforge-cli/templates/list/generic/src/data/mod.rs b/crates/solverforge-cli/templates/list/generic/src/data/mod.rs new file mode 100644 index 00000000..817967c6 --- /dev/null +++ b/crates/solverforge-cli/templates/list/generic/src/data/mod.rs @@ -0,0 +1,49 @@ +/* Demo data for local development. + + Replace this with your own data loading (CSV, JSON, database, …). 
*/ + +use std::str::FromStr; + +use crate::domain::{Container, Item, Plan}; + +/// Available demo datasets. +#[derive(Debug, Clone, Copy)] +pub enum DemoData { + Small, + Standard, +} + +impl FromStr for DemoData { + type Err = (); + + fn from_str(s: &str) -> Result { + match s.to_uppercase().as_str() { + "SMALL" => Ok(DemoData::Small), + "STANDARD" => Ok(DemoData::Standard), + _ => Err(()), + } + } +} + +/// Generates a demo plan for the given dataset. +pub fn generate(demo: DemoData) -> Plan { + match demo { + DemoData::Small => generate_plan(3, 12), + DemoData::Standard => generate_plan(6, 36), + } +} + +fn generate_plan(n_containers: usize, n_items: usize) -> Plan { + let containers: Vec = (0..n_containers) + .map(|i| { + let name = format!("Container {}", (b'A' + i as u8) as char); + Container::new(i, name) + }) + .collect(); + + let item_facts: Vec = (0..n_items) + .map(|i| Item::new(i, format!("Item {}", i + 1))) + .collect(); + + Plan::new(item_facts, containers) +} diff --git a/crates/solverforge-cli/templates/list/generic/src/domain/container.rs b/crates/solverforge-cli/templates/list/generic/src/domain/container.rs new file mode 100644 index 00000000..c535ee62 --- /dev/null +++ b/crates/solverforge-cli/templates/list/generic/src/domain/container.rs @@ -0,0 +1,24 @@ +use serde::{Deserialize, Serialize}; +use solverforge::prelude::*; + +/// A container that owns an ordered sequence of items. +/// +/// Rename this to something domain-specific (Worker, Machine, Bin, Lane, …) +/// and add whatever fields describe a capacity or processing unit. +#[planning_entity] +#[derive(Serialize, Deserialize)] +pub struct Container { + #[planning_id] + pub id: usize, + pub name: String, + /// Ordered sequence of item indices assigned to this container. + /// + /// This is the list variable the solver optimizes. 
+ pub items: Vec, +} + +impl Container { + pub fn new(id: usize, name: impl Into) -> Self { + Self { id, name: name.into(), items: Vec::new() } + } +} diff --git a/crates/solverforge-cli/templates/list/generic/src/domain/item.rs b/crates/solverforge-cli/templates/list/generic/src/domain/item.rs new file mode 100644 index 00000000..50c67d41 --- /dev/null +++ b/crates/solverforge-cli/templates/list/generic/src/domain/item.rs @@ -0,0 +1,21 @@ +use serde::{Deserialize, Serialize}; +use solverforge::prelude::*; + +/// An item that gets placed into a container. +/// +/// Rename this to something domain-specific (Job, Package, Task, Order, …) +/// and add whatever fields describe a unit of work. +#[problem_fact] +#[derive(Serialize, Deserialize)] +pub struct Item { + pub index: usize, + pub name: String, +} + +impl Item { + pub fn new(index: usize, name: impl Into) -> Self { + Self { index, name: name.into() } + } + + pub fn finalize(&mut self) {} +} diff --git a/crates/solverforge-cli/templates/list/generic/src/domain/mod.rs b/crates/solverforge-cli/templates/list/generic/src/domain/mod.rs new file mode 100644 index 00000000..bdb5a96b --- /dev/null +++ b/crates/solverforge-cli/templates/list/generic/src/domain/mod.rs @@ -0,0 +1,7 @@ +mod container; +mod item; +mod plan; + +pub use container::Container; +pub use item::Item; +pub use plan::Plan; diff --git a/crates/solverforge-cli/templates/list/generic/src/domain/plan.rs b/crates/solverforge-cli/templates/list/generic/src/domain/plan.rs new file mode 100644 index 00000000..3cf356f1 --- /dev/null +++ b/crates/solverforge-cli/templates/list/generic/src/domain/plan.rs @@ -0,0 +1,32 @@ +use serde::{Deserialize, Serialize}; +use solverforge::prelude::*; + +use super::{Container, Item}; + +/// The root planning solution: items + containers + score. +/// +/// Rename this to something domain-specific (Route, Schedule, Assignment, …). 
+#[planning_solution(constraints = "crate::constraints::create_constraints")] +#[shadow_variable_updates( + list_owner = "containers", + list_field = "items", + element_type = "usize", + element_collection = "all_item_indices", +)] +#[derive(Serialize, Deserialize)] +pub struct Plan { + #[problem_fact_collection] + pub item_facts: Vec, + #[planning_entity_collection] + pub containers: Vec, + pub all_item_indices: Vec, + #[planning_score] + pub score: Option, +} + +impl Plan { + pub fn new(item_facts: Vec, containers: Vec) -> Self { + let all_item_indices = (0..item_facts.len()).collect(); + Self { item_facts, containers, all_item_indices, score: None } + } +} diff --git a/crates/solverforge-cli/templates/list/generic/src/lib.rs b/crates/solverforge-cli/templates/list/generic/src/lib.rs new file mode 100644 index 00000000..290adb87 --- /dev/null +++ b/crates/solverforge-cli/templates/list/generic/src/lib.rs @@ -0,0 +1,14 @@ +/* {{project_name}} — list variable constraint optimizer built with SolverForge + + Structure: + domain/ — Item (problem fact), Container (planning entity), Plan (solution) + constraints/ — Scoring rules + solver/ — Engine, service, termination config + api/ — HTTP API (axum) + data/ — Demo data / data loading */ + +pub mod api; +pub mod constraints; +pub mod data; +pub mod domain; +pub mod solver; diff --git a/crates/solverforge-cli/templates/list/generic/src/main.rs.tmpl b/crates/solverforge-cli/templates/list/generic/src/main.rs.tmpl new file mode 100644 index 00000000..53185abd --- /dev/null +++ b/crates/solverforge-cli/templates/list/generic/src/main.rs.tmpl @@ -0,0 +1,34 @@ +/* {{project_name}} — list variable optimizer with SolverForge + Run with: solverforge server + Then open: http://localhost:7860 */ + +use {{crate_name}}::api; + +use std::net::SocketAddr; +use std::sync::Arc; +use tower_http::cors::{Any, CorsLayer}; +use tower_http::services::ServeDir; + +#[tokio::main] +async fn main() { + solverforge::init_console(); + + let state = 
Arc::new(api::AppState::new()); + + let cors = CorsLayer::new() + .allow_origin(Any) + .allow_methods(Any) + .allow_headers(Any); + + let app = api::router(state) + .merge(solverforge_ui::routes()) + .fallback_service(ServeDir::new("static")) + .layer(cors); + + let addr = SocketAddr::from(([0, 0, 0, 0], 7860)); + println!("▸ {{project_name}} listening on http://{}", addr); + println!("▸ Open http://localhost:7860 in your browser\n"); + + let listener = tokio::net::TcpListener::bind(addr).await.unwrap(); + axum::serve(listener, app).await.unwrap(); +} diff --git a/crates/solverforge-cli/templates/list/generic/src/solver/mod.rs b/crates/solverforge-cli/templates/list/generic/src/solver/mod.rs new file mode 100644 index 00000000..bc13c1a4 --- /dev/null +++ b/crates/solverforge-cli/templates/list/generic/src/solver/mod.rs @@ -0,0 +1,4 @@ +mod service; + +pub use service::SolverService; +pub use solverforge::SolverStatus; diff --git a/crates/solverforge-cli/templates/list/generic/src/solver/service.rs b/crates/solverforge-cli/templates/list/generic/src/solver/service.rs new file mode 100644 index 00000000..4edd8da5 --- /dev/null +++ b/crates/solverforge-cli/templates/list/generic/src/solver/service.rs @@ -0,0 +1,138 @@ +use std::collections::HashMap; +use std::sync::Arc; +use parking_lot::RwLock; +use tokio::sync::{broadcast, mpsc}; + +use solverforge::{HardSoftScore, SolverManager, SolverStatus}; + +use crate::domain::Plan; + +// Static manager — must be 'static for SolverManager::solve. 
+static MANAGER: SolverManager = SolverManager::new(); + +fn sse_payload(score: Option, status: SolverStatus, mps: u64) -> String { + let score_str = score.map(|s| format!("{}", s)); + let status_str = match status { + SolverStatus::Solving => "SOLVING", + SolverStatus::NotSolving => "NOT_SOLVING", + }; + match score_str { + Some(s) => format!(r#"{{"score":"{}","solverStatus":"{}","movesPerSecond":{}}}"#, s, status_str, mps), + None => format!(r#"{{"score":null,"solverStatus":"{}","movesPerSecond":{}}}"#, status_str, mps), + } +} + +struct JobState { + slot_id: usize, + latest: Option, + score: Option, + status: SolverStatus, + sse_tx: broadcast::Sender, +} + +/// Manages solving jobs using the framework SolverManager. +pub struct SolverService { + jobs: Arc>>, +} + +impl SolverService { + pub fn new() -> Self { + Self { jobs: Arc::new(RwLock::new(HashMap::new())) } + } + + pub fn start_solving(&self, id: String, plan: Plan) { + let (slot_id, receiver) = MANAGER.solve(plan); + let (sse_tx, _) = broadcast::channel(64); + let state = JobState { + slot_id, + latest: None, + score: None, + status: SolverStatus::Solving, + sse_tx: sse_tx.clone(), + }; + self.jobs.write().insert(id.clone(), state); + + let jobs = Arc::clone(&self.jobs); + tokio::spawn(async move { + drain_receiver(jobs, id, slot_id, sse_tx, receiver).await; + }); + } + + pub fn with_snapshot( + &self, + id: &str, + f: impl FnOnce(&Plan, Option, SolverStatus) -> R, + ) -> Option { + let jobs = self.jobs.read(); + let state = jobs.get(id)?; + Some(f(state.latest.as_ref()?, state.score, state.status)) + } + + pub fn subscribe(&self, id: &str) -> Option> { + self.jobs.read().get(id).map(|s| s.sse_tx.subscribe()) + } + + pub fn sse_snapshot(&self, id: &str) -> Option { + let jobs = self.jobs.read(); + let state = jobs.get(id)?; + Some(sse_payload(state.score, state.status, 0)) + } + + pub fn has_job(&self, id: &str) -> bool { + self.jobs.read().contains_key(id) + } + + pub fn list_jobs(&self) -> Vec { + 
self.jobs.read().keys().cloned().collect() + } + + pub fn stop_solving(&self, id: &str) -> bool { + let jobs = self.jobs.read(); + if let Some(state) = jobs.get(id) { + return MANAGER.terminate_early(state.slot_id); + } + false + } + + pub fn remove_job(&self, id: &str) -> bool { + if let Some(state) = self.jobs.write().remove(id) { + MANAGER.free_slot(state.slot_id); + return true; + } + false + } +} + +async fn drain_receiver( + jobs: Arc>>, + id: String, + slot_id: usize, + sse_tx: broadcast::Sender, + mut receiver: mpsc::UnboundedReceiver<(Plan, HardSoftScore, u64)>, +) { + let mut last_mps = 0u64; + while let Some((solution, score, mps)) = receiver.recv().await { + last_mps = mps; + let _ = sse_tx.send(sse_payload(Some(score), SolverStatus::Solving, mps)); + let mut jobs = jobs.write(); + if let Some(state) = jobs.get_mut(&id) { + state.latest = Some(solution); + state.score = Some(score); + } + } + let _ = sse_tx.send(sse_payload( + jobs.read().get(&id).and_then(|s| s.score), + SolverStatus::NotSolving, + last_mps, + )); + let mut jobs = jobs.write(); + if let Some(state) = jobs.get_mut(&id) { + state.status = MANAGER.get_status(slot_id); + } +} + +impl Default for SolverService { + fn default() -> Self { + Self::new() + } +} diff --git a/crates/solverforge-cli/templates/list/generic/static/app.js b/crates/solverforge-cli/templates/list/generic/static/app.js new file mode 100644 index 00000000..87440bc7 --- /dev/null +++ b/crates/solverforge-cli/templates/list/generic/static/app.js @@ -0,0 +1,175 @@ +/* app.js — {{project_name}} SolverForge UI */ + +(async function () { + 'use strict'; + + var config = await fetch('/sf-config.json').then(function (r) { return r.json(); }); + + var app = document.getElementById('sf-app'); + + // Backend and solver + var backend = SF.createBackend({ baseUrl: '' }); + var statusBar = SF.createStatusBar({ constraints: config.constraints }); + var solver = SF.createSolver({ + backend: backend, + statusBar: statusBar, + onUpdate: 
function (data) { renderSequences(data); renderTables(data); }, + onComplete: function (data) { renderSequences(data); renderTables(data); }, + }); + + // Header + var header = SF.createHeader({ + logo: '/sf/img/solverforge-horizontal.svg', + title: config.title, + subtitle: config.subtitle, + tabs: [ + { id: 'sequences', label: 'Sequences', icon: 'fa-list-ol', active: true }, + { id: 'data', label: 'Data', icon: 'fa-table' }, + { id: 'api', label: 'REST API', icon: 'fa-book' }, + ], + actions: { + onSolve: function () { loadAndSolve(); }, + onStop: function () { solver.stop(); }, + onAnalyze: function () { openAnalysis(); }, + }, + onTabChange: function (tab) { + sequencesPanel.style.display = tab === 'sequences' ? '' : 'none'; + dataPanel.style.display = tab === 'data' ? '' : 'none'; + apiPanel.style.display = tab === 'api' ? '' : 'none'; + }, + }); + app.appendChild(header); + app.appendChild(statusBar.el); + + // Sequences panel (hero) + var sequencesPanel = SF.el('div', { className: 'sf-content' }); + var sequencesContainer = SF.el('div', { id: 'sf-sequences' }); + sequencesPanel.appendChild(sequencesContainer); + app.appendChild(sequencesPanel); + + // Data panel + var dataPanel = SF.el('div', { className: 'sf-content', style: { display: 'none' } }); + var tablesContainer = SF.el('div', { id: 'sf-tables' }); + dataPanel.appendChild(tablesContainer); + app.appendChild(dataPanel); + + // API panel + var apiPanel = SF.el('div', { className: 'sf-content', style: { display: 'none' } }); + var guide = SF.createApiGuide({ + endpoints: [ + { method: 'GET', path: '/demo-data/STANDARD', description: 'Fetch demo data', curl: 'curl http://localhost:7860/demo-data/STANDARD' }, + { method: 'POST', path: '/schedules', description: 'Submit a plan for solving', curl: 'curl -X POST -H "Content-Type: application/json" http://localhost:7860/schedules -d @plan.json' }, + { method: 'GET', path: '/schedules/{id}', description: 'Get current best solution', curl: 'curl 
http://localhost:7860/schedules/{id}' }, + { method: 'GET', path: '/schedules/{id}/events', description: 'Stream solver updates (SSE)', curl: 'curl -N http://localhost:7860/schedules/{id}/events' }, + { method: 'GET', path: '/schedules/{id}/analyze', description: 'Get constraint analysis', curl: 'curl http://localhost:7860/schedules/{id}/analyze' }, + { method: 'DELETE', path: '/schedules/{id}', description: 'Stop solving and remove job', curl: 'curl -X DELETE http://localhost:7860/schedules/{id}' }, + ], + }); + apiPanel.appendChild(guide); + app.appendChild(apiPanel); + + // Footer + var footer = SF.createFooter({ + links: [ + { label: 'SolverForge', url: 'https://www.solverforge.org' }, + { label: 'Docs', url: 'https://www.solverforge.org/docs' }, + ], + }); + app.appendChild(footer); + + // Analysis modal + var analysisModal = SF.createModal({ title: 'Score Analysis', width: '700px' }); + + // Load demo data on startup + fetch('/demo-data/STANDARD') + .then(function (r) { return r.json(); }) + .then(function (data) { renderSequences(data); renderTables(data); }) + .catch(function () {}); + + function loadAndSolve() { + fetch('/demo-data/STANDARD') + .then(function (r) { return r.json(); }) + .then(function (data) { solver.start(data); }) + .catch(function (err) { console.error('Demo load failed:', err); }); + } + + function openAnalysis() { + var id = solver.getJobId(); + if (!id) return; + backend.analyze(id) + .then(function (analysis) { + analysisModal.setBody(buildAnalysisHtml(analysis)); + analysisModal.open(); + }) + .catch(function () {}); + } + + function buildAnalysisHtml(analysis) { + if (!analysis || !analysis.constraints) return '

No analysis available.

'; + var html = '

Score: ' + SF.escHtml(analysis.score) + '

'; + html += ''; + analysis.constraints.forEach(function (c) { + html += ''; + }); + html += '
ConstraintTypeScoreMatches
' + SF.escHtml(c.name) + '' + SF.escHtml(c.type) + '' + SF.escHtml(c.score) + '' + (c.matches ? c.matches.length : 0) + '
'; + return html; + } + + function renderSequences(data) { + sequencesContainer.innerHTML = ''; + var containers = data.containers || []; + if (!containers.length) return; + + var cols = ['Container', 'Item Sequence', 'Count']; + var rows = containers.map(function (c) { + var seq = (c.items || []).join(' → ') || '—'; + return [c.name, seq, String((c.items || []).length)]; + }); + + var section = SF.el('div', { className: 'sf-section' }); + section.appendChild(SF.createTable({ columns: cols, rows: rows })); + sequencesContainer.appendChild(section); + } + + function renderTables(data) { + tablesContainer.innerHTML = ''; + + config.entities.forEach(function (entity) { + var items = data[entity.plural] || data[entity.name + 's'] || []; + if (!items.length) return; + var cols = Object.keys(items[0]); + var rows = items.map(function (item) { + return cols.map(function (k) { + var v = item[k]; + if (v === null || v === undefined) return '—'; + if (Array.isArray(v)) return v.join(', '); + if (typeof v === 'object') return JSON.stringify(v); + return String(v); + }); + }); + var section = SF.el('div', { className: 'sf-section' }); + section.appendChild(SF.el('h3', null, entity.label)); + section.appendChild(SF.createTable({ columns: cols, rows: rows })); + tablesContainer.appendChild(section); + }); + + config.facts.forEach(function (fact) { + var items = data[fact.plural] || data[fact.name + 's'] || []; + if (!items.length) return; + var cols = Object.keys(items[0]); + var rows = items.map(function (item) { + return cols.map(function (k) { + var v = item[k]; + if (v === null || v === undefined) return '—'; + if (typeof v === 'object') return JSON.stringify(v); + return String(v); + }); + }); + var section = SF.el('div', { className: 'sf-section' }); + section.appendChild(SF.el('h3', null, fact.label)); + section.appendChild(SF.createTable({ columns: cols, rows: rows })); + tablesContainer.appendChild(section); + }); + } + +})(); diff --git 
a/crates/solverforge-cli/templates/list/generic/static/index.html b/crates/solverforge-cli/templates/list/generic/static/index.html new file mode 100644 index 00000000..a808a71d --- /dev/null +++ b/crates/solverforge-cli/templates/list/generic/static/index.html @@ -0,0 +1,17 @@ + + + + + + {{project_name}} — SolverForge + + + + + + +
+ + + + diff --git a/crates/solverforge-cli/templates/list/generic/static/sf-config.json b/crates/solverforge-cli/templates/list/generic/static/sf-config.json new file mode 100644 index 00000000..6bcb6047 --- /dev/null +++ b/crates/solverforge-cli/templates/list/generic/static/sf-config.json @@ -0,0 +1,13 @@ +{ + "title": "{{project_name}}", + "subtitle": "List Variable Optimizer", + "constraints": ["balanced_load"], + "entities": [{ "name": "container", "label": "Containers", "plural": "containers" }], + "facts": [{ "name": "item", "label": "Items", "plural": "items" }], + "view": { + "type": "table", + "resource": "container", + "task": "item", + "fields": { "start": "index", "end": "index", "label": "name" } + } +} diff --git a/crates/solverforge-cli/tests/scaffold_test.rs b/crates/solverforge-cli/tests/scaffold_test.rs index a4121625..1af857b1 100644 --- a/crates/solverforge-cli/tests/scaffold_test.rs +++ b/crates/solverforge-cli/tests/scaffold_test.rs @@ -60,7 +60,7 @@ fn pin_generated_project_to_local_solverforge(project_dir: &std::path::Path) { std::fs::write(&cargo_toml, updated).expect("failed to update scaffold Cargo.toml"); } -// Scaffold a basic project and verify the expected files are created. +// Scaffold a basic project and verify the enriched generic files are created. 
#[test] fn test_new_basic_creates_project_files() { let tmp = tempfile::tempdir().expect("failed to create temp dir"); @@ -96,6 +96,40 @@ fn test_new_basic_creates_project_files() { project_dir.join("solver.toml").exists(), "solver.toml missing" ); + assert!( + project_dir.join("static").join("sf-config.json").exists(), + "static/sf-config.json missing" + ); + + let sf_config = + std::fs::read_to_string(project_dir.join("static").join("sf-config.json")).unwrap(); + assert!( + sf_config.contains("\"type\": \"assignment_board\""), + "basic scaffold should default to assignment_board view: {}", + sf_config + ); + assert!( + sf_config.contains("\"balanced_load\""), + "basic scaffold should wire balanced_load into sf-config.json: {}", + sf_config + ); + assert!( + sf_config.contains("\"capacity_limit\"") && sf_config.contains("\"affinity_match\""), + "basic scaffold should wire enriched demo constraints into sf-config.json: {}", + sf_config + ); + + let app_js = std::fs::read_to_string(project_dir.join("static").join("app.js")).unwrap(); + assert!( + app_js.contains("Assignment Overview"), + "basic scaffold should render assignment board hero: {}", + app_js + ); + assert!( + app_js.contains("renderAssignmentBoard"), + "basic scaffold should include assignment board rendering: {}", + app_js + ); } // Scaffold an employee-scheduling project and verify the domain files are created. @@ -130,17 +164,17 @@ fn test_new_employee_scheduling_creates_domain() { assert!(constraints_dir.exists(), "src/constraints/ missing"); } -// Scaffold a vehicle-routing project and verify the domain files are created. +// Scaffold a generic list project and verify the domain files are created. 
#[test] -fn test_new_vehicle_routing_creates_domain() { +fn test_new_list_creates_domain() { let tmp = tempfile::tempdir().expect("failed to create temp dir"); - let project_name = "test_vr_project"; + let project_name = "test_list_project"; let status = Command::new(cli_bin()) .args([ "new", project_name, - "--list=vehicle-routing", + "--list", "--skip-git", "--skip-readme", "--quiet", @@ -149,42 +183,47 @@ fn test_new_vehicle_routing_creates_domain() { .status() .expect("failed to run solverforge new"); - assert!( - status.success(), - "solverforge new --list=vehicle-routing failed" - ); + assert!(status.success(), "solverforge new --list failed"); let project_dir = tmp.path().join(project_name); assert!( project_dir.join("Cargo.toml").exists(), "Cargo.toml missing" ); + assert!( + project_dir.join("static").join("sf-config.json").exists(), + "static/sf-config.json missing" + ); } -// Bare --list should be rejected because the generic list scaffold is not supported. +// Scaffold a vehicle-routing project and verify the domain files are created. 
#[test] -fn test_new_list_requires_specialization() { +fn test_new_vehicle_routing_creates_domain() { let tmp = tempfile::tempdir().expect("failed to create temp dir"); + let project_name = "test_vr_project"; - let output = Command::new(cli_bin()) - .args(["new", "test_list_project", "--list"]) + let status = Command::new(cli_bin()) + .args([ + "new", + project_name, + "--list=vehicle-routing", + "--skip-git", + "--skip-readme", + "--quiet", + ]) .current_dir(tmp.path()) - .output() + .status() .expect("failed to run solverforge new"); assert!( - !output.status.success(), - "solverforge new --list unexpectedly succeeded" + status.success(), + "solverforge new --list=vehicle-routing failed" ); - let stderr = String::from_utf8_lossy(&output.stderr); - assert!( - stderr.contains("the --list template requires a specialization"), - "unexpected stderr: {stderr}" - ); + let project_dir = tmp.path().join(project_name); assert!( - stderr.contains("Use --list=vehicle-routing"), - "unexpected stderr: {stderr}" + project_dir.join("Cargo.toml").exists(), + "Cargo.toml missing" ); } @@ -226,6 +265,40 @@ fn test_new_basic_cargo_check_passes() { ); } +// Full cargo check on a scaffolded generic list project. 
+// Run with: cargo test -p solverforge-cli -- --ignored +#[test] +#[ignore = "invokes cargo check in a temp dir; requires network + toolchain; run with --ignored"] +fn test_new_list_cargo_check_passes() { + let tmp = tempfile::tempdir().expect("failed to create temp dir"); + let project_name = "test_cargo_check_list"; + + let scaffold_status = Command::new(cli_bin()) + .args([ + "new", + project_name, + "--list", + "--skip-git", + "--skip-readme", + "--quiet", + ]) + .current_dir(tmp.path()) + .status() + .expect("failed to run solverforge new"); + + assert!(scaffold_status.success(), "scaffolding failed"); + + let project_dir = tmp.path().join(project_name); + pin_generated_project_to_local_solverforge(&project_dir); + let check_status = Command::new("cargo") + .arg("check") + .current_dir(&project_dir) + .status() + .expect("failed to run cargo check"); + + assert!(check_status.success(), "cargo check failed on list project"); +} + // Full cargo check on a scaffolded employee-scheduling project. 
// Run with: cargo test -p solverforge-cli -- --ignored #[test] From 12792399cfb8b03d9f008ea6cabd5a1293f520b3 Mon Sep 17 00:00:00 2001 From: Vittorio Distefano Date: Sat, 21 Mar 2026 20:49:32 +0100 Subject: [PATCH 2/2] fix(cli): make salvaged scaffolds use published solverforge-ui --- .../templates/basic/generic/Cargo.toml.tmpl | 2 +- .../generic/src/constraints/affinity_match.rs | 143 +++++++++++++++--- .../templates/basic/generic/src/main.rs.tmpl | 2 +- .../basic/generic/src/solver/service.rs | 9 +- .../templates/list/generic/Cargo.toml.tmpl | 2 +- .../templates/list/generic/src/domain/mod.rs | 2 +- .../templates/list/generic/src/domain/plan.rs | 37 +++++ .../templates/list/generic/src/main.rs.tmpl | 2 +- .../list/generic/src/solver/service.rs | 9 +- crates/solverforge-cli/tests/scaffold_test.rs | 26 +++- 10 files changed, 193 insertions(+), 41 deletions(-) diff --git a/crates/solverforge-cli/templates/basic/generic/Cargo.toml.tmpl b/crates/solverforge-cli/templates/basic/generic/Cargo.toml.tmpl index 58473380..387eaa74 100644 --- a/crates/solverforge-cli/templates/basic/generic/Cargo.toml.tmpl +++ b/crates/solverforge-cli/templates/basic/generic/Cargo.toml.tmpl @@ -10,7 +10,7 @@ path = "src/main.rs" [dependencies] solverforge = { version = "{{solverforge_version}}", features = ["serde", "console", "verbose-logging"] } -solverforge-ui = { path = "/home/pvd/dev/solverforge/solverforge-ui" } +solverforge-ui = "0.2.0" # Web server axum = "0.8" tokio = { version = "1", features = ["full"] } diff --git a/crates/solverforge-cli/templates/basic/generic/src/constraints/affinity_match.rs b/crates/solverforge-cli/templates/basic/generic/src/constraints/affinity_match.rs index a39b4985..764358a5 100644 --- a/crates/solverforge-cli/templates/basic/generic/src/constraints/affinity_match.rs +++ b/crates/solverforge-cli/templates/basic/generic/src/constraints/affinity_match.rs @@ -1,26 +1,129 @@ -use crate::domain::{Plan, Task}; +use crate::domain::Plan; use 
solverforge::prelude::*; -use solverforge::stream::joiner::equal_bi; -use solverforge::stream::vec; use solverforge::IncrementalConstraint; /// SOFT: Prefer assignments whose affinity group matches the task preference. +pub struct AffinityMatchConstraint { + penalties: Vec, +} + +impl AffinityMatchConstraint { + pub fn new() -> Self { + Self { + penalties: Vec::new(), + } + } + + fn ensure_shape(&mut self, solution: &Plan) { + if self.penalties.len() != solution.tasks.len() { + self.penalties = vec![0; solution.tasks.len()]; + } + } + + fn penalty(solution: &Plan, entity_index: usize) -> i64 { + let Some(task) = solution.tasks.get(entity_index) else { + return 0; + }; + let Some(resource_idx) = task.resource_idx else { + return 0; + }; + let Some(resource) = solution.resources.get(resource_idx) else { + return 0; + }; + + if task.preferred_group == resource.affinity_group { + 0 + } else { + task.demand + } + } + + fn total_penalty(solution: &Plan) -> i64 { + solution + .tasks + .iter() + .enumerate() + .map(|(entity_index, _)| Self::penalty(solution, entity_index)) + .sum() + } + + fn update_penalty(&mut self, solution: &Plan, entity_index: usize) -> HardSoftScore { + self.ensure_shape(solution); + if entity_index >= self.penalties.len() { + return HardSoftScore::ZERO; + } + + let before = self.penalties[entity_index]; + let after = Self::penalty(solution, entity_index); + self.penalties[entity_index] = after; + + HardSoftScore::of(0, -(after - before)) + } +} + +impl IncrementalConstraint for AffinityMatchConstraint { + fn evaluate(&self, solution: &Plan) -> HardSoftScore { + HardSoftScore::of(0, -Self::total_penalty(solution)) + } + + fn match_count(&self, solution: &Plan) -> usize { + solution + .tasks + .iter() + .enumerate() + .filter(|(entity_index, _)| Self::penalty(solution, *entity_index) > 0) + .count() + } + + fn initialize(&mut self, solution: &Plan) -> HardSoftScore { + self.ensure_shape(solution); + for (entity_index, penalty) in 
self.penalties.iter_mut().enumerate() { + *penalty = Self::penalty(solution, entity_index); + } + HardSoftScore::of(0, -self.penalties.iter().sum::()) + } + + fn on_insert( + &mut self, + solution: &Plan, + entity_index: usize, + descriptor_index: usize, + ) -> HardSoftScore { + if descriptor_index != 0 { + return HardSoftScore::ZERO; + } + self.update_penalty(solution, entity_index) + } + + fn on_retract( + &mut self, + solution: &Plan, + entity_index: usize, + descriptor_index: usize, + ) -> HardSoftScore { + if descriptor_index != 0 { + return HardSoftScore::ZERO; + } + self.update_penalty(solution, entity_index) + } + + fn reset(&mut self) { + self.penalties.clear(); + } + + fn name(&self) -> &str { + "Affinity match" + } + + fn is_hard(&self) -> bool { + false + } + + fn weight(&self) -> HardSoftScore { + HardSoftScore::ONE_SOFT + } +} + pub fn constraint() -> impl IncrementalConstraint { - ConstraintFactory::::new() - .for_each(vec(|p: &Plan| &p.tasks)) - .join(( - vec(|p: &Plan| &p.resources), - equal_bi( - |task: &Task| task.resource_idx, - |resource: &crate::domain::Resource| Some(resource.index), - ), - )) - .penalize_with(|task: &Task, resource: &crate::domain::Resource| { - if task.preferred_group == resource.affinity_group { - HardSoftScore::ZERO - } else { - HardSoftScore::of(0, task.demand) - } - }) - .named("Affinity match") + AffinityMatchConstraint::new() } diff --git a/crates/solverforge-cli/templates/basic/generic/src/main.rs.tmpl b/crates/solverforge-cli/templates/basic/generic/src/main.rs.tmpl index 72abb362..5b52284a 100644 --- a/crates/solverforge-cli/templates/basic/generic/src/main.rs.tmpl +++ b/crates/solverforge-cli/templates/basic/generic/src/main.rs.tmpl @@ -11,7 +11,7 @@ use tower_http::services::ServeDir; #[tokio::main] async fn main() { - solverforge::init_console(); + solverforge::__internal::init_console(); let state = Arc::new(api::AppState::new()); diff --git a/crates/solverforge-cli/templates/basic/generic/src/solver/service.rs 
b/crates/solverforge-cli/templates/basic/generic/src/solver/service.rs index 4edd8da5..0f7f0af3 100644 --- a/crates/solverforge-cli/templates/basic/generic/src/solver/service.rs +++ b/crates/solverforge-cli/templates/basic/generic/src/solver/service.rs @@ -108,12 +108,11 @@ async fn drain_receiver( id: String, slot_id: usize, sse_tx: broadcast::Sender, - mut receiver: mpsc::UnboundedReceiver<(Plan, HardSoftScore, u64)>, + mut receiver: mpsc::UnboundedReceiver<(Plan, HardSoftScore)>, ) { - let mut last_mps = 0u64; - while let Some((solution, score, mps)) = receiver.recv().await { - last_mps = mps; - let _ = sse_tx.send(sse_payload(Some(score), SolverStatus::Solving, mps)); + let last_mps = 0u64; + while let Some((solution, score)) = receiver.recv().await { + let _ = sse_tx.send(sse_payload(Some(score), SolverStatus::Solving, last_mps)); let mut jobs = jobs.write(); if let Some(state) = jobs.get_mut(&id) { state.latest = Some(solution); diff --git a/crates/solverforge-cli/templates/list/generic/Cargo.toml.tmpl b/crates/solverforge-cli/templates/list/generic/Cargo.toml.tmpl index 58473380..387eaa74 100644 --- a/crates/solverforge-cli/templates/list/generic/Cargo.toml.tmpl +++ b/crates/solverforge-cli/templates/list/generic/Cargo.toml.tmpl @@ -10,7 +10,7 @@ path = "src/main.rs" [dependencies] solverforge = { version = "{{solverforge_version}}", features = ["serde", "console", "verbose-logging"] } -solverforge-ui = { path = "/home/pvd/dev/solverforge/solverforge-ui" } +solverforge-ui = "0.2.0" # Web server axum = "0.8" tokio = { version = "1", features = ["full"] } diff --git a/crates/solverforge-cli/templates/list/generic/src/domain/mod.rs b/crates/solverforge-cli/templates/list/generic/src/domain/mod.rs index bdb5a96b..f57962b0 100644 --- a/crates/solverforge-cli/templates/list/generic/src/domain/mod.rs +++ b/crates/solverforge-cli/templates/list/generic/src/domain/mod.rs @@ -4,4 +4,4 @@ mod plan; pub use container::Container; pub use item::Item; -pub use plan::Plan; 
+pub use plan::{ItemIndexDistanceMeter, Plan}; diff --git a/crates/solverforge-cli/templates/list/generic/src/domain/plan.rs b/crates/solverforge-cli/templates/list/generic/src/domain/plan.rs index 3cf356f1..9bd0e7b5 100644 --- a/crates/solverforge-cli/templates/list/generic/src/domain/plan.rs +++ b/crates/solverforge-cli/templates/list/generic/src/domain/plan.rs @@ -1,5 +1,6 @@ use serde::{Deserialize, Serialize}; use solverforge::prelude::*; +use solverforge::CrossEntityDistanceMeter; use super::{Container, Item}; @@ -12,6 +13,8 @@ use super::{Container, Item}; list_field = "items", element_type = "usize", element_collection = "all_item_indices", + distance_meter = "crate::domain::ItemIndexDistanceMeter", + intra_distance_meter = "crate::domain::ItemIndexDistanceMeter", )] #[derive(Serialize, Deserialize)] pub struct Plan { @@ -30,3 +33,37 @@ impl Plan { Self { item_facts, containers, all_item_indices, score: None } } } + +/// Simple cross-entity meter for the generic list scaffold. +/// +/// Uses item index distance so nearby move selectors can rank positions +/// without relying on a domain-specific metric. 
+#[derive(Clone, Default)] +pub struct ItemIndexDistanceMeter; + +impl CrossEntityDistanceMeter for ItemIndexDistanceMeter { + fn distance( + &self, + solution: &Plan, + src_entity: usize, + src_pos: usize, + dst_entity: usize, + dst_pos: usize, + ) -> f64 { + let src_item = solution + .containers + .get(src_entity) + .and_then(|container| container.items.get(src_pos)) + .copied(); + let dst_item = solution + .containers + .get(dst_entity) + .and_then(|container| container.items.get(dst_pos)) + .copied(); + + match (src_item, dst_item) { + (Some(src), Some(dst)) => src.abs_diff(dst) as f64, + _ => f64::INFINITY, + } + } +} diff --git a/crates/solverforge-cli/templates/list/generic/src/main.rs.tmpl b/crates/solverforge-cli/templates/list/generic/src/main.rs.tmpl index 53185abd..2d641a0a 100644 --- a/crates/solverforge-cli/templates/list/generic/src/main.rs.tmpl +++ b/crates/solverforge-cli/templates/list/generic/src/main.rs.tmpl @@ -11,7 +11,7 @@ use tower_http::services::ServeDir; #[tokio::main] async fn main() { - solverforge::init_console(); + solverforge::__internal::init_console(); let state = Arc::new(api::AppState::new()); diff --git a/crates/solverforge-cli/templates/list/generic/src/solver/service.rs b/crates/solverforge-cli/templates/list/generic/src/solver/service.rs index 4edd8da5..0f7f0af3 100644 --- a/crates/solverforge-cli/templates/list/generic/src/solver/service.rs +++ b/crates/solverforge-cli/templates/list/generic/src/solver/service.rs @@ -108,12 +108,11 @@ async fn drain_receiver( id: String, slot_id: usize, sse_tx: broadcast::Sender, - mut receiver: mpsc::UnboundedReceiver<(Plan, HardSoftScore, u64)>, + mut receiver: mpsc::UnboundedReceiver<(Plan, HardSoftScore)>, ) { - let mut last_mps = 0u64; - while let Some((solution, score, mps)) = receiver.recv().await { - last_mps = mps; - let _ = sse_tx.send(sse_payload(Some(score), SolverStatus::Solving, mps)); + let last_mps = 0u64; + while let Some((solution, score)) = receiver.recv().await { + let _ 
= sse_tx.send(sse_payload(Some(score), SolverStatus::Solving, last_mps)); let mut jobs = jobs.write(); if let Some(state) = jobs.get_mut(&id) { state.latest = Some(solution); diff --git a/crates/solverforge-cli/tests/scaffold_test.rs b/crates/solverforge-cli/tests/scaffold_test.rs index 1af857b1..867610bc 100644 --- a/crates/solverforge-cli/tests/scaffold_test.rs +++ b/crates/solverforge-cli/tests/scaffold_test.rs @@ -44,15 +44,29 @@ fn pin_generated_project_to_local_solverforge(project_dir: &std::path::Path) { let manifest = std::fs::read_to_string(&cargo_toml).expect("failed to read scaffold Cargo.toml"); let solverforge_path = workspace_root().join("crates").join("solverforge"); - let replacement = format!( - "solverforge = {{ path = {:?}, features = [\"serde\"] }}", + let standard_replacement = format!( + "solverforge = {{ path = {:?}, features = [\"serde\", \"console\", \"verbose-logging\"] }}", solverforge_path ); - let updated = manifest.replacen( - "solverforge = { version = \"0.5.19\", features = [\"serde\"] }", - &replacement, - 1, + let basic_replacement = format!( + "solverforge = {{ path = {:?}, features = [\"serde\"] }}", + solverforge_path ); + let updated = if manifest.contains( + "solverforge = { version = \"0.5.19\", features = [\"serde\", \"console\", \"verbose-logging\"] }", + ) { + manifest.replacen( + "solverforge = { version = \"0.5.19\", features = [\"serde\", \"console\", \"verbose-logging\"] }", + &standard_replacement, + 1, + ) + } else { + manifest.replacen( + "solverforge = { version = \"0.5.19\", features = [\"serde\"] }", + &basic_replacement, + 1, + ) + }; assert_ne!( manifest, updated, "failed to rewrite scaffold dependency to local solverforge path"