blackopsrepl committed on
Commit
7b2a37a
·
0 Parent(s):

Initial commit from solverforge new

Browse files
.gitignore ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ /target
2
+ **/*.rs.bk
Cargo.toml ADDED
@@ -0,0 +1,29 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ [package]
2
+ name = "solverforge-fsr"
3
+ version = "0.1.0"
4
+ edition = "2021"
5
+ rust-version = "1.95"
6
+ description = "Constraint optimizer built with SolverForge"
7
+
8
+ [[bin]]
9
+ name = "solverforge_fsr"
10
+ path = "src/main.rs"
11
+
12
+ [dependencies]
13
+ solverforge = { version = "0.10.0", features = ["serde", "console", "verbose-logging"] }
14
+ solverforge-ui = { version = "0.6.4" }
15
+ solverforge-maps = { version = "2.1.3" }
16
+ # Web server
17
+ axum = "0.8.9"
18
+ tokio = { version = "1.52.1", features = ["full"] }
19
+ tokio-stream = { version = "0.1.18", features = ["sync"] }
20
+ tower-http = { version = "0.6.8", features = ["fs", "cors"] }
21
+ tower = "0.5.3"
22
+
23
+ # Serialization
24
+ serde = { version = "1.0.228", features = ["derive"] }
25
+ serde_json = "1.0.149"
26
+
27
+ # Utilities
28
+ uuid = { version = "1.23.1", features = ["v4", "serde"] }
29
+ parking_lot = "0.12.5"
README.md ADDED
@@ -0,0 +1,53 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # solverforge-fsr
2
+
3
+ A SolverForge constraint optimization project (scaffold: `neutral-shell`).
4
+
5
+ ## Versioning
6
+
7
+ - CLI version used to scaffold this project: `2.0.2`
8
+ - SolverForge runtime target for this scaffold: `solverforge 0.10.0`
9
+ - SolverForge UI target for this scaffold: `solverforge-ui 0.6.4`
10
+ - SolverForge maps target for this scaffold: `solverforge-maps 2.1.3`
11
+ - Runtime dependency currently wired into `Cargo.toml`: `crates.io: solverforge 0.10.0`
12
+ - Frontend UI dependency currently wired into `Cargo.toml`: `crates.io: solverforge-ui 0.6.4`
13
+ - Maps dependency currently wired into `Cargo.toml`: `crates.io: solverforge-maps 2.1.3`
14
+
15
+ This project was scaffolded by `solverforge-cli` and currently targets SolverForge `0.10.0` through the crate dependencies configured in `Cargo.toml`.
16
+
17
+ ## Quick Start
18
+
19
+ ```bash
20
+ # Start the solver server
21
+ solverforge server
22
+
23
+ # Or run directly
24
+ cargo run --release
25
+ ```
26
+
27
+ ## Development
28
+
29
+ ```bash
30
+ # Add a new constraint
31
+ solverforge generate constraint my_rule --unary --hard
32
+
33
+ # Add a domain entity
34
+ solverforge generate entity worker --planning-variable shift_idx
35
+
36
+ # Add a problem fact
37
+ solverforge generate fact location
38
+
39
+ # Remove a resource
40
+ solverforge destroy constraint my_rule
41
+ ```
42
+
43
+ ## Project Structure
44
+
45
+ | Directory | Purpose |
46
+ |-----------|--------|
47
+ | `src/domain/` | Planning entities, facts, and solution struct |
48
+ | `src/constraints/` | Constraint definitions (scored by the solver) |
49
+ | `src/solver/` | Solver service and configuration |
50
+ | `src/api/` | HTTP routes and DTOs |
51
+ | `src/data/` | Data loading and generation |
52
+ | `solverforge.app.toml` | Scaffolded app/domain contract |
53
+ | `solver.toml` | Solver configuration (termination, phases) |
solver.toml ADDED
@@ -0,0 +1,15 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ [[phases]]
2
+ type = "construction_heuristic"
3
+ construction_heuristic_type = "first_fit"
4
+
5
+ [[phases]]
6
+ type = "local_search"
7
+ [phases.acceptor]
8
+ type = "late_acceptance"
9
+ late_acceptance_size = 400
10
+ [phases.forager]
11
+ type = "accepted_count"
12
+ limit = 4
13
+
14
+ [termination]
15
+ seconds_spent_limit = 30
solverforge.app.toml ADDED
@@ -0,0 +1,17 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ [app]
2
+ name = "solverforge-fsr"
3
+ starter = "neutral-shell"
4
+ cli_version = "2.0.2"
5
+
6
+ [runtime]
7
+ target = "solverforge 0.10.0"
8
+ runtime_source = "crates.io: solverforge 0.10.0"
9
+ ui_source = "crates.io: solverforge-ui 0.6.4"
10
+
11
+ [demo]
12
+ default_size = "standard"
13
+ available_sizes = ["small", "standard", "large"]
14
+
15
+ [solution]
16
+ name = "Plan"
17
+ score = "HardSoftScore"
src/api/dto.rs ADDED
@@ -0,0 +1,247 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ use serde::{Deserialize, Serialize};
2
+ use serde_json::{Map, Value};
3
+ use solverforge::{
4
+ HardSoftScore, SolverLifecycleState, SolverSnapshot, SolverSnapshotAnalysis, SolverStatus,
5
+ SolverTelemetry, SolverTerminalReason,
6
+ };
7
+ use std::time::Duration;
8
+
9
+ use crate::domain::Plan;
10
+
11
/// Wire representation of a `Plan`.
///
/// Solution fields travel as an untyped JSON map flattened into the top-level
/// object, so this DTO survives domain-model changes without edits; the score
/// is split out and rendered as an optional string.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct PlanDto {
    /// Flattened solution fields, passed through as raw JSON.
    #[serde(flatten)]
    pub fields: Map<String, Value>,
    /// Score rendered as a string; absent/None when the plan is unscored.
    #[serde(default)]
    pub score: Option<String>,
}

/// Constraint analysis result.
#[derive(Debug, Clone, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct ConstraintAnalysisDto {
    pub name: String,
    /// Constraint weight, stringified.
    pub weight: String,
    /// This constraint's score contribution, stringified.
    pub score: String,
    pub match_count: usize,
}

/// Full score breakdown for a solution: total score plus per-constraint rows.
#[derive(Debug, Clone, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct AnalyzeResponse {
    pub score: String,
    pub constraints: Vec<ConstraintAnalysisDto>,
}

/// Solver progress counters in JSON-friendly integer form (durations in ms).
#[derive(Debug, Clone, Copy, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct TelemetryDto {
    pub elapsed_ms: u64,
    pub step_count: u64,
    pub moves_generated: u64,
    pub moves_evaluated: u64,
    pub moves_accepted: u64,
    pub score_calculations: u64,
    pub generation_ms: u64,
    pub evaluation_ms: u64,
    pub moves_per_second: u64,
    // moves_accepted / moves_evaluated; 0.0 when nothing was evaluated.
    pub acceptance_rate: f64,
}

/// Lightweight job status view — no solution payload.
#[derive(Debug, Clone, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct JobSummaryDto {
    // `id` and `job_id` carry the same value (see `from_status`); both are
    // kept on the wire for client compatibility.
    pub id: String,
    pub job_id: String,
    pub lifecycle_state: &'static str,
    pub terminal_reason: Option<&'static str>,
    pub checkpoint_available: bool,
    pub event_sequence: u64,
    pub snapshot_revision: Option<u64>,
    pub current_score: Option<String>,
    pub best_score: Option<String>,
    pub telemetry: TelemetryDto,
}

/// Job status plus the solution captured at a specific snapshot revision.
#[derive(Debug, Clone, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct JobSnapshotDto {
    pub id: String,
    pub job_id: String,
    pub snapshot_revision: u64,
    pub lifecycle_state: &'static str,
    pub terminal_reason: Option<&'static str>,
    pub current_score: Option<String>,
    pub best_score: Option<String>,
    pub telemetry: TelemetryDto,
    pub solution: PlanDto,
}

/// Constraint analysis attached to a specific snapshot revision.
#[derive(Debug, Clone, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct JobAnalysisDto {
    pub id: String,
    pub job_id: String,
    pub snapshot_revision: u64,
    pub lifecycle_state: &'static str,
    pub terminal_reason: Option<&'static str>,
    pub analysis: AnalyzeResponse,
}
91
+
92
+ impl PlanDto {
93
+ pub fn from_plan(plan: &Plan) -> Self {
94
+ let mut fields = match serde_json::to_value(plan).expect("failed to serialize plan") {
95
+ Value::Object(map) => map,
96
+ _ => Map::new(),
97
+ };
98
+ let score = fields.remove("score").and_then(|value| {
99
+ if value.is_null() {
100
+ None
101
+ } else if let Some(score) = value.as_str() {
102
+ Some(score.to_string())
103
+ } else {
104
+ Some(value.to_string())
105
+ }
106
+ });
107
+
108
+ Self { fields, score }
109
+ }
110
+
111
+ pub fn to_domain(&self) -> Result<Plan, serde_json::Error> {
112
+ let mut fields = self.fields.clone();
113
+ let _ = &self.score;
114
+ fields.insert("score".to_string(), Value::Null);
115
+ serde_json::from_value(Value::Object(fields))
116
+ }
117
+ }
118
+
119
impl TelemetryDto {
    /// Maps runtime telemetry into the wire DTO, converting durations to
    /// milliseconds and deriving throughput and acceptance-rate figures.
    pub fn from_runtime(telemetry: &SolverTelemetry) -> Self {
        Self {
            elapsed_ms: duration_to_millis(telemetry.elapsed),
            step_count: telemetry.step_count,
            moves_generated: telemetry.moves_generated,
            moves_evaluated: telemetry.moves_evaluated,
            moves_accepted: telemetry.moves_accepted,
            score_calculations: telemetry.score_calculations,
            generation_ms: duration_to_millis(telemetry.generation_time),
            evaluation_ms: duration_to_millis(telemetry.evaluation_time),
            // Throughput is evaluated moves over total elapsed time.
            moves_per_second: whole_units_per_second(telemetry.moves_evaluated, telemetry.elapsed),
            acceptance_rate: derive_acceptance_rate(
                telemetry.moves_accepted,
                telemetry.moves_evaluated,
            ),
        }
    }
}

impl JobSummaryDto {
    /// Builds a summary from a solver status report.
    /// `id` and `job_id` are deliberately set to the same value.
    pub fn from_status(job_id: usize, status: &SolverStatus<HardSoftScore>) -> Self {
        Self {
            id: job_id.to_string(),
            job_id: job_id.to_string(),
            lifecycle_state: lifecycle_state_label(status.lifecycle_state),
            terminal_reason: status.terminal_reason.map(terminal_reason_label),
            checkpoint_available: status.checkpoint_available,
            event_sequence: status.event_sequence,
            snapshot_revision: status.latest_snapshot_revision,
            current_score: status.current_score.map(|score| score.to_string()),
            best_score: status.best_score.map(|score| score.to_string()),
            telemetry: TelemetryDto::from_runtime(&status.telemetry),
        }
    }
}

impl JobSnapshotDto {
    /// Builds a snapshot DTO, including the captured solution.
    pub fn from_snapshot(snapshot: &SolverSnapshot<Plan>) -> Self {
        Self {
            id: snapshot.job_id.to_string(),
            job_id: snapshot.job_id.to_string(),
            snapshot_revision: snapshot.snapshot_revision,
            lifecycle_state: lifecycle_state_label(snapshot.lifecycle_state),
            terminal_reason: snapshot.terminal_reason.map(terminal_reason_label),
            current_score: snapshot.current_score.map(|score| score.to_string()),
            best_score: snapshot.best_score.map(|score| score.to_string()),
            telemetry: TelemetryDto::from_runtime(&snapshot.telemetry),
            solution: PlanDto::from_plan(&snapshot.solution),
        }
    }
}

impl JobAnalysisDto {
    /// Pairs snapshot metadata with an already-computed constraint analysis.
    pub fn from_snapshot_analysis(
        snapshot: &SolverSnapshotAnalysis<HardSoftScore>,
        analysis: AnalyzeResponse,
    ) -> Self {
        Self {
            id: snapshot.job_id.to_string(),
            job_id: snapshot.job_id.to_string(),
            snapshot_revision: snapshot.snapshot_revision,
            lifecycle_state: lifecycle_state_label(snapshot.lifecycle_state),
            terminal_reason: snapshot.terminal_reason.map(terminal_reason_label),
            analysis,
        }
    }
}
187
+
188
+ pub fn analysis_response(analysis: &solverforge::ScoreAnalysis<HardSoftScore>) -> AnalyzeResponse {
189
+ AnalyzeResponse {
190
+ score: analysis.score.to_string(),
191
+ constraints: analysis
192
+ .constraints
193
+ .iter()
194
+ .map(|constraint| ConstraintAnalysisDto {
195
+ name: constraint.name.clone(),
196
+ weight: constraint.weight.to_string(),
197
+ score: constraint.score.to_string(),
198
+ match_count: constraint.match_count,
199
+ })
200
+ .collect(),
201
+ }
202
+ }
203
+
204
/// Renders a lifecycle state as the SCREAMING_SNAKE_CASE label used on the wire.
pub fn lifecycle_state_label(state: SolverLifecycleState) -> &'static str {
    match state {
        SolverLifecycleState::Solving => "SOLVING",
        SolverLifecycleState::PauseRequested => "PAUSE_REQUESTED",
        SolverLifecycleState::Paused => "PAUSED",
        SolverLifecycleState::Completed => "COMPLETED",
        SolverLifecycleState::Cancelled => "CANCELLED",
        SolverLifecycleState::Failed => "FAILED",
    }
}

/// Renders a terminal reason as a wire label.
/// Note: these are lower snake_case, unlike `lifecycle_state_label`'s output.
pub fn terminal_reason_label(reason: SolverTerminalReason) -> &'static str {
    match reason {
        SolverTerminalReason::Completed => "completed",
        SolverTerminalReason::TerminatedByConfig => "terminated_by_config",
        SolverTerminalReason::Cancelled => "cancelled",
        SolverTerminalReason::Failed => "failed",
    }
}
223
+
224
/// Converts a `Duration` to whole milliseconds, saturating at `u64::MAX`.
fn duration_to_millis(duration: Duration) -> u64 {
    u64::try_from(duration.as_millis()).unwrap_or(u64::MAX)
}
227
+
228
/// Computes whole `count`-units per second over `elapsed`, rounding down.
/// Returns 0 for a zero-length interval and saturates at `u64::MAX`.
fn whole_units_per_second(count: u64, elapsed: Duration) -> u64 {
    match elapsed.as_nanos() {
        0 => 0,
        nanos => {
            // Scale to nanosecond resolution in u128 so the multiply cannot wrap.
            let scaled = u128::from(count).saturating_mul(1_000_000_000);
            u64::try_from(scaled / nanos).unwrap_or(u64::MAX)
        }
    }
}
240
+
241
/// Fraction of evaluated moves that were accepted; 0.0 when no moves were
/// evaluated (avoids a NaN from 0/0).
fn derive_acceptance_rate(moves_accepted: u64, moves_evaluated: u64) -> f64 {
    match moves_evaluated {
        0 => 0.0,
        evaluated => moves_accepted as f64 / evaluated as f64,
    }
}
src/api/mod.rs ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ mod dto;
2
+ mod routes;
3
+ mod sse;
4
+
5
+ pub use dto::PlanDto;
6
+ pub use routes::{router, AppState};
src/api/routes.rs ADDED
@@ -0,0 +1,215 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ use axum::{
2
+ extract::{Path, Query, State},
3
+ http::StatusCode,
4
+ routing::{get, post},
5
+ Json, Router,
6
+ };
7
+ use serde::{Deserialize, Serialize};
8
+ use std::sync::Arc;
9
+
10
+ use super::dto::{analysis_response, JobAnalysisDto, JobSnapshotDto, JobSummaryDto, PlanDto};
11
+ use super::sse;
12
+ use crate::data::{generate, DemoData};
13
+ use crate::solver::SolverService;
14
+
15
/// Shared application state.
pub struct AppState {
    // Owns the solver job registry; handlers receive this wrapped in an `Arc`.
    pub solver: SolverService,
}

impl AppState {
    /// Creates state backed by a fresh, empty solver service.
    pub fn new() -> Self {
        Self {
            solver: SolverService::new(),
        }
    }
}

impl Default for AppState {
    fn default() -> Self {
        Self::new()
    }
}
33
+
34
/// Creates the API router.
///
/// Job routes take the job id as a `{id}` path segment (axum 0.8 syntax);
/// `/jobs/{id}/events` serves a server-sent-events stream.
pub fn router(state: Arc<AppState>) -> Router {
    Router::new()
        .route("/health", get(health))
        .route("/info", get(info))
        .route("/demo-data", get(list_demo_data))
        .route("/demo-data/{id}", get(get_demo_data))
        .route("/jobs", post(create_job))
        .route("/jobs/{id}", get(get_job).delete(delete_job))
        .route("/jobs/{id}/status", get(get_job_status))
        .route("/jobs/{id}/snapshot", get(get_snapshot))
        .route("/jobs/{id}/analysis", get(analyze_by_id))
        .route("/jobs/{id}/pause", post(pause_job))
        .route("/jobs/{id}/resume", post(resume_job))
        .route("/jobs/{id}/cancel", post(cancel_job))
        .route("/jobs/{id}/events", get(sse::events))
        .with_state(state)
}
52
+
53
#[derive(Serialize)]
struct HealthResponse {
    status: &'static str,
}

/// Liveness probe; always reports "UP".
async fn health() -> Json<HealthResponse> {
    Json(HealthResponse { status: "UP" })
}

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct InfoResponse {
    name: &'static str,
    version: &'static str,
    solver_engine: &'static str,
}

/// Reports package name/version captured from Cargo at compile time.
async fn info() -> Json<InfoResponse> {
    Json(InfoResponse {
        name: env!("CARGO_PKG_NAME"),
        version: env!("CARGO_PKG_VERSION"),
        solver_engine: "SolverForge",
    })
}

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct DemoDataCatalogResponse {
    default_id: &'static str,
    available_ids: Vec<&'static str>,
}

/// Lists the available demo dataset ids plus the default selection.
async fn list_demo_data() -> Json<DemoDataCatalogResponse> {
    Json(DemoDataCatalogResponse {
        default_id: DemoData::default_demo_data().id(),
        available_ids: DemoData::available_demo_data()
            .iter()
            .map(|demo| demo.id())
            .collect(),
    })
}

/// Generates and returns the demo plan for `id`; 404 for unknown ids.
async fn get_demo_data(Path(id): Path<String>) -> Result<Json<PlanDto>, StatusCode> {
    let demo = id.parse::<DemoData>().map_err(|_| StatusCode::NOT_FOUND)?;
    let plan = generate(demo);
    Ok(Json(PlanDto::from_plan(&plan)))
}
100
+
101
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct CreateJobResponse {
    id: String,
}

/// Starts a new solving job from a submitted plan.
/// 400 when the body cannot be deserialized into a `Plan`; solver failures
/// map to HTTP codes via `status_from_solver_error`.
async fn create_job(
    State(state): State<Arc<AppState>>,
    Json(dto): Json<PlanDto>,
) -> Result<Json<CreateJobResponse>, StatusCode> {
    let plan = dto.to_domain().map_err(|_| StatusCode::BAD_REQUEST)?;
    let id = state
        .solver
        .start_job(plan)
        .map_err(status_from_solver_error)?;
    Ok(Json(CreateJobResponse { id }))
}

/// Returns a status summary for the job.
async fn get_job(
    State(state): State<Arc<AppState>>,
    Path(id): Path<String>,
) -> Result<Json<JobSummaryDto>, StatusCode> {
    // Parsed here for the DTO; the service parses the raw id string again.
    let job_id = parse_job_id(&id)?;
    let status = state
        .solver
        .get_status(&id)
        .map_err(status_from_solver_error)?;
    Ok(Json(JobSummaryDto::from_status(job_id, &status)))
}

/// Alias route: `/jobs/{id}/status` behaves exactly like `/jobs/{id}`.
async fn get_job_status(
    State(state): State<Arc<AppState>>,
    Path(id): Path<String>,
) -> Result<Json<JobSummaryDto>, StatusCode> {
    get_job(State(state), Path(id)).await
}

/// Optional `?snapshot_revision=` selector; forwarded to the solver as-is.
#[derive(Debug, Default, Deserialize)]
struct SnapshotQuery {
    snapshot_revision: Option<u64>,
}

/// Returns a job snapshot (status plus captured solution).
async fn get_snapshot(
    State(state): State<Arc<AppState>>,
    Path(id): Path<String>,
    Query(query): Query<SnapshotQuery>,
) -> Result<Json<JobSnapshotDto>, StatusCode> {
    let snapshot = state
        .solver
        .get_snapshot(&id, query.snapshot_revision)
        .map_err(status_from_solver_error)?;
    Ok(Json(JobSnapshotDto::from_snapshot(&snapshot)))
}

/// Runs constraint analysis against a job snapshot and returns the breakdown.
async fn analyze_by_id(
    State(state): State<Arc<AppState>>,
    Path(id): Path<String>,
    Query(query): Query<SnapshotQuery>,
) -> Result<Json<JobAnalysisDto>, StatusCode> {
    let snapshot_analysis = state
        .solver
        .analyze_snapshot(&id, query.snapshot_revision)
        .map_err(status_from_solver_error)?;
    let analysis = analysis_response(&snapshot_analysis.analysis);
    Ok(Json(JobAnalysisDto::from_snapshot_analysis(
        &snapshot_analysis,
        analysis,
    )))
}
170
+
171
/// Requests a pause of the job; responds 202 Accepted.
async fn pause_job(
    State(state): State<Arc<AppState>>,
    Path(id): Path<String>,
) -> Result<StatusCode, StatusCode> {
    state.solver.pause(&id).map_err(status_from_solver_error)?;
    Ok(StatusCode::ACCEPTED)
}

/// Requests that a paused job resume solving; responds 202 Accepted.
async fn resume_job(
    State(state): State<Arc<AppState>>,
    Path(id): Path<String>,
) -> Result<StatusCode, StatusCode> {
    state.solver.resume(&id).map_err(status_from_solver_error)?;
    Ok(StatusCode::ACCEPTED)
}

/// Requests cancellation of the job; responds 202 Accepted.
async fn cancel_job(
    State(state): State<Arc<AppState>>,
    Path(id): Path<String>,
) -> Result<StatusCode, StatusCode> {
    state.solver.cancel(&id).map_err(status_from_solver_error)?;
    Ok(StatusCode::ACCEPTED)
}

/// Deletes the job and its service-side bookkeeping; 204 No Content on success.
async fn delete_job(
    State(state): State<Arc<AppState>>,
    Path(id): Path<String>,
) -> Result<StatusCode, StatusCode> {
    state.solver.delete(&id).map_err(status_from_solver_error)?;
    Ok(StatusCode::NO_CONTENT)
}
202
+
203
+ fn parse_job_id(id: &str) -> Result<usize, StatusCode> {
204
+ id.parse::<usize>().map_err(|_| StatusCode::NOT_FOUND)
205
+ }
206
+
207
+ fn status_from_solver_error(error: solverforge::SolverManagerError) -> StatusCode {
208
+ match error {
209
+ solverforge::SolverManagerError::NoFreeJobSlots => StatusCode::SERVICE_UNAVAILABLE,
210
+ solverforge::SolverManagerError::JobNotFound { .. } => StatusCode::NOT_FOUND,
211
+ solverforge::SolverManagerError::InvalidStateTransition { .. } => StatusCode::CONFLICT,
212
+ solverforge::SolverManagerError::NoSnapshotAvailable { .. } => StatusCode::CONFLICT,
213
+ solverforge::SolverManagerError::SnapshotNotFound { .. } => StatusCode::NOT_FOUND,
214
+ }
215
+ }
src/api/sse.rs ADDED
@@ -0,0 +1,65 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ use axum::{
2
+ body::Body,
3
+ extract::{Path, State},
4
+ http::{header, StatusCode},
5
+ response::Response,
6
+ };
7
+ use std::sync::Arc;
8
+ use tokio_stream::wrappers::BroadcastStream;
9
+ use tokio_stream::StreamExt;
10
+
11
+ use super::routes::AppState;
12
+
13
/// Streams job lifecycle events as server-sent events.
///
/// The client first receives a "bootstrap" event describing the job's current
/// state, followed by live events from the broadcast channel. Live events
/// whose sequence is not newer than the bootstrap's are dropped so clients
/// never observe state going backwards; lagged broadcast receivers simply
/// skip the messages they missed.
pub async fn events(
    State(state): State<Arc<AppState>>,
    Path(id): Path<String>,
) -> Result<Response<Body>, StatusCode> {
    // Subscribe before building the bootstrap; any overlap is de-duplicated
    // below by comparing event sequence numbers.
    let rx = state.solver.subscribe(&id).ok_or(StatusCode::NOT_FOUND)?;
    let bootstrap_json = state
        .solver
        .bootstrap_event(&id)
        .map_err(|_| StatusCode::NOT_FOUND)?;
    let bootstrap_event_sequence = event_sequence_from_json(&bootstrap_json);
    let bootstrap = tokio_stream::iter(std::iter::once(Ok::<_, std::convert::Infallible>(
        format!("data: {}\n\n", bootstrap_json).into_bytes(),
    )));

    let live = BroadcastStream::new(rx).filter_map(move |msg| match msg {
        Ok(json) => {
            if event_is_not_newer(&json, bootstrap_event_sequence) {
                return None;
            }
            Some(Ok::<_, std::convert::Infallible>(
                format!("data: {}\n\n", json).into_bytes(),
            ))
        }
        Err(_) => None, // Lagged - skip missed messages
    });

    let stream = bootstrap.chain(live);

    Ok(Response::builder()
        .header(header::CONTENT_TYPE, "text/event-stream")
        .header(header::CACHE_CONTROL, "no-cache")
        // Disable reverse-proxy buffering so events flush immediately.
        .header("X-Accel-Buffering", "no")
        .body(Body::from_stream(stream))
        // The builder only fails on invalid parts; all headers here are constants.
        .unwrap())
}
48
+
49
+ fn event_sequence_from_json(json: &str) -> Option<u64> {
50
+ serde_json::from_str::<serde_json::Value>(json)
51
+ .ok()
52
+ .and_then(|value| {
53
+ value
54
+ .get("eventSequence")
55
+ .and_then(serde_json::Value::as_u64)
56
+ })
57
+ }
58
+
59
+ fn event_is_not_newer(json: &str, bootstrap_event_sequence: Option<u64>) -> bool {
60
+ let Some(bootstrap_event_sequence) = bootstrap_event_sequence else {
61
+ return false;
62
+ };
63
+ event_sequence_from_json(json)
64
+ .is_some_and(|event_sequence| event_sequence <= bootstrap_event_sequence)
65
+ }
src/constraints/mod.rs ADDED
@@ -0,0 +1,22 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
/* Constraint definitions.

Add constraint modules with `solverforge generate constraint ...`.
The neutral shell starts with an empty constraint set. */

use crate::domain::Plan;
use solverforge::prelude::*;

pub use self::assemble::create_constraints;

// The @solverforge markers below are machine-managed: the CLI inserts
// generated module declarations and constraint calls between them.
// @solverforge:begin constraint-modules
// @solverforge:end constraint-modules

mod assemble {
    use super::*;

    /// Assembles every registered constraint into a single `ConstraintSet`.
    /// The unit value returned here stands in for an empty constraint set;
    /// the CLI rewrites the marked region as constraints are generated.
    pub fn create_constraints() -> impl ConstraintSet<Plan, HardSoftScore> {
        // @solverforge:begin constraint-calls
        ()
        // @solverforge:end constraint-calls
    }
}
src/data/data_seed.rs ADDED
@@ -0,0 +1,66 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ // @generated by solverforge-cli: data v1
2
+
3
+ use std::str::FromStr;
4
+
5
+ use crate::domain::Plan;
6
+
7
/// Named demo dataset sizes bundled with the scaffold.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum DemoData {
    Small,
    Standard,
    Large,
}

// Every dataset id advertised by the API, in display order.
const AVAILABLE_DEMO_DATA: &[DemoData] = &[DemoData::Small, DemoData::Standard, DemoData::Large];
// Dataset served when the client does not pick one.
const DEFAULT_DEMO_DATA: DemoData = DemoData::Standard;

/// Returns the dataset used when no explicit id is requested.
pub fn default_demo_data() -> DemoData {
    DEFAULT_DEMO_DATA
}

/// Returns all selectable datasets.
pub fn available_demo_data() -> &'static [DemoData] {
    AVAILABLE_DEMO_DATA
}

impl DemoData {
    /// Stable wire identifier for this dataset; `FromStr` accepts it back
    /// case-insensitively.
    pub fn id(self) -> &'static str {
        match self {
            DemoData::Small => "SMALL",
            DemoData::Standard => "STANDARD",
            DemoData::Large => "LARGE",
        }
    }

    /// Associated-function alias for the module-level [`default_demo_data`].
    pub fn default_demo_data() -> Self {
        default_demo_data()
    }

    /// Associated-function alias for the module-level [`available_demo_data`].
    pub fn available_demo_data() -> &'static [Self] {
        available_demo_data()
    }
}

impl FromStr for DemoData {
    type Err = ();

    /// Case-insensitive parse of a dataset id; `Err(())` for unknown ids.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // Compare case-insensitively without allocating an uppercased copy.
        if s.eq_ignore_ascii_case("SMALL") {
            Ok(DemoData::Small)
        } else if s.eq_ignore_ascii_case("STANDARD") {
            Ok(DemoData::Standard)
        } else if s.eq_ignore_ascii_case("LARGE") {
            Ok(DemoData::Large)
        } else {
            Err(())
        }
    }
}
55
+
56
+ pub fn generate(demo: DemoData) -> Plan {
57
+ match demo {
58
+ DemoData::Small => generate_plan(),
59
+ DemoData::Standard => generate_plan(),
60
+ DemoData::Large => generate_plan(),
61
+ }
62
+ }
63
+
64
+ fn generate_plan() -> Plan {
65
+ Plan::new()
66
+ }
src/data/mod.rs ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ mod data_seed;
2
+
3
+ pub use data_seed::{available_demo_data, default_demo_data, generate, DemoData};
src/domain/mod.rs ADDED
@@ -0,0 +1,9 @@
 
 
 
 
 
 
 
 
 
 
1
+ solverforge::planning_model! {
2
+ root = "src/domain";
3
+
4
+ // @solverforge:begin domain-exports
5
+ mod plan;
6
+
7
+ pub use plan::Plan;
8
+ // @solverforge:end domain-exports
9
+ }
src/domain/plan.rs ADDED
@@ -0,0 +1,37 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ use serde::{Deserialize, Serialize};
2
+ use solverforge::prelude::*;
3
+
4
+ // @solverforge:neutral-solution
5
+ // @solverforge:begin solution-imports
6
+ // @solverforge:end solution-imports
7
+
8
+ /// The root planning solution.
9
+ ///
10
+ /// Fresh projects start as a neutral shell. Add fact collections, planning
11
+ /// entity collections, and variable fields through the CLI as your domain
12
+ /// takes shape.
13
+ #[planning_solution(
14
+ constraints = "crate::constraints::create_constraints",
15
+ solver_toml = "../../solver.toml"
16
+ )]
17
+ #[derive(Serialize, Deserialize)]
18
+ pub struct Plan {
19
+ // @solverforge:begin solution-collections
20
+ // @solverforge:end solution-collections
21
+ #[planning_score]
22
+ pub score: Option<HardSoftScore>,
23
+ }
24
+
25
impl Plan {
    /// Builds an empty, unscored plan; the solver assigns the score after
    /// evaluation. The CLI injects constructor parameters and field
    /// initializers between the machine-managed marker comments below as
    /// the domain grows (rustfmt is skipped to keep those regions stable).
    #[rustfmt::skip]
    pub fn new(
        // @solverforge:begin solution-constructor-params
        // @solverforge:end solution-constructor-params
    ) -> Self {
        Self {
            // @solverforge:begin solution-constructor-init
            // @solverforge:end solution-constructor-init
            score: None,
        }
    }
}
src/lib.rs ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ /* solverforge-fsr — neutral constraint optimizer built with SolverForge
2
+
3
+ Structure:
4
+ domain/ — Plan (solution) plus CLI-generated entities and facts
5
+ constraints/ — Scoring rules
6
+ solver/ — Engine, service, termination config
7
+ api/ — HTTP API (axum)
8
+ data/ — Demo data / data loading */
9
+
10
+ pub mod api;
11
+ pub mod constraints;
12
+ pub mod data;
13
+ pub mod domain;
14
+ pub mod solver;
src/main.rs ADDED
@@ -0,0 +1,38 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ /* solverforge-fsr — unified optimizer with SolverForge
2
+ Run with: solverforge server
3
+ Then open the printed local URL (default port 7860) */
4
+
5
+ use solverforge_fsr::api;
6
+
7
+ use std::net::SocketAddr;
8
+ use std::sync::Arc;
9
+ use tower_http::cors::{Any, CorsLayer};
10
+ use tower_http::services::ServeDir;
11
+
12
#[tokio::main]
async fn main() {
    // Install SolverForge's console output before anything else runs.
    solverforge::console::init();

    let state = Arc::new(api::AppState::new());

    // Wide-open CORS: any origin, method, and header may call the API.
    let cors = CorsLayer::new()
        .allow_origin(Any)
        .allow_methods(Any)
        .allow_headers(Any);

    // API routes first, then the bundled UI routes, with static files as the
    // fallback for anything neither router matches.
    let app = api::router(state)
        .merge(solverforge_ui::routes())
        .fallback_service(ServeDir::new("static"))
        .layer(cors);

    // PORT env var overrides the default 7860; non-numeric values are ignored.
    let port = std::env::var("PORT")
        .ok()
        .and_then(|value| value.parse::<u16>().ok())
        .unwrap_or(7860);
    let addr = SocketAddr::from(([0, 0, 0, 0], port));
    println!("▸ solverforge-fsr listening on http://{}", addr);
    println!("▸ Open http://localhost:{} in your browser\n", port);

    // Failing to bind or serve is fatal at startup; panicking is acceptable here.
    let listener = tokio::net::TcpListener::bind(addr).await.unwrap();
    axum::serve(listener, app).await.unwrap();
}
src/solver/mod.rs ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ mod service;
2
+
3
+ pub use service::SolverService;
4
+ pub use solverforge::SolverStatus;
src/solver/service.rs ADDED
@@ -0,0 +1,364 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ use parking_lot::RwLock;
2
+ use serde::Serialize;
3
+ use std::collections::HashMap;
4
+ use std::sync::Arc;
5
+ use std::time::Duration;
6
+ use tokio::sync::{broadcast, mpsc};
7
+
8
+ use solverforge::{
9
+ HardSoftScore, SolverEvent, SolverEventMetadata, SolverLifecycleState, SolverManager,
10
+ SolverManagerError, SolverSnapshot, SolverSnapshotAnalysis, SolverStatus, SolverTelemetry,
11
+ SolverTerminalReason,
12
+ };
13
+
14
+ use crate::api::PlanDto;
15
+ use crate::domain::Plan;
16
+
17
+ // Static manager — must be 'static for retained job execution.
18
+ static MANAGER: SolverManager<Plan> = SolverManager::new();
19
+
20
/// Telemetry counters as serialized into SSE event payloads.
/// Field-for-field mirror of `api::dto::TelemetryDto`.
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct TelemetryPayload {
    elapsed_ms: u64,
    step_count: u64,
    moves_generated: u64,
    moves_evaluated: u64,
    moves_accepted: u64,
    score_calculations: u64,
    generation_ms: u64,
    evaluation_ms: u64,
    moves_per_second: u64,
    acceptance_rate: f64,
}

/// One SSE event about a job. Optional fields are populated per event type
/// by the payload-builder functions elsewhere in this module.
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct JobEventPayload {
    // Duplicated job-identifier fields, matching the DTO layer's wire format.
    id: String,
    job_id: String,
    event_type: &'static str,
    event_sequence: u64,
    lifecycle_state: &'static str,
    terminal_reason: Option<&'static str>,
    telemetry: TelemetryPayload,
    current_score: Option<String>,
    best_score: Option<String>,
    snapshot_revision: Option<u64>,
    solution: Option<PlanDto>,
    error: Option<String>,
}

/// Per-job bookkeeping: the broadcast channel feeding SSE subscribers.
struct JobState {
    sse_tx: broadcast::Sender<String>,
}

/// Manages retained solving jobs and broadcasts lifecycle-complete SSE payloads.
pub struct SolverService {
    // Job id → SSE channel; solving itself is owned by the static MANAGER.
    jobs: Arc<RwLock<HashMap<usize, JobState>>>,
}
60
+
61
impl SolverService {
    /// Creates a service with no jobs registered.
    pub fn new() -> Self {
        Self {
            jobs: Arc::new(RwLock::new(HashMap::new())),
        }
    }

    /// Submits `plan` to the solver manager, registers an SSE broadcast
    /// channel for the new job, and spawns a task that forwards solver
    /// events into it. Returns the job id as the string used in API paths.
    pub fn start_job(&self, plan: Plan) -> Result<String, SolverManagerError> {
        let (job_id, receiver) = MANAGER.solve(plan)?;
        // Capacity 64: subscribers that fall further behind lag and drop
        // messages (the SSE layer skips lagged receivers).
        let (sse_tx, _) = broadcast::channel(64);

        self.jobs.write().insert(
            job_id,
            JobState {
                sse_tx: sse_tx.clone(),
            },
        );

        let jobs = Arc::clone(&self.jobs);
        tokio::spawn(async move {
            drain_receiver(jobs, job_id, sse_tx, receiver).await;
        });

        Ok(job_id.to_string())
    }

    /// Opens a broadcast subscription to the job's SSE stream, or `None`
    /// when the id is malformed or unknown.
    pub fn subscribe(&self, id: &str) -> Option<broadcast::Receiver<String>> {
        let job_id = parse_job_id(id).ok()?;
        self.jobs
            .read()
            .get(&job_id)
            .map(|state| state.sse_tx.subscribe())
    }

    /// Builds the initial SSE event for a new subscriber: snapshot-backed
    /// when a snapshot revision exists, otherwise status-only.
    pub fn bootstrap_event(&self, id: &str) -> Result<String, SolverManagerError> {
        let job_id = parse_job_id(id)?;
        let status = MANAGER.get_status(job_id)?;
        if let Some(revision) = status.latest_snapshot_revision {
            let snapshot = MANAGER.get_snapshot(job_id, Some(revision))?;
            return Ok(snapshot_status_event_payload(
                job_id,
                bootstrap_snapshot_event_type(status.lifecycle_state),
                &status,
                &snapshot,
            ));
        }

        Ok(status_event_payload(
            job_id,
            bootstrap_event_type(status.lifecycle_state),
            &status,
        ))
    }

    /// Fetches the job's current status from the solver manager.
    pub fn get_status(&self, id: &str) -> Result<SolverStatus<HardSoftScore>, SolverManagerError> {
        let job_id = parse_job_id(id)?;
        MANAGER.get_status(job_id)
    }

    /// Requests a pause of the running job.
    pub fn pause(&self, id: &str) -> Result<(), SolverManagerError> {
        MANAGER.pause(parse_job_id(id)?)
    }

    /// Resumes a paused job.
    pub fn resume(&self, id: &str) -> Result<(), SolverManagerError> {
        MANAGER.resume(parse_job_id(id)?)
    }

    /// Cancels the job.
    pub fn cancel(&self, id: &str) -> Result<(), SolverManagerError> {
        MANAGER.cancel(parse_job_id(id)?)
    }

    /// Deletes the job from the manager, then drops this service's SSE
    /// bookkeeping for it. The manager delete runs first so a failure
    /// leaves the channel entry intact.
    pub fn delete(&self, id: &str) -> Result<(), SolverManagerError> {
        let job_id = parse_job_id(id)?;
        MANAGER.delete(job_id)?;
        self.jobs.write().remove(&job_id);
        Ok(())
    }

    /// Fetches a solution snapshot; a `None` revision is forwarded to the
    /// manager unchanged.
    pub fn get_snapshot(
        &self,
        id: &str,
        snapshot_revision: Option<u64>,
    ) -> Result<SolverSnapshot<Plan>, SolverManagerError> {
        MANAGER.get_snapshot(parse_job_id(id)?, snapshot_revision)
    }

    /// Runs constraint analysis against a snapshot of the job.
    pub fn analyze_snapshot(
        &self,
        id: &str,
        snapshot_revision: Option<u64>,
    ) -> Result<SolverSnapshotAnalysis<HardSoftScore>, SolverManagerError> {
        MANAGER.analyze_snapshot(parse_job_id(id)?, snapshot_revision)
    }
}
155
+
156
/// Pumps solver events for one job into its SSE broadcast channel.
///
/// Runs until the solver closes its event channel. Each event is serialized
/// to the JSON wire payload; if the job has been removed from `jobs`
/// (deleted), the task stops without sending further payloads.
async fn drain_receiver(
    jobs: Arc<RwLock<HashMap<usize, JobState>>>,
    job_id: usize,
    sse_tx: broadcast::Sender<String>,
    mut receiver: mpsc::UnboundedReceiver<SolverEvent<Plan>>,
) {
    while let Some(event) = receiver.recv().await {
        // Map each solver event to its wire representation. Only
        // BestSolution/Completed carry a solution; only Failed carries an error.
        let payload = match &event {
            SolverEvent::Progress { metadata } => {
                event_payload(job_id, "progress", metadata, None, None)
            }
            SolverEvent::BestSolution { metadata, solution } => {
                event_payload(job_id, "best_solution", metadata, Some(solution), None)
            }
            SolverEvent::PauseRequested { metadata } => {
                event_payload(job_id, "pause_requested", metadata, None, None)
            }
            SolverEvent::Paused { metadata } => {
                event_payload(job_id, "paused", metadata, None, None)
            }
            SolverEvent::Resumed { metadata } => {
                event_payload(job_id, "resumed", metadata, None, None)
            }
            SolverEvent::Completed { metadata, solution } => {
                event_payload(job_id, "completed", metadata, Some(solution), None)
            }
            SolverEvent::Cancelled { metadata } => {
                event_payload(job_id, "cancelled", metadata, None, None)
            }
            SolverEvent::Failed { metadata, error } => {
                event_payload(job_id, "failed", metadata, None, Some(error.as_str()))
            }
        };

        // Membership is re-checked on every iteration so a concurrent
        // `delete` halts the stream before the next send.
        if !jobs.read().contains_key(&job_id) {
            return;
        }

        // Send errors (no live subscribers) are intentionally ignored.
        let _ = sse_tx.send(payload);
    }
}
197
+
198
+ fn parse_job_id(id: &str) -> Result<usize, SolverManagerError> {
199
+ id.parse::<usize>()
200
+ .map_err(|_| SolverManagerError::JobNotFound { job_id: usize::MAX })
201
+ }
202
+
203
/// Serializes a status-only lifecycle payload (no solution attached).
fn status_event_payload(
    job_id: usize,
    event_type: &'static str,
    status: &SolverStatus<HardSoftScore>,
) -> String {
    serialize_payload(JobEventPayload {
        // `id` mirrors `job_id` for clients that expect either field.
        id: job_id.to_string(),
        job_id: job_id.to_string(),
        event_type,
        event_sequence: status.event_sequence,
        lifecycle_state: lifecycle_state_label(status.lifecycle_state),
        terminal_reason: status.terminal_reason.map(terminal_reason_label),
        telemetry: telemetry_payload(&status.telemetry),
        current_score: status.current_score.map(|score| score.to_string()),
        best_score: status.best_score.map(|score| score.to_string()),
        snapshot_revision: status.latest_snapshot_revision,
        solution: None,
        error: None,
    })
}
223
+
224
/// Serializes a lifecycle payload that embeds a retained snapshot's solution.
///
/// Scores prefer the live status values and fall back to the snapshot's, so
/// a bootstrap payload is as fresh as the manager can provide.
fn snapshot_status_event_payload(
    job_id: usize,
    event_type: &'static str,
    status: &SolverStatus<HardSoftScore>,
    snapshot: &SolverSnapshot<Plan>,
) -> String {
    serialize_payload(JobEventPayload {
        // `id` mirrors `job_id` for clients that expect either field.
        id: job_id.to_string(),
        job_id: job_id.to_string(),
        event_type,
        event_sequence: status.event_sequence,
        lifecycle_state: lifecycle_state_label(status.lifecycle_state),
        terminal_reason: status.terminal_reason.map(terminal_reason_label),
        telemetry: telemetry_payload(&status.telemetry),
        current_score: status
            .current_score
            .or(snapshot.current_score)
            .map(|score| score.to_string()),
        best_score: status
            .best_score
            .or(snapshot.best_score)
            .map(|score| score.to_string()),
        snapshot_revision: Some(snapshot.snapshot_revision),
        solution: Some(PlanDto::from_plan(&snapshot.solution)),
        error: None,
    })
}
251
+
252
+ fn bootstrap_event_type(state: SolverLifecycleState) -> &'static str {
253
+ match state {
254
+ SolverLifecycleState::Solving => "progress",
255
+ SolverLifecycleState::PauseRequested => "pause_requested",
256
+ SolverLifecycleState::Paused => "paused",
257
+ SolverLifecycleState::Completed => "completed",
258
+ SolverLifecycleState::Cancelled => "cancelled",
259
+ SolverLifecycleState::Failed => "failed",
260
+ }
261
+ }
262
+
263
+ fn bootstrap_snapshot_event_type(state: SolverLifecycleState) -> &'static str {
264
+ match state {
265
+ SolverLifecycleState::Solving => "best_solution",
266
+ other => bootstrap_event_type(other),
267
+ }
268
+ }
269
+
270
/// Serializes a live solver event into the SSE wire payload.
///
/// `solution` / `error` are supplied only by the event variants that carry
/// them (see `drain_receiver`).
fn event_payload(
    job_id: usize,
    event_type: &'static str,
    metadata: &SolverEventMetadata<HardSoftScore>,
    solution: Option<&Plan>,
    error: Option<&str>,
) -> String {
    serialize_payload(JobEventPayload {
        // `id` mirrors `job_id` for clients that expect either field.
        id: job_id.to_string(),
        job_id: job_id.to_string(),
        event_type,
        event_sequence: metadata.event_sequence,
        lifecycle_state: lifecycle_state_label(metadata.lifecycle_state),
        terminal_reason: metadata.terminal_reason.map(terminal_reason_label),
        telemetry: telemetry_payload(&metadata.telemetry),
        current_score: metadata.current_score.map(|score| score.to_string()),
        best_score: metadata.best_score.map(|score| score.to_string()),
        snapshot_revision: metadata.snapshot_revision,
        solution: solution.map(PlanDto::from_plan),
        error: error.map(ToOwned::to_owned),
    })
}
292
+
293
+ fn serialize_payload(payload: JobEventPayload) -> String {
294
+ serde_json::to_string(&payload).expect("failed to serialize solver lifecycle payload")
295
+ }
296
+
297
/// Converts runtime telemetry into the JSON-friendly wire shape, deriving
/// throughput (moves/sec) and acceptance rate from the raw counters.
fn telemetry_payload(telemetry: &SolverTelemetry) -> TelemetryPayload {
    TelemetryPayload {
        elapsed_ms: duration_to_millis(telemetry.elapsed),
        step_count: telemetry.step_count,
        moves_generated: telemetry.moves_generated,
        moves_evaluated: telemetry.moves_evaluated,
        moves_accepted: telemetry.moves_accepted,
        score_calculations: telemetry.score_calculations,
        generation_ms: duration_to_millis(telemetry.generation_time),
        evaluation_ms: duration_to_millis(telemetry.evaluation_time),
        // Whole moves per second over total elapsed time (0 when elapsed is 0).
        moves_per_second: whole_units_per_second(telemetry.moves_evaluated, telemetry.elapsed),
        // Fraction in [0, 1]; 0.0 when nothing has been evaluated yet.
        acceptance_rate: derive_acceptance_rate(
            telemetry.moves_accepted,
            telemetry.moves_evaluated,
        ),
    }
}
314
+
315
+ fn lifecycle_state_label(state: SolverLifecycleState) -> &'static str {
316
+ match state {
317
+ SolverLifecycleState::Solving => "SOLVING",
318
+ SolverLifecycleState::PauseRequested => "PAUSE_REQUESTED",
319
+ SolverLifecycleState::Paused => "PAUSED",
320
+ SolverLifecycleState::Completed => "COMPLETED",
321
+ SolverLifecycleState::Cancelled => "CANCELLED",
322
+ SolverLifecycleState::Failed => "FAILED",
323
+ }
324
+ }
325
+
326
+ fn terminal_reason_label(reason: SolverTerminalReason) -> &'static str {
327
+ match reason {
328
+ SolverTerminalReason::Completed => "completed",
329
+ SolverTerminalReason::TerminatedByConfig => "terminated_by_config",
330
+ SolverTerminalReason::Cancelled => "cancelled",
331
+ SolverTerminalReason::Failed => "failed",
332
+ }
333
+ }
334
+
335
impl Default for SolverService {
    /// Equivalent to `SolverService::new`.
    fn default() -> Self {
        Self::new()
    }
}
340
+
341
/// Converts a `Duration` to whole milliseconds, saturating at `u64::MAX`
/// instead of truncating (the `u128` → `u64` narrowing could otherwise wrap).
fn duration_to_millis(duration: Duration) -> u64 {
    u64::try_from(duration.as_millis()).unwrap_or(u64::MAX)
}
344
+
345
/// Computes whole units per second over `elapsed`, returning 0 when no time
/// has passed and saturating at `u64::MAX` for absurdly high rates.
fn whole_units_per_second(count: u64, elapsed: Duration) -> u64 {
    let nanos = elapsed.as_nanos();
    if nanos == 0 {
        return 0;
    }
    // Widen to u128 so `count * 1e9` cannot overflow; nanos is non-zero here,
    // so plain division is safe.
    let rate = u128::from(count).saturating_mul(1_000_000_000) / nanos;
    rate.min(u128::from(u64::MAX)) as u64
}
357
+
358
/// Returns accepted/evaluated as a fraction in [0, 1]; 0.0 when nothing has
/// been evaluated (avoids a NaN from 0/0).
fn derive_acceptance_rate(moves_accepted: u64, moves_evaluated: u64) -> f64 {
    if moves_evaluated == 0 {
        return 0.0;
    }
    moves_accepted as f64 / moves_evaluated as f64
}
static/app.js ADDED
@@ -0,0 +1,808 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ /* app.js — solverforge-fsr SolverForge UI */
2
+
3
+ (async function () {
4
+ 'use strict';
5
+
6
+ var SLOT_MINUTES = 60;
7
+ var DEFAULT_VIEWPORT_SLOTS = 12;
8
+ var TIMELINE_TONES = ['emerald', 'blue', 'amber', 'rose', 'violet', 'slate'];
9
+
10
+ var config = await fetch('/sf-config.json').then(function (response) { return response.json(); });
11
+ var uiModel = await fetch('/generated/ui-model.json').then(function (response) { return response.json(); });
12
+ var app = document.getElementById('sf-app');
13
+ var backend = SF.createBackend({ baseUrl: '' });
14
+ var statusBar = SF.createStatusBar({ constraints: uiModel.constraints || [] });
15
+ var currentPlan = null;
16
+ var lastAnalysis = null;
17
+ var bootstrapError = null;
18
+ var demoCatalog = { defaultId: null, availableIds: [] };
19
+ var activeTab = (uiModel.views && uiModel.views.length) ? uiModel.views[0].id : 'overview';
20
+ var viewPanels = {};
21
+ var viewTimelines = {};
22
+
23
+ var tabs = (uiModel.views || []).map(function (view, index) {
24
+ return {
25
+ id: view.id,
26
+ label: view.label,
27
+ icon: view.kind === 'list' ? 'fa-list-ol' : 'fa-table-cells-large',
28
+ active: index === 0,
29
+ };
30
+ });
31
+ if (!tabs.length) {
32
+ tabs.push({ id: 'overview', label: 'Overview', icon: 'fa-compass', active: true });
33
+ }
34
+ tabs.push({ id: 'data', label: 'Data', icon: 'fa-table' });
35
+ tabs.push({ id: 'api', label: 'REST API', icon: 'fa-book' });
36
+
37
+ var header = SF.createHeader({
38
+ logo: '/sf/img/ouroboros.svg',
39
+ title: config.title,
40
+ subtitle: config.subtitle,
41
+ tabs: tabs,
42
+ actions: {
43
+ onSolve: function () { loadAndSolve(); },
44
+ onPause: function () { pauseSolve(); },
45
+ onResume: function () { resumeSolve(); },
46
+ onCancel: function () { cancelSolve(); },
47
+ onAnalyze: function () { openAnalysis(); },
48
+ },
49
+ onTabChange: function (tab) {
50
+ activeTab = tab;
51
+ Object.keys(viewPanels).forEach(function (key) {
52
+ viewPanels[key].style.display = key === tab ? '' : 'none';
53
+ });
54
+ overviewPanel.style.display = tab === 'overview' ? '' : 'none';
55
+ dataPanel.style.display = tab === 'data' ? '' : 'none';
56
+ apiPanel.style.display = tab === 'api' ? '' : 'none';
57
+ },
58
+ });
59
+ app.appendChild(header);
60
+ statusBar.bindHeader(header);
61
+ app.appendChild(statusBar.el);
62
+
63
+ var bootstrapNotice = SF.el('div', {
64
+ className: 'sf-content',
65
+ style: {
66
+ display: 'none',
67
+ padding: '16px',
68
+ marginBottom: '16px',
69
+ borderRadius: '12px',
70
+ border: '1px solid #dc2626',
71
+ background: '#fef2f2',
72
+ color: '#991b1b',
73
+ },
74
+ });
75
+ app.appendChild(bootstrapNotice);
76
+
77
+ var overviewPanel = SF.el('div', { className: 'sf-content', style: { display: activeTab === 'overview' ? '' : 'none' } });
78
+ var overviewContainer = SF.el('div', { id: 'sf-overview' });
79
+ overviewPanel.appendChild(overviewContainer);
80
+ app.appendChild(overviewPanel);
81
+
82
+ (uiModel.views || []).forEach(function (view) {
83
+ var panel = SF.el('div', { className: 'sf-content', style: { display: activeTab === view.id ? '' : 'none' } });
84
+ panel.appendChild(SF.el('div', { id: 'view-' + view.id }));
85
+ viewPanels[view.id] = panel;
86
+ app.appendChild(panel);
87
+ });
88
+
89
+ var dataPanel = SF.el('div', { className: 'sf-content', style: { display: 'none' } });
90
+ var tablesContainer = SF.el('div', { id: 'sf-tables' });
91
+ dataPanel.appendChild(tablesContainer);
92
+ app.appendChild(dataPanel);
93
+
94
+ var apiPanel = SF.el('div', { className: 'sf-content', style: { display: 'none' } });
95
+ var apiGuideContainer = SF.el('div');
96
+ apiPanel.appendChild(apiGuideContainer);
97
+ app.appendChild(apiPanel);
98
+
99
+ app.appendChild(SF.createFooter({
100
+ links: [
101
+ { label: 'SolverForge', url: 'https://www.solverforge.org' },
102
+ { label: 'Docs', url: 'https://www.solverforge.org/docs' },
103
+ ],
104
+ }));
105
+
106
+ var analysisModal = SF.createModal({ title: 'Score Analysis', width: '700px' });
107
+ var solver = SF.createSolver({
108
+ backend: backend,
109
+ statusBar: statusBar,
110
+ onProgress: function (meta) {
111
+ syncLifecycleMarkers(meta);
112
+ },
113
+ onPauseRequested: function (meta) {
114
+ syncLifecycleMarkers(meta);
115
+ },
116
+ onSolution: function (snapshot, meta) {
117
+ if (snapshot && snapshot.solution) {
118
+ renderAll(snapshot.solution);
119
+ }
120
+ syncLifecycleMarkers(meta);
121
+ },
122
+ onPaused: function (snapshot, meta) {
123
+ if (snapshot && snapshot.solution) {
124
+ renderAll(snapshot.solution);
125
+ }
126
+ syncLifecycleMarkers(meta);
127
+ },
128
+ onResumed: function (meta) {
129
+ syncLifecycleMarkers(meta);
130
+ },
131
+ onCancelled: function (snapshot, meta) {
132
+ if (snapshot && snapshot.solution) {
133
+ renderAll(snapshot.solution);
134
+ }
135
+ syncLifecycleMarkers(meta);
136
+ },
137
+ onComplete: function (snapshot, meta) {
138
+ if (snapshot && snapshot.solution) {
139
+ renderAll(snapshot.solution);
140
+ }
141
+ syncLifecycleMarkers(meta);
142
+ },
143
+ onFailure: function (message, meta, snapshot, analysis) {
144
+ if (snapshot && snapshot.solution) {
145
+ renderAll(snapshot.solution);
146
+ }
147
+ if (analysis) {
148
+ lastAnalysis = analysis;
149
+ }
150
+ console.error('Solver job failed:', message);
151
+ syncLifecycleMarkers(meta);
152
+ },
153
+ onAnalysis: function (analysis) {
154
+ lastAnalysis = analysis;
155
+ syncLifecycleMarkers();
156
+ },
157
+ onError: function (message) {
158
+ console.error('Solver lifecycle failed:', message);
159
+ syncLifecycleMarkers();
160
+ },
161
+ });
162
+ renderApiGuide();
163
+ updateSolveActionAvailability();
164
+ bootstrapDemoData();
165
+
166
+ window.addEventListener('beforeunload', destroyAllTimelines);
167
+
168
// Kicks off a new solve: refuses while a job is running/paused or demo data
// is unavailable, deletes any finished job, resolves a plan (current edits or
// default demo data), then starts the solver and refreshes lifecycle markers.
function loadAndSolve() {
  if (solver.isRunning() || solver.getLifecycleState() === 'PAUSED' || !canSolve()) return;
  cleanupTerminalJob()
    .then(function (data) {
      // cleanupTerminalJob always resolves null, so the plan comes from here.
      return data || resolvePlanForSolve();
    })
    .then(function (data) {
      return solver.start(data);
    })
    .then(function () {
      syncLifecycleMarkers();
    })
    .catch(function (err) { console.error('Solve start failed:', err); });
}
182
+
183
// Requests a solver pause, then refreshes the lifecycle markers on success.
function pauseSolve() {
  solver.pause()
    .then(function () { syncLifecycleMarkers(); })
    .catch(function (err) { console.error('Pause failed:', err); });
}
188
+
189
// Resumes a paused job, then refreshes the lifecycle markers on success.
function resumeSolve() {
  solver.resume()
    .then(function () { syncLifecycleMarkers(); })
    .catch(function (err) { console.error('Resume failed:', err); });
}
194
+
195
// Cancels the active job, then refreshes the lifecycle markers on success.
function cancelSolve() {
  solver.cancel()
    .then(function () { syncLifecycleMarkers(); })
    .catch(function (err) { console.error('Cancel failed:', err); });
}
200
+
201
// Fetches a score analysis for the current job's snapshot and shows it in
// the analysis modal. No-op when no job exists; fetch errors are swallowed.
function openAnalysis() {
  if (!solver.getJobId()) return;
  solver.analyzeSnapshot()
    .then(function (analysis) {
      lastAnalysis = analysis;
      analysisModal.setBody(buildAnalysisHtml(analysis));
      analysisModal.open();
    })
    .catch(function () {});
}
211
+
212
// Caches a deep copy of the plan and re-renders every surface (overview,
// generated timeline views, data tables) from it.
function renderAll(data) {
  currentPlan = clonePlan(data);
  renderOverview(data);
  renderViews(data);
  renderTables(data);
}
218
+
219
// Resolves the plan to solve: a deep copy of the cached plan when one exists,
// otherwise the default demo plan; rejects when the catalog never loaded.
function resolvePlanForSolve() {
  if (currentPlan) {
    return Promise.resolve(clonePlan(currentPlan));
  }
  if (!demoCatalog.defaultId) {
    return Promise.reject(new Error('demo data catalog is unavailable'));
  }
  return fetchDemoPlan(demoCatalog.defaultId);
}
228
+
229
// Startup sequence: load the demo catalog, clear any stale bootstrap error,
// refresh the API guide (so curl examples use the real default id), fetch and
// render the default demo plan, and re-enable the Solve button. Any failure
// along the chain surfaces through the bootstrap error banner.
function bootstrapDemoData() {
  fetchDemoCatalog()
    .then(function (catalog) {
      demoCatalog = catalog;
      clearBootstrapError();
      renderApiGuide();
      return fetchDemoPlan(catalog.defaultId);
    })
    .then(function (data) {
      renderAll(data);
      updateSolveActionAvailability();
    })
    .catch(function (err) {
      reportBootstrapError(err);
    });
}
245
+
246
// Loads and validates the demo-data catalog. Rejects unless the payload has
// a string defaultId and an availableIds array containing it; returns a
// defensive copy so later mutation of the response can't corrupt state.
function fetchDemoCatalog() {
  return requestJson('/demo-data', 'demo data catalog')
    .then(function (catalog) {
      if (!catalog || typeof catalog.defaultId !== 'string' || !Array.isArray(catalog.availableIds)) {
        throw new Error('demo data catalog is missing defaultId or availableIds');
      }
      if (catalog.availableIds.indexOf(catalog.defaultId) === -1) {
        throw new Error('demo data catalog defaultId is not present in availableIds');
      }
      return {
        defaultId: catalog.defaultId,
        availableIds: catalog.availableIds.slice(),
      };
    });
}
261
+
262
// Fetches one demo plan by id (URL-encoded to be safe for arbitrary ids).
function fetchDemoPlan(demoId) {
  return requestJson('/demo-data/' + encodeURIComponent(demoId), 'demo data "' + demoId + '"');
}
265
+
266
// GETs `path` and decodes the JSON body; non-2xx responses reject with an
// Error that names the resource via `label` plus the HTTP status.
function requestJson(path, label) {
  return fetch(path).then(function (response) {
    if (response.ok) {
      return response.json();
    }
    throw new Error(label + ' returned HTTP ' + response.status);
  });
}
275
+
276
// Solving is allowed only after the demo catalog loaded without error.
function canSolve() {
  return Boolean(!bootstrapError && demoCatalog.defaultId);
}
279
+
280
// Records a bootstrap failure: remembers the message, shows the error banner,
// flags the app element for tests/automation, refreshes the API guide (its
// curl examples fall back to placeholders) and disables the Solve button.
function reportBootstrapError(err) {
  bootstrapError = describeError(err);
  bootstrapNotice.textContent = 'Demo data bootstrap failed: ' + bootstrapError;
  bootstrapNotice.style.display = '';
  app.dataset.bootstrapError = 'true';
  renderApiGuide();
  updateSolveActionAvailability();
  console.error('Demo data bootstrap failed:', err);
}
289
+
290
// Resets all bootstrap-error state: message, banner, and the DOM marker.
function clearBootstrapError() {
  bootstrapError = null;
  bootstrapNotice.textContent = '';
  bootstrapNotice.style.display = 'none';
  delete app.dataset.bootstrapError;
}
296
+
297
// Produces a human-readable message for any thrown value: prefer a real
// Error's message, otherwise stringify it, with a fallback for falsy values.
function describeError(err) {
  var message = err && err.message;
  return message ? message : String(err || 'unknown error');
}
303
+
304
// Enables/disables the header Solve button to mirror canSolve(), keeping the
// aria-disabled attribute in sync and setting a tooltip that explains why
// solving is blocked (failed vs. still-loading bootstrap).
function updateSolveActionAvailability() {
  var solveButton = findHeaderButton('Solve');
  var disabled = !canSolve();
  if (!solveButton) return;
  solveButton.disabled = disabled;
  solveButton.setAttribute('aria-disabled', disabled ? 'true' : 'false');
  solveButton.title = disabled
    ? (bootstrapError ? 'Demo data bootstrap failed.' : 'Loading demo data catalog...')
    : '';
}
314
+
315
// Scans the header's buttons for one whose trimmed text equals `label`;
// returns null when no such button exists.
function findHeaderButton(label) {
  var buttons = header.querySelectorAll('button');
  for (var i = 0; i < buttons.length; i += 1) {
    var text = (buttons[i].textContent || '').trim();
    if (text === label) {
      return buttons[i];
    }
  }
  return null;
}
325
+
326
// Rebuilds the REST API guide panel from the current endpoint list (called
// again after the demo catalog loads so examples use the real default id).
function renderApiGuide() {
  apiGuideContainer.innerHTML = '';
  apiGuideContainer.appendChild(SF.createApiGuide({
    endpoints: buildApiGuideEndpoints(),
  }));
}
332
+
333
// Describes every backend endpoint for the API guide, each with a runnable
// curl example. The demo-data path uses the discovered default id when
// available, or a `{defaultId}` placeholder before bootstrap completes.
function buildApiGuideEndpoints() {
  var defaultDemoPath = demoCatalog.defaultId
    ? '/demo-data/' + demoCatalog.defaultId
    : '/demo-data/{defaultId}';
  return [
    { method: 'GET', path: '/demo-data', description: 'Discover the default and available demo data IDs', curl: buildCurlCommand('GET', '/demo-data') },
    { method: 'GET', path: defaultDemoPath, description: 'Fetch the discovered default demo data', curl: buildCurlCommand('GET', defaultDemoPath) },
    { method: 'POST', path: '/jobs', description: 'Create a retained solving job', curl: buildCurlCommand('POST', '/jobs', { json: true, data: '@plan.json' }) },
    { method: 'GET', path: '/jobs/{id}', description: 'Get current job summary', curl: buildCurlCommand('GET', '/jobs/{id}') },
    { method: 'GET', path: '/jobs/{id}/snapshot', description: 'Fetch the latest retained snapshot', curl: buildCurlCommand('GET', '/jobs/{id}/snapshot') },
    { method: 'GET', path: '/jobs/{id}/analysis?snapshot_revision={n}', description: 'Analyze an exact snapshot revision', curl: buildCurlCommand('GET', '/jobs/{id}/analysis?snapshot_revision=3', { quoteUrl: true }) },
    { method: 'POST', path: '/jobs/{id}/pause', description: 'Request an exact runtime pause', curl: buildCurlCommand('POST', '/jobs/{id}/pause') },
    { method: 'POST', path: '/jobs/{id}/resume', description: 'Resume a paused retained job', curl: buildCurlCommand('POST', '/jobs/{id}/resume') },
    { method: 'POST', path: '/jobs/{id}/cancel', description: 'Cancel a live or paused job', curl: buildCurlCommand('POST', '/jobs/{id}/cancel') },
    { method: 'DELETE', path: '/jobs/{id}', description: 'Delete a terminal retained job', curl: buildCurlCommand('DELETE', '/jobs/{id}') },
    { method: 'GET', path: '/jobs/{id}/events', description: 'Stream job lifecycle updates (SSE)', curl: buildCurlCommand('GET', '/jobs/{id}/events', { stream: true }) },
  ];
}
351
+
352
// Assembles a copy-pasteable curl command for one endpoint. `options` may set
// stream (-N for SSE), json (Content-Type header), quoteUrl (wrap URL in
// quotes, e.g. when it contains `?`) and data (-d payload, appended last).
function buildCurlCommand(method, path, options) {
  var opts = options || {};
  var parts = ['curl'];

  if (opts.stream) {
    parts.push('-N');
  }
  if (method && method !== 'GET') {
    parts.push('-X', method);
  }
  if (opts.json) {
    parts.push('-H', '"Content-Type: application/json"');
  }

  var url = buildApiUrl(path);
  parts.push(opts.quoteUrl ? '"' + url + '"' : url);

  if (opts.data) {
    parts.push('-d', opts.data);
  }

  return parts.join(' ');
}
373
+
374
// Prefixes an endpoint path with the page's origin to make an absolute URL.
function buildApiUrl(path) {
  return currentOrigin() + path;
}
377
+
378
// Returns the page origin, reconstructing it from protocol + host for
// environments where location.origin is unavailable.
function currentOrigin() {
  return window.location.origin || (window.location.protocol + '//' + window.location.host);
}
381
+
382
// Deletes the retained job if it is in a terminal state (not idle, paused, or
// still running) so a new solve can start cleanly. Resolves null either way;
// a failed delete is logged and re-thrown to abort the solve chain.
function cleanupTerminalJob() {
  var state = solver.getLifecycleState();
  if (!solver.getJobId() || state === 'IDLE' || state === 'PAUSED' || solver.isRunning()) {
    return Promise.resolve(null);
  }
  return solver.delete()
    .then(function () {
      lastAnalysis = null;
      syncLifecycleMarkers();
      return null;
    })
    .catch(function (err) {
      console.error('Delete failed:', err);
      throw err;
    });
}
398
+
399
// Mirrors solver state into data-* attributes on the app root (job id,
// snapshot revision, lifecycle state) for styling/automation, then refreshes
// the Solve button. `meta`, when given, supplies a fresher lifecycle state
// than polling the solver.
function syncLifecycleMarkers(meta) {
  var jobId = solver.getJobId();
  var snapshotRevision = solver.getSnapshotRevision();
  var lifecycleState = meta && meta.lifecycleState ? meta.lifecycleState : solver.getLifecycleState();

  if (jobId) {
    app.dataset.jobId = String(jobId);
  } else {
    delete app.dataset.jobId;
  }
  // != null keeps revision 0 (loose compare filters null and undefined).
  if (snapshotRevision != null) {
    app.dataset.snapshotRevision = String(snapshotRevision);
  } else {
    delete app.dataset.snapshotRevision;
  }
  // IDLE is represented by the absence of the marker.
  if (lifecycleState && lifecycleState !== 'IDLE') {
    app.dataset.lifecycleState = lifecycleState;
  } else {
    delete app.dataset.lifecycleState;
  }
  updateSolveActionAvailability();
}
421
+
422
+ function clonePlan(data) {
423
+ return JSON.parse(JSON.stringify(data));
424
+ }
425
+
426
// Rebuilds the overview tab: a summary table (view/constraint counts and the
// plan's score) when generated views exist, otherwise a getting-started hint.
function renderOverview(data) {
  overviewContainer.innerHTML = '';
  if ((uiModel.views || []).length) {
    overviewContainer.appendChild(SF.el(
      'p',
      null,
      'The generated views now mount the canonical solverforge-ui timeline surface for every planning variable declared in your project.'
    ));
    overviewContainer.appendChild(SF.createTable({
      columns: ['Active views', 'Constraints', 'Current score'],
      rows: [[
        String(uiModel.views.length),
        String((uiModel.constraints || []).length),
        String(data.score || '—'),
      ]],
    }));
    return;
  }
  overviewContainer.appendChild(SF.el('p', null, 'No planning variables are declared yet. Use `solverforge generate entity`, `generate fact`, and `generate variable` to shape the app.'));
}
446
+
447
// Re-renders every generated view panel as a timeline, choosing the list- or
// scalar-variable payload builder based on the view kind.
function renderViews(data) {
  (uiModel.views || []).forEach(function (view) {
    var container = document.getElementById('view-' + view.id);
    if (!container) return;
    if (view.kind === 'list') {
      renderTimelinePanel(
        container,
        view.id,
        buildListViewPayload(data, view),
        'This list-variable timeline will appear once the referenced facts and entities contain data.'
      );
    } else {
      renderTimelinePanel(
        container,
        view.id,
        buildScalarViewPayload(data, view),
        'This scalar-variable timeline will appear once the referenced facts and entities contain data.'
      );
    }
  });
}
468
+
469
// Mounts one view's summary + timeline into its container. A null payload
// (no data yet) tears down any existing timeline and shows the empty message.
function renderTimelinePanel(container, viewId, payload, emptyMessage) {
  container.innerHTML = '';
  if (!payload) {
    destroyTimeline(viewId);
    container.appendChild(SF.el('p', null, emptyMessage));
    return;
  }

  container.appendChild(payload.summary);
  container.appendChild(ensureTimeline(viewId, payload.timeline).el);
}
480
+
481
// Returns the cached timeline widget for a view, creating it on first use;
// subsequent renders only push the new model into the existing widget.
function ensureTimeline(viewId, timelineConfig) {
  var timeline = viewTimelines[viewId];
  if (!timeline) {
    timeline = SF.rail.createTimeline(timelineConfig);
    viewTimelines[viewId] = timeline;
    return timeline;
  }

  timeline.setModel(timelineConfig.model);
  return timeline;
}
492
+
493
// Disposes one view's timeline widget (if any) and drops it from the cache.
function destroyTimeline(viewId) {
  var timeline = viewTimelines[viewId];
  if (!timeline) return;
  timeline.destroy();
  delete viewTimelines[viewId];
}
499
+
500
// Disposes every cached timeline; wired to `beforeunload` to free resources.
function destroyAllTimelines() {
  Object.keys(viewTimelines).forEach(function (viewId) {
    destroyTimeline(viewId);
  });
}
505
+
506
// Builds the summary + timeline payload for a scalar planning variable:
// one lane per source fact, with each entity placed in the lane its
// variable-field index points at. Entities whose index is null or out of
// range go to a trailing "Unassigned"/"Unmapped" lane. Returns null when
// either side has no data (caller shows an empty-state message instead).
function buildScalarViewPayload(data, view) {
  var entities = data[view.entityPlural] || [];
  var facts = data[view.sourcePlural] || [];
  if (!entities.length || !facts.length) return null;

  // Index facts by position; the variable field stores a fact index.
  var byIndex = {};
  facts.forEach(function (fact, index) {
    byIndex[index] = fact;
  });

  // Bucket entities per fact; `detached` collects unassigned/invalid ones.
  var assignments = facts.map(function () { return []; });
  var detached = [];
  entities.forEach(function (entity) {
    var idx = entity[view.variableField];
    if (idx == null || byIndex[idx] == null) {
      detached.push(entity);
      return;
    }
    assignments[idx].push(entity);
  });

  var peakLoad = assignments.reduce(function (maxCount, items) {
    return Math.max(maxCount, items.length);
  }, 0);
  // Axis must cover the busiest lane (and at least one slot).
  var horizon = Math.max(peakLoad, detached.length, 1);
  var axis = buildSlotAxis(horizon);
  var lanes = facts.map(function (fact, factIndex) {
    var items = assignments[factIndex] || [];
    return {
      id: view.id + '-lane-' + factIndex,
      label: String(factLabel(fact, factIndex)),
      mode: 'detailed',
      badges: items.length ? [] : ['Empty'],
      stats: [{ label: title(view.entityPlural), value: items.length }],
      items: items.map(function (entity, itemIndex) {
        return buildTimelineItem(
          view.id + '-fact-' + factIndex + '-entity-' + itemIndex,
          itemIndex,
          entityLabel(entity, itemIndex),
          'Assignment ' + String(itemIndex + 1),
          entityLabel(entity, itemIndex)
        );
      }),
    };
  });

  // Extra lane for entities that could not be mapped to a fact.
  if (detached.length) {
    lanes.push({
      id: view.id + '-detached',
      label: view.allowsUnassigned ? 'Unassigned' : 'Unmapped',
      mode: 'detailed',
      badges: [view.allowsUnassigned ? 'Needs assignment' : 'Out of range'],
      stats: [{ label: title(view.entityPlural), value: detached.length }],
      items: detached.map(function (entity, itemIndex) {
        return buildTimelineItem(
          view.id + '-detached-' + itemIndex,
          itemIndex,
          entityLabel(entity, itemIndex),
          view.allowsUnassigned ? 'Awaiting assignment' : 'Invalid source index',
          entityLabel(entity, itemIndex)
        );
      }),
    });
  }

  return {
    summary: buildSummarySection(
      ['Source lanes', title(view.entityPlural), 'Peak load', 'Unassigned'],
      [
        String(facts.length),
        String(entities.length),
        String(peakLoad),
        String(detached.length),
      ]
    ),
    timeline: {
      label: title(view.sourcePlural),
      labelWidth: 280,
      title: view.label,
      subtitle: title(view.entityPlural) + ' grouped by ' + title(view.sourcePlural),
      model: {
        axis: axis,
        lanes: lanes,
      },
    },
  };
}
593
+
594
// Builds the summary table + timeline payload for a "list" planning view,
// where each entity owns an ordered sequence of fact indices stored in
// entity[view.variableField]. Returns null when there is nothing to render.
//
// Fix: the original built an intermediate index->fact object (byIndex) that
// was behaviorally identical to indexing the facts array directly; the map
// build (an extra O(n) pass and allocation) has been removed.
function buildListViewPayload(data, view) {
    var entities = data[view.entityPlural] || [];
    var facts = data[view.sourcePlural] || [];
    if (!entities.length || !facts.length) return null;

    // One row per entity, carrying its (possibly empty) assignment sequence.
    var rows = entities.map(function (entity, entityIndex) {
        var sequence = Array.isArray(entity[view.variableField]) ? entity[view.variableField] : [];
        return {
            entity: entity,
            entityIndex: entityIndex,
            sequence: sequence,
        };
    });

    // Busiest lanes first; ties broken alphabetically by entity label.
    rows.sort(function (left, right) {
        if (right.sequence.length !== left.sequence.length) {
            return right.sequence.length - left.sequence.length;
        }
        return String(entityLabel(left.entity, left.entityIndex)).localeCompare(
            String(entityLabel(right.entity, right.entityIndex))
        );
    });

    var totalItems = rows.reduce(function (sum, row) {
        return sum + row.sequence.length;
    }, 0);
    var longestSequence = rows.reduce(function (maxCount, row) {
        return Math.max(maxCount, row.sequence.length);
    }, 0);
    var emptyEntities = rows.filter(function (row) { return row.sequence.length === 0; }).length;
    // Axis always spans at least one slot so an all-empty view still renders.
    var horizon = Math.max(longestSequence, 1);
    var axis = buildSlotAxis(horizon);

    var lanes = rows.map(function (row) {
        return {
            id: view.id + '-entity-' + row.entityIndex,
            label: entityLabel(row.entity, row.entityIndex),
            mode: 'detailed',
            badges: listLaneBadges(row.sequence.length, longestSequence),
            stats: [{ label: title(view.sourcePlural), value: row.sequence.length }],
            items: row.sequence.map(function (factIndex, sequenceIndex) {
                // Direct array lookup; an out-of-range index yields undefined
                // and factLabel then falls back to the raw index.
                var fact = facts[factIndex];
                return buildTimelineItem(
                    view.id + '-entity-' + row.entityIndex + '-item-' + sequenceIndex,
                    sequenceIndex,
                    factLabel(fact, factIndex),
                    'Position ' + String(sequenceIndex + 1),
                    factLabel(fact, factIndex)
                );
            }),
        };
    });

    return {
        summary: buildSummarySection(
            [title(view.entityPlural), title(view.sourcePlural), 'Longest sequence', 'Empty lanes', 'Average items / lane'],
            [
                String(rows.length),
                String(totalItems),
                String(longestSequence),
                String(emptyEntities),
                rows.length ? (totalItems / rows.length).toFixed(1) : '0.0',
            ]
        ),
        timeline: {
            label: title(view.entityPlural),
            labelWidth: 280,
            title: view.label,
            subtitle: title(view.sourcePlural) + ' ordered inside each ' + title(view.entityPlural),
            model: {
                axis: axis,
                lanes: lanes,
            },
        },
    };
}
675
+
676
// Wraps a one-row stats table (columns + single values row) in the standard
// section container used throughout the page.
function buildSummarySection(columns, row) {
    var table = SF.createTable({ columns: columns, rows: [row] });
    var section = SF.el('div', { className: 'sf-section' });
    section.appendChild(table);
    return section;
}
684
+
685
// Builds the timeline axis for a slot-based horizon: grouped "window" bands,
// one tick per slot, and an initial viewport capped at DEFAULT_VIEWPORT_SLOTS.
function buildSlotAxis(slotCount) {
    var slots = Math.max(slotCount, 1);
    // Larger horizons get coarser grouping so the header stays readable.
    var groupSize;
    if (slots > 24) {
        groupSize = 8;
    } else if (slots > 12) {
        groupSize = 6;
    } else {
        groupSize = 4;
    }

    var days = [];
    var start = 0;
    while (start < slots) {
        var end = Math.min(slots, start + groupSize);
        days.push({
            id: 'window-' + start,
            label: 'Window ' + String(days.length + 1),
            subLabel: slotRangeLabel(start, end),
            startMinute: start * SLOT_MINUTES,
            endMinute: end * SLOT_MINUTES,
        });
        start += groupSize;
    }

    var ticks = [];
    for (var slot = 0; slot < slots; slot += 1) {
        ticks.push({
            id: 'tick-' + slot,
            minute: slot * SLOT_MINUTES,
            label: 'Slot ' + String(slot + 1),
        });
    }

    return {
        startMinute: 0,
        endMinute: slots * SLOT_MINUTES,
        days: days,
        ticks: ticks,
        initialViewport: {
            startMinute: 0,
            endMinute: Math.min(slots, DEFAULT_VIEWPORT_SLOTS) * SLOT_MINUTES,
        },
    };
}
721
+
722
// Creates one timeline item occupying exactly one slot. The tone is derived
// from toneKey when given, otherwise from the label itself.
function buildTimelineItem(id, slotIndex, label, meta, toneKey) {
    var startMinute = slotIndex * SLOT_MINUTES;
    return {
        id: id,
        startMinute: startMinute,
        endMinute: startMinute + SLOT_MINUTES,
        label: String(label),
        meta: meta || '',
        tone: toneForKey(toneKey || label),
    };
}
732
+
733
// Human-readable label for a 1-based, end-exclusive slot range:
// a single slot reads "Slot N", a wider range reads "Slots A-B".
function slotRangeLabel(startSlot, endSlot) {
    var first = startSlot + 1;
    return endSlot - startSlot <= 1
        ? 'Slot ' + String(first)
        : 'Slots ' + String(first) + '-' + String(endSlot);
}
739
+
740
// Badges for a list lane: 'Empty' wins outright; otherwise flag the longest
// lane and single-item lanes (both can apply at once when longest === 1).
function listLaneBadges(length, longestSequence) {
    if (length === 0) {
        return ['Empty'];
    }
    var badges = [];
    if (length === longestSequence) {
        badges.push('Longest');
    }
    if (length === 1) {
        badges.push('Single');
    }
    return badges;
}
747
+
748
// Deterministically maps an arbitrary key to one of the timeline tones by
// folding its string form through a 31-based rolling hash kept in uint32
// range (>>> 0 after each step, matching charCode-by-charCode iteration).
function toneForKey(key) {
    var hash = String(key || '').split('').reduce(function (acc, ch) {
        return ((acc * 31) + ch.charCodeAt(0)) >>> 0;
    }, 0);
    return TIMELINE_TONES[hash % TIMELINE_TONES.length];
}
758
+
759
// Re-renders one table per entity/fact collection that has rows, replacing
// whatever was previously in tablesContainer. Solver bookkeeping keys
// ('score', 'solverStatus') are hidden; columns come from the first row.
function renderTables(data) {
    // Stringifies a single cell: em-dash for null/undefined, comma-joined
    // arrays, JSON for nested objects, String() for everything else.
    function formatCell(value) {
        if (value == null) return '—';
        if (Array.isArray(value)) return value.join(', ');
        if (typeof value === 'object') return JSON.stringify(value);
        return String(value);
    }

    tablesContainer.innerHTML = '';
    var entries = (uiModel.entities || []).concat(uiModel.facts || []);
    entries.forEach(function (entry) {
        var rows = data[entry.plural] || [];
        if (!rows.length) return;
        var cols = Object.keys(rows[0]).filter(function (key) {
            return key !== 'score' && key !== 'solverStatus';
        });
        var values = rows.map(function (row) {
            return cols.map(function (key) { return formatCell(row[key]); });
        });
        var section = SF.el('div', { className: 'sf-section' });
        section.appendChild(SF.el('h3', null, entry.label));
        section.appendChild(SF.createTable({ columns: cols, rows: values }));
        tablesContainer.appendChild(section);
    });
}
780
+
781
// Builds the score-analysis HTML: an overall score line plus one table row
// per constraint (name / type / score / match count). Dynamic text goes
// through SF.escHtml; matchCount is numeric so it is emitted directly.
function buildAnalysisHtml(analysis) {
    if (!analysis || !analysis.constraints) return '<p>No analysis available.</p>';
    var parts = ['<p><strong>Score:</strong> ' + SF.escHtml(analysis.score) + '</p>'];
    parts.push('<table class="sf-table"><thead><tr><th>Constraint</th><th>Type</th><th>Score</th><th>Matches</th></tr></thead><tbody>');
    analysis.constraints.forEach(function (constraint) {
        var matchCount = constraint.matchCount != null
            ? constraint.matchCount
            : (constraint.matches ? constraint.matches.length : 0);
        parts.push(
            '<tr><td>' + SF.escHtml(constraint.name) +
            '</td><td>' + SF.escHtml(constraint.constraintType || constraint.type || '') +
            '</td><td>' + SF.escHtml(constraint.score) +
            '</td><td>' + matchCount + '</td></tr>'
        );
    });
    parts.push('</tbody></table>');
    return parts.join('');
}
792
+
793
// Display label for a fact: prefer name, then id, then the caller-supplied
// fallback (typically the array index).
//
// Fix: the original stringified the fallback only on the !fact path, so a
// fact with no name/id (or a numeric id) could leak a non-string out. All
// paths now return a string, matching how entityLabel is used as UI text.
function factLabel(fact, fallback) {
    if (!fact) return String(fallback);
    return String(fact.name || fact.id || fallback);
}
797
+
798
// Display label for an entity: prefer name, then id, then the caller-supplied
// fallback (typically the array index).
//
// Fix: the original stringified the fallback only on the !entity path, so an
// entity with no name/id (or a numeric id) could return a non-string. All
// paths now return a string; call sites concatenate or display it directly.
function entityLabel(entity, fallback) {
    if (!entity) return String(fallback);
    return String(entity.name || entity.id || fallback);
}
802
+
803
// Title-cases an identifier-ish string: underscores become spaces, then the
// first word-character of each word is upper-cased. Nullish input yields ''.
function title(text) {
    var spaced = String(text || '').replace(/_/g, ' ');
    return spaced.replace(/\b\w/g, function (match) {
        return match.toUpperCase();
    });
}
808
+ })();
static/generated/ui-model.json ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ {
2
+ "entities": [],
3
+ "facts": [],
4
+ "constraints": [],
5
+ "views": []
6
+ }
static/index.html ADDED
@@ -0,0 +1,17 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
<!-- Single-page shell for the SolverForge UI: sf.js renders the framework
     chrome into #sf-app, then app.js wires the model-specific views. -->
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>solverforge-fsr — SolverForge</title>
    <!-- Framework assets are served under /sf/ by the axum static route. -->
    <link rel="stylesheet" href="/sf/sf.css">
    <link rel="stylesheet" href="/sf/vendor/fontawesome/css/fontawesome.min.css">
    <link rel="stylesheet" href="/sf/vendor/fontawesome/css/solid.min.css">
    <link rel="icon" href="/sf/img/ouroboros.svg" type="image/svg+xml">
</head>
<body>
    <!-- Mount point populated entirely by the scripts below. -->
    <div id="sf-app"></div>
    <!-- Order matters: sf.js must define the SF global before app.js runs. -->
    <script src="/sf/sf.js"></script>
    <script src="/app.js"></script>
</body>
</html>
static/sf-config.json ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ {
2
+ "title": "solverforge-fsr",
3
+ "subtitle": "Constraint Optimizer"
4
+ }