use axum::{
    extract::{Path, State},
    http::StatusCode,
    routing::{delete, get, post, put},
    Json, Router,
};
use chrono::{NaiveDate, NaiveDateTime};
use parking_lot::RwLock;
use serde::{Deserialize, Serialize};
use std::collections::{HashMap, HashSet};
use std::sync::Arc;
use uuid::Uuid;

use crate::demo_data::{self, DemoData};
use crate::domain::{Employee, EmployeeSchedule, Shift};

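/// A solve job: the best solution found so far plus the solver's lifecycle
/// status (`"SOLVING"` or `"NOT_SOLVING"`).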
struct SolveJob {
    solution: EmployeeSchedule,
    solver_status: String,
}

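/// Shared application state: all solve jobs, keyed by the id that
/// `create_schedule` returns.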
pub struct AppState {
    jobs: RwLock<HashMap<String, SolveJob>>,
}

impl AppState {
    pub fn new() -> Self {
        Self {
            jobs: RwLock::new(HashMap::new()),
        }
    }
}

impl Default for AppState {
    fn default() -> Self {
        Self::new()
    }
}

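/// Wire representation of an employee. The date lists are optional in
/// requests and default to empty.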
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct EmployeeDto {
    pub name: String,
    pub skills: Vec<String>,
    #[serde(default)]
    pub unavailable_dates: Vec<NaiveDate>,
    #[serde(default)]
    pub undesired_dates: Vec<NaiveDate>,
    #[serde(default)]
    pub desired_dates: Vec<NaiveDate>,
}

impl From<&Employee> for EmployeeDto {
    fn from(e: &Employee) -> Self {
        Self {
            name: e.name.clone(),
            skills: e.skills.iter().cloned().collect(),
            unavailable_dates: e.unavailable_dates.iter().cloned().collect(),
            undesired_dates: e.undesired_dates.iter().cloned().collect(),
            desired_dates: e.desired_dates.iter().cloned().collect(),
        }
    }
}

impl EmployeeDto {
    fn to_employee(&self, index: usize) -> Employee {
        let unavailable_dates: HashSet<NaiveDate> =
            self.unavailable_dates.iter().cloned().collect();
        let undesired_dates: HashSet<NaiveDate> =
            self.undesired_dates.iter().cloned().collect();
        let desired_dates: HashSet<NaiveDate> =
            self.desired_dates.iter().cloned().collect();

        // The domain model also carries sorted Vec copies of each date set.
        let mut unavailable_days: Vec<NaiveDate> = unavailable_dates.iter().copied().collect();
        unavailable_days.sort();
        let mut undesired_days: Vec<NaiveDate> = undesired_dates.iter().copied().collect();
        undesired_days.sort();
        let mut desired_days: Vec<NaiveDate> = desired_dates.iter().copied().collect();
        desired_days.sort();

        Employee {
            index,
            name: self.name.clone(),
            skills: self.skills.iter().cloned().collect(),
            unavailable_dates,
            undesired_dates,
            desired_dates,
            unavailable_days,
            undesired_days,
            desired_days,
        }
    }
}

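/// Wire representation of a shift; `employee` is `None` while the shift is
/// unassigned.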
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ShiftDto {
    pub id: String,
    pub start: NaiveDateTime,
    pub end: NaiveDateTime,
    pub location: String,
    pub required_skill: String,
    pub employee: Option<EmployeeDto>,
}

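/// Wire representation of a whole schedule. `solver_status` is output-only
/// (`skip_deserializing`); `score` is optional on input.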
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ScheduleDto {
    pub employees: Vec<EmployeeDto>,
    pub shifts: Vec<ShiftDto>,
    #[serde(default)]
    pub score: Option<String>,
    #[serde(default, skip_deserializing)]
    pub solver_status: Option<String>,
}

impl ScheduleDto {
    pub fn from_schedule(schedule: &EmployeeSchedule, solver_status: Option<String>) -> Self {
        let employees: Vec<EmployeeDto> =
            schedule.employees.iter().map(EmployeeDto::from).collect();

        let shifts: Vec<ShiftDto> = schedule
            .shifts
            .iter()
            .map(|s| ShiftDto {
                id: s.id.clone(),
                start: s.start,
                end: s.end,
                location: s.location.clone(),
                required_skill: s.required_skill.clone(),
                employee: s
                    .employee_idx
                    .and_then(|idx| schedule.employees.get(idx))
                    .map(EmployeeDto::from),
            })
            .collect();

        Self {
            employees,
            shifts,
            score: schedule.score.map(|s| format!("{}", s)),
            solver_status,
        }
    }

    pub fn to_domain(&self) -> EmployeeSchedule {
        let employees: Vec<Employee> = self
            .employees
            .iter()
            .enumerate()
            .map(|(i, dto)| dto.to_employee(i))
            .collect();
        // The wire format embeds employees in shifts by value; resolve them
        // back to indices by name.
        let name_to_idx: HashMap<&str, usize> = employees
            .iter()
            .map(|e| (e.name.as_str(), e.index))
            .collect();

        let shifts: Vec<Shift> = self
            .shifts
            .iter()
            .map(|s| Shift {
                id: s.id.clone(),
                start: s.start,
                end: s.end,
                location: s.location.clone(),
                required_skill: s.required_skill.clone(),
                employee_idx: s
                    .employee
                    .as_ref()
                    .and_then(|e| name_to_idx.get(e.name.as_str()).copied()),
            })
            .collect();

        EmployeeSchedule::new(employees, shifts)
    }
}

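/// Builds the HTTP API router. Path captures use the `{param}` syntax
/// (axum 0.8).
///
/// A minimal sketch of serving the router (hypothetical address and setup,
/// assuming a Tokio runtime):
///
/// ```ignore
/// let app = router(std::sync::Arc::new(AppState::new()));
/// let listener = tokio::net::TcpListener::bind("0.0.0.0:8080").await?;
/// axum::serve(listener, app).await?;
/// ```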
pub fn router(state: Arc<AppState>) -> Router {
    Router::new()
        .route("/health", get(health))
        .route("/info", get(info))
        .route("/demo-data", get(list_demo_data))
        .route("/demo-data/{id}", get(get_demo_data))
        .route("/schedules", post(create_schedule))
        .route("/schedules", get(list_schedules))
        .route("/schedules/analyze", put(analyze_schedule))
        .route("/schedules/{id}", get(get_schedule))
        .route("/schedules/{id}/status", get(get_schedule_status))
        .route("/schedules/{id}", delete(stop_solving))
        .with_state(state)
}

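/// Body of `GET /health`.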
#[derive(Debug, Serialize)]
pub struct HealthResponse {
    pub status: &'static str,
}

async fn health() -> Json<HealthResponse> {
    Json(HealthResponse { status: "UP" })
}

#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct InfoResponse {
    pub name: &'static str,
    pub version: &'static str,
    pub solver_engine: &'static str,
}

async fn info() -> Json<InfoResponse> {
    Json(InfoResponse {
        name: "Employee Scheduling",
        version: env!("CARGO_PKG_VERSION"),
        solver_engine: "SolverForge-RS",
    })
}

async fn list_demo_data() -> Json<Vec<&'static str>> {
    Json(demo_data::list_demo_data())
}

async fn get_demo_data(Path(id): Path<String>) -> Result<Json<ScheduleDto>, StatusCode> {
    match id.parse::<DemoData>() {
        Ok(demo) => {
            let schedule = demo_data::generate(demo);
            Ok(Json(ScheduleDto::from_schedule(&schedule, None)))
        }
        Err(_) => Err(StatusCode::NOT_FOUND),
    }
}

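/// `POST /schedules`: registers a job, kicks off solving in the background,
/// and immediately returns the job id as plain text.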
async fn create_schedule(
    State(state): State<Arc<AppState>>,
    Json(dto): Json<ScheduleDto>,
) -> String {
    let id = Uuid::new_v4().to_string();
    let schedule = dto.to_domain();

    // Register the job before solving starts so polling endpoints can find it.
    {
        let mut jobs = state.jobs.write();
        jobs.insert(
            id.clone(),
            SolveJob {
                solution: schedule.clone(),
                solver_status: "SOLVING".to_string(),
            },
        );
    }

    // The solver streams improved solutions through this channel.
    let (tx, mut rx) = tokio::sync::mpsc::unbounded_channel();
    let job_id = id.clone();
    let state_clone = state.clone();

    tokio::spawn(async move {
        while let Some((solution, _score)) = rx.recv().await {
            let mut jobs = state_clone.jobs.write();
            if let Some(job) = jobs.get_mut(&job_id) {
                job.solution = solution;
            }
        }

        // The sender was dropped, so the solver has finished.
        let mut jobs = state_clone.jobs.write();
        if let Some(job) = jobs.get_mut(&job_id) {
            job.solver_status = "NOT_SOLVING".to_string();
        }
    });

    // Solving is CPU-bound; run it on rayon's thread pool, not tokio's.
    use solverforge::Solvable;
    rayon::spawn(move || {
        schedule.solve(None, tx);
    });

    id
}

async fn list_schedules(State(state): State<Arc<AppState>>) -> Json<Vec<String>> {
    Json(state.jobs.read().keys().cloned().collect())
}

async fn get_schedule(
    State(state): State<Arc<AppState>>,
    Path(id): Path<String>,
) -> Result<Json<ScheduleDto>, StatusCode> {
    match state.jobs.read().get(&id) {
        Some(job) => Ok(Json(ScheduleDto::from_schedule(
            &job.solution,
            Some(job.solver_status.clone()),
        ))),
        None => Err(StatusCode::NOT_FOUND),
    }
}

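/// Body of `GET /schedules/{id}/status`: just the current best score.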
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct StatusResponse {
    pub score: Option<String>,
}

async fn get_schedule_status(
    State(state): State<Arc<AppState>>,
    Path(id): Path<String>,
) -> Result<Json<StatusResponse>, StatusCode> {
    match state.jobs.read().get(&id) {
        Some(job) => Ok(Json(StatusResponse {
            score: job.solution.score.map(|s| format!("{}", s)),
        })),
        None => Err(StatusCode::NOT_FOUND),
    }
}

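/// `DELETE /schedules/{id}`: forgets the job. Returns 204 if it existed,
/// 404 otherwise.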
async fn stop_solving(
    State(state): State<Arc<AppState>>,
    Path(id): Path<String>,
) -> StatusCode {
    if state.jobs.write().remove(&id).is_some() {
        StatusCode::NO_CONTENT
    } else {
        StatusCode::NOT_FOUND
    }
}

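/// Per-constraint breakdown returned by `PUT /schedules/analyze`.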
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct ConstraintAnalysisDto {
    pub name: String,
    #[serde(rename = "type")]
    pub constraint_type: String,
    pub weight: String,
    pub score: String,
    pub matches: Vec<ConstraintMatchDto>,
}

#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct ConstraintMatchDto {
    pub score: String,
    pub justification: String,
}

#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct AnalyzeResponse {
    pub score: String,
    pub constraints: Vec<ConstraintAnalysisDto>,
}

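/// `PUT /schedules/analyze`: scores the submitted schedule without solving it
/// and reports each constraint's weight, score, and matches.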
async fn analyze_schedule(Json(dto): Json<ScheduleDto>) -> Json<AnalyzeResponse> {
    use crate::constraints::create_fluent_constraints;
    use solverforge::{ConstraintSet, TypedScoreDirector};

    let schedule = dto.to_domain();

    let constraints = create_fluent_constraints();
    let mut director = TypedScoreDirector::new(schedule, constraints);

    let score = director.calculate_score();

    // Re-evaluate per constraint to collect individual matches and
    // justifications.
    let analyses = director
        .constraints()
        .evaluate_detailed(director.working_solution());

    let constraints_dto: Vec<ConstraintAnalysisDto> = analyses
        .into_iter()
        .map(|analysis| ConstraintAnalysisDto {
            name: analysis.constraint_ref.name.clone(),
            constraint_type: if analysis.is_hard { "hard" } else { "soft" }.to_string(),
            weight: format!("{}", analysis.weight),
            score: format!("{}", analysis.score),
            matches: analysis
                .matches
                .iter()
                .map(|m| ConstraintMatchDto {
                    score: format!("{}", m.score),
                    justification: m.justification.description.clone(),
                })
                .collect(),
        })
        .collect();

    Json(AnalyzeResponse {
        score: format!("{}", score),
        constraints: constraints_dto,
    })
}