Skip to content

Instantly share code, notes, and snippets.

@dgehriger
Created April 16, 2026 21:27
Show Gist options
  • Select an option

  • Save dgehriger/0b4994fa8b0111f4cbe0b6f38f1f27bb to your computer and use it in GitHub Desktop.

Select an option

Save dgehriger/0b4994fa8b0111f4cbe0b6f38f1f27bb to your computer and use it in GitHub Desktop.
Repository Wiki — generated by GitNexus
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>cdim-plan — Wiki</title>
<!-- SECURITY FIX: the original loaded marked/mermaid from "cdn.jsdelivr.net",
     a look-alike proxy of jsDelivr, not the official CDN. Executing third-party JS from an
     untrusted mirror is a supply-chain risk — point at the real cdn.jsdelivr.net origin.
     TODO(review): add integrity="sha384-…" + crossorigin="anonymous" (SRI) once the
     published hashes for these exact bundle versions are confirmed. -->
<script src="https://cdn.jsdelivr.net/npm/marked@11.0.0/marked.min.js"></script>
<script src="https://cdn.jsdelivr.net/npm/mermaid@11/dist/mermaid.min.js"></script>
<style>
/* ---- Reset & design tokens ------------------------------------------- */
*{margin:0;padding:0;box-sizing:border-box}
:root{
--bg:#ffffff;--sidebar-bg:#f8f9fb;--border:#e5e7eb;
--text:#1e293b;--text-muted:#64748b;--primary:#2563eb;
--primary-soft:#eff6ff;--hover:#f1f5f9;--code-bg:#f1f5f9;
--radius:8px;--shadow:0 1px 3px rgba(0,0,0,.08);
}
/* ---- Base typography -------------------------------------------------- */
body{font-family:-apple-system,BlinkMacSystemFont,'Segoe UI',Roboto,sans-serif;
line-height:1.65;color:var(--text);background:var(--bg)}
/* ---- Two-pane layout: fixed sidebar + scrolling content --------------- */
.layout{display:flex;min-height:100vh}
.sidebar{width:280px;background:var(--sidebar-bg);border-right:1px solid var(--border);
position:fixed;top:0;left:0;bottom:0;overflow-y:auto;padding:24px 16px;
display:flex;flex-direction:column;z-index:10}
/* margin-left mirrors the fixed sidebar width so content clears it */
.content{margin-left:280px;flex:1;padding:48px 64px;max-width:960px}
/* ---- Sidebar header & navigation tree --------------------------------- */
.sidebar-header{margin-bottom:20px;padding-bottom:16px;border-bottom:1px solid var(--border)}
.sidebar-title{font-size:16px;font-weight:700;color:var(--text);display:flex;align-items:center;gap:8px}
.sidebar-title svg{flex-shrink:0}
.sidebar-meta{font-size:11px;color:var(--text-muted);margin-top:6px}
.nav-section{margin-bottom:2px}
/* nav items truncate long page titles with an ellipsis */
.nav-item{display:block;padding:7px 12px;border-radius:var(--radius);cursor:pointer;
font-size:13px;color:var(--text);text-decoration:none;transition:all .15s;
white-space:nowrap;overflow:hidden;text-overflow:ellipsis}
.nav-item:hover{background:var(--hover)}
.nav-item.active{background:var(--primary-soft);color:var(--primary);font-weight:600}
.nav-item.overview{font-weight:600;margin-bottom:4px}
/* nested pages get an indented left-border "tree" line */
.nav-children{padding-left:14px;border-left:1px solid var(--border);margin-left:12px}
.nav-group-label{font-size:11px;font-weight:600;color:var(--text-muted);
text-transform:uppercase;letter-spacing:.5px;padding:12px 12px 4px;user-select:none}
/* margin-top:auto pins the footer to the bottom of the flex column */
.sidebar-footer{margin-top:auto;padding-top:16px;border-top:1px solid var(--border);
font-size:11px;color:var(--text-muted);text-align:center}
/* ---- Rendered-markdown typography inside the content pane ------------- */
.content h1{font-size:28px;font-weight:700;margin-bottom:8px;line-height:1.3}
.content h2{font-size:22px;font-weight:600;margin:32px 0 12px;padding-bottom:6px;border-bottom:1px solid var(--border)}
.content h3{font-size:17px;font-weight:600;margin:24px 0 8px}
.content h4{font-size:15px;font-weight:600;margin:20px 0 6px}
.content p{margin:12px 0}
.content ul,.content ol{margin:12px 0 12px 24px}
.content li{margin:4px 0}
.content a{color:var(--primary);text-decoration:none}
.content a:hover{text-decoration:underline}
.content blockquote{border-left:3px solid var(--primary);padding:8px 16px;margin:16px 0;
background:var(--primary-soft);border-radius:0 var(--radius) var(--radius) 0;
color:var(--text-muted);font-size:14px}
/* inline code chips vs. dark full-width code blocks */
.content code{font-family:'SF Mono',Consolas,'Courier New',monospace;font-size:13px;
background:var(--code-bg);padding:2px 6px;border-radius:4px}
.content pre{background:#1e293b;color:#e2e8f0;border-radius:var(--radius);padding:16px;
overflow-x:auto;margin:16px 0}
.content pre code{background:none;padding:0;font-size:13px;line-height:1.6;color:inherit}
.content table{border-collapse:collapse;width:100%;margin:16px 0}
.content th,.content td{border:1px solid var(--border);padding:8px 12px;text-align:left;font-size:14px}
.content th{background:var(--sidebar-bg);font-weight:600}
.content img{max-width:100%;border-radius:var(--radius)}
.content hr{border:none;border-top:1px solid var(--border);margin:32px 0}
.content .mermaid{margin:20px 0;text-align:center}
/* ---- Mobile: hamburger toggle + slide-in sidebar ---------------------- */
.menu-toggle{display:none;position:fixed;top:12px;left:12px;z-index:20;
background:var(--bg);border:1px solid var(--border);border-radius:var(--radius);
padding:8px 12px;cursor:pointer;font-size:18px;box-shadow:var(--shadow)}
@media(max-width:768px){
.sidebar{transform:translateX(-100%);transition:transform .2s}
.sidebar.open{transform:translateX(0);box-shadow:2px 0 12px rgba(0,0,0,.1)}
.content{margin-left:0;padding:24px 20px;padding-top:56px}
.menu-toggle{display:block}
}
/* ---- Placeholder shown before the JS renders a page ------------------- */
.empty-state{text-align:center;padding:80px 20px;color:var(--text-muted)}
.empty-state h2{font-size:20px;margin-bottom:8px;border:none}
</style>
</head>
<body>
<!-- Hamburger toggle; hidden on desktop, shown under 768px by the @media rule.
     Presumably the inline script toggles .open on #sidebar — TODO confirm (script truncated). -->
<button class="menu-toggle" id="menu-toggle" aria-label="Toggle menu">&#9776;</button>
<div class="layout">
<nav class="sidebar" id="sidebar">
<div class="sidebar-header">
<div class="sidebar-title">
<!-- Decorative open-book glyph next to the repo name -->
<svg width="18" height="18" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2"><path d="M2 3h6a4 4 0 014 4v14a3 3 0 00-3-3H2z"/><path d="M22 3h-6a4 4 0 00-4 4v14a3 3 0 013-3h7z"/></svg>
cdim-plan
</div>
<!-- Filled in by the page script at runtime (empty in static markup) -->
<div class="sidebar-meta" id="meta-info"></div>
</div>
<!-- Navigation tree container; presumably built client-side from PAGES — TODO confirm -->
<div id="nav-tree"></div>
<div class="sidebar-footer">Generated by GitNexus</div>
</nav>
<!-- Rendered wiki page goes here; .empty-state is replaced once a page loads -->
<main class="content" id="content">
<div class="empty-state"><h2>Loading…</h2></div>
</main>
</div>
<script>
var PAGES = {"backend-adapters-persistence":"# Backend Adapters & Persistence\n\nBackend Adapters & Persistence\n=============================\n\nPurpose\n-------\nThis module contains concrete persistence implementations for the application ports defined in cm_plan.domain. It provides two interchangeable backends:\n\n- SQLite implementation (src/cm_plan/adapters/sqlite_adapter.py)\n- PostgreSQL implementation (src/cm_plan/adapters/pg_adapter.py)\n\nBoth adapters store the same domain objects (Planning, SemesterContext, RuleSet, AuditEntry, User) as JSON blobs and expose repository classes with the same method names so the application can swap backends by changing the connection factory used by dependency plumbing.\n\nKey concepts\n------------\n- Data model objects are serialized to JSON (Pydantic model_dump_json when available) and stored as text/JSONB.\n- Each \"repository\" class corresponds to a domain persistence port:\n - Planning persistence: SqlitePlanningRepository / PgPlanningRepository\n - Semester reference data: SqliteSemesterContextRepository / PgSemesterContextRepository\n - Rule sets: SqliteRuleSetRepository / PgRuleSetRepository\n - Audit journal: SqliteAuditLog / PgAuditLog\n - Users: SqliteUserRepository / PgUserRepository\n- Both adapters ensure their DB schema exists on connection (connect / pg_connect call _ensure_schema).\n- Write operations call commit after the transaction in SQLite and explicitly commit on the psycopg connection in PostgreSQL.\n\nMermaid overview (light)\n------------------------\nA small diagram to show how connection factories and repositories relate:\n\n```mermaid\nflowchart LR\n subgraph ConnFactories\n A[connect(db_path)] --> C[sqlite3.Connection]\n B[pg_connect(dsn)] --> D[psycopg.Connection]\n end\n C --> R1[SqlitePlanningRepository]\n C --> R2[SqliteUserRepository]\n D --> R3[PgPlanningRepository]\n D --> R4[PgUserRepository]\n R1 --> M[Planning (domain)]\n R3 --> M\n```\n\nImplementation 
notes\n--------------------\nSerialization\n- _serialize(obj: Any) -> str is used in both adapters.\n - If the object has model_dump_json (Pydantic v2 style), it is used.\n - Otherwise json.dumps(obj, default=str) is used.\n- Repositories persist model_dump_json strings for domain objects and use model_validate_json(...) on reads to reconstruct domain models.\n\nSchema differences\n- SQLite stores JSON blobs as TEXT and timestamps as TEXT using ISO 8601 strings.\n- PostgreSQL uses JSONB for the data columns and TIMESTAMPTZ for timestamps. The DDL for PostgreSQL is defined in _SCHEMA (pg_adapter).\n- Both schemas define indexes for audit_log to allow fast lookups by planning_id, user and action.\n\nConnection factories\n- sqlite_adapter.connect(db_path: str | Path = \":memory:\") -> sqlite3.Connection\n - Sets PRAGMA journal_mode=WAL and PRAGMA foreign_keys=ON.\n - Calls _ensure_schema(conn) to create tables if missing.\n - Use \":memory:\" for tests or a file path in production.\n- pg_adapter.pg_connect(dsn: str) -> psycopg.Connection[Any]\n - Opens a psycopg connection with autocommit=False.\n - Calls _ensure_schema(conn) and returns the connection.\n - dsn is a libpq-style connection string (e.g. 
postgresql://user:pass@host:5432/cm_plan).\n\nRepositories — public API and behaviors\n--------------------------------------\n\nCommon patterns\n- Read methods that return domain models call Model.model_validate_json(json_text).\n- Write methods serialize models and commit changes on the connection.\n- The code tolerates psycopg returning JSON as Python objects: when reading row[0] it checks whether it's already a str and calls json.dumps(row[0]) if not.\n\nSqlitePlanningRepository / PgPlanningRepository\n- Purpose: versioned persistence of Planning entities.\n- Constructor:\n - Sqlite: SqlitePlanningRepository(conn: sqlite3.Connection) — calls _ensure_schema(conn).\n - Pg: PgPlanningRepository(conn: psycopg.Connection[Any]) — does not call _ensure_schema here (pg_connect does).\n- Methods:\n - save(planning: Planning) -> None\n - Computes next version by SELECT MAX(version) for the planning_id.\n - Inserts a new row with the serialized planning into plannings and commits.\n - get(planning_id: uuid.UUID, version: int | None = None) -> Planning | None\n - If version is None returns latest (ORDER BY version DESC LIMIT 1).\n - Returns None if not found.\n - list_versions(planning_id: uuid.UUID) -> list[int]\n - Returns list of version numbers ordered ascending.\n - set_status(planning_id: uuid.UUID, version: int, status: PlanningStatus) -> None\n - Loads the stored Planning JSON, validates into a Planning, mutates planning.status and updates published_at when status == PlanningStatus.PUBLIE.\n - Writes back updated JSON into the same row (SQLite UPDATE or PostgreSQL UPDATE) and commits.\n - PostgreSQL implementation uses COALESCE(%s, published_at) so published_at is only set when transitioning to PUBLIE.\n - list_plannings(status: PlanningStatus | None = None) -> list[Planning]\n - Returns the latest version per planning_id (subquery selects MAX(version) per planning_id).\n - If status is provided, filters by that status on the latest 
version.\n\nSqliteSemesterContextRepository / PgSemesterContextRepository\n- Purpose: persist SemesterContext reference data keyed by semester_id.\n- Methods:\n - save(ctx: SemesterContext, semester_id: str | None = None) -> str\n - Computes a default semester_id if not provided: \"{semester_start}_{semester_end}\" using ISO dates.\n - Inserts or replaces (SQLite) / INSERT ... ON CONFLICT DO UPDATE (Postgres) the data JSON.\n - Commits and returns the semester_id used.\n - get(semester_id: str) -> SemesterContext | None\n - Returns None if not found; otherwise reconstructs with SemesterContext.model_validate_json(...)\n - list_semesters() -> list[str]\n\nSqliteRuleSetRepository / PgRuleSetRepository\n- Purpose: store RuleSet objects (versioned logically but stored keyed by ruleset_id with fields version, name).\n- Methods:\n - save(ruleset: RuleSet) -> None\n - Inserts or replaces / ON CONFLICT DO UPDATE; commits.\n - get(ruleset_id: uuid.UUID) -> RuleSet | None\n - list_rulesets() -> list[RuleSet]\n\nSqliteAuditLog / PgAuditLog\n- Purpose: write-ahead audit journal of actions.\n- Methods:\n - record(*, action: str, user: str, detail: str = \"\", payload_before: str = \"\", payload_after: str = \"\", planning_id: uuid.UUID | None = None) -> None\n - Inserts an audit row with generated UUID id and timestamp datetime.now().isoformat() and commits.\n - query(*, planning_id: uuid.UUID | None = None, user: str | None = None, action: str | None = None, since: datetime | None = None, limit: int = 100) -> list[AuditEntry]\n - Builds a WHERE clause dynamically from provided filters and returns AuditEntry domain objects (converting id to uuid.UUID and timestamp to datetime).\n - Note: PostgreSQL rows may have timestamp as a datetime already; the implementation handles both str and datetime for timestamp.\n - purge(before: datetime) -> int\n - Deletes rows older than before and returns number deleted.\n\nSqliteUserRepository / PgUserRepository\n- Purpose: CRUD for 
authentication users, used by get_current_user dependency.\n- Methods:\n - save(user: User) -> None\n - Roles are stored as JSON array of role names (e.g. [\"LECTEUR\"]). In SQLite roles is TEXT, in Postgres roles is JSONB.\n - For SQLite is_active is stored as INTEGER 0/1; PostgreSQL uses BOOLEAN.\n - Commits after insert/update.\n - get_by_username(username: str) -> User | None\n - get_by_id(user_id: uuid.UUID) -> User | None\n - PgUserRepository.get_by_id calls _row_to_user(row) internally (see call graph).\n - list_users() -> list[User]\n - delete(user_id: uuid.UUID) -> bool\n - Returns True if a row was deleted; commits after delete.\n\nRepository usage examples\n-------------------------\nTypical usage in application code (actual DI is handled elsewhere in the project):\n\n- SQLite\n - conn = connect(\":memory:\") # or path to DB file\n - planning_repo = SqlitePlanningRepository(conn)\n - planning_repo.save(planning)\n - p = planning_repo.get(planning_id)\n\n- PostgreSQL\n - conn = pg_connect(\"postgresql://user:pass@host/db\")\n - planning_repo = PgPlanningRepository(conn)\n - planning_repo.save(planning)\n\nIntegration with the rest of the codebase\n-----------------------------------------\n- Domain models consumed/produced:\n - Planning (cm_plan.domain.types)\n - SemesterContext (cm_plan.domain.types)\n - RuleSet (cm_plan.domain.rules.RuleSet)\n - AuditEntry (cm_plan.domain.ports.AuditEntry)\n - User (cm_plan.domain.auth.User)\n - AppRole (cm_plan.domain.types.AppRole)\n - PlanningStatus (cm_plan.domain.types.PlanningStatus)\n- The API dependency get_current_user uses either SqliteUserRepository or PgUserRepository depending on configured DI. 
The call flow in tests shows Get_current_user → get_user_repo → SqliteUserRepository when running with SQLite in tests; a similar flow exists for PostgreSQL.\n- Tests in tests/test_persistence.py exercise the SQLite repositories via connect(...), exercising save/get/list_versions/set_status/list_plannings and the SemesterContext repository.\n\nConcurrency and transactional behavior\n-------------------------------------\n- SQLite:\n - connect(...) sets check_same_thread=False to allow sharing the connection across threads (careful: sqlite3 connections are not fully thread-safe for concurrent writes).\n - Each write operation immediately commits.\n - PRAGMA journal_mode=WAL is set to improve concurrent reads.\n- PostgreSQL:\n - psycopg.connect(..., autocommit=False) is used; repository write methods explicitly call conn.commit() after a cursor block.\n - pg_adapter uses cursor(row_factory=tuple_row) to get tuples back consistently.\n - _ensure_schema(conn) runs the DDL once on connection.\n\nEdge cases and implementation details\n------------------------------------\n- JSON handling:\n - PostgreSQL may return JSONB columns as Python objects; the code normalizes by json.dumps(row[0]) if the returned value is not already a str before calling model_validate_json.\n - SQLite stores JSON as text; model_validate_json consumes the stored string.\n- Published timestamp on status change:\n - set_status sets published_at to datetime.now().isoformat() only when status == PlanningStatus.PUBLIE; otherwise published_at remains unchanged.\n- Audit timestamp conversion:\n - PgAuditLog.query handles timestamps returned either as strings or already as datetime objects from psycopg.\n- Role conversion:\n - Users.roles are stored as JSON arrays of role strings; when reconstructing User objects the code maps back to AppRole enums.\n\nTesting notes\n-------------\n- The test suite uses the SQLite adapter and connect(\":memory:\") in tests/test_persistence.py; ensure _ensure_schema is 
invoked before repository use.\n- Many tests rely on the deterministic behavior of version numbering (next version is MAX(version) + 1).\n- When adding fields to domain models, remember persistence stores full model_dump_json; backward compatibility of model_validate_json is the responsibility of domain model evolutions.\n\nExtending / modifying\n---------------------\n- To add a new repository:\n - Mirror the SQLite API and implement the equivalent PostgreSQL queries in pg_adapter so both backends expose the same methods.\n - Use _serialize to store models and model_validate_json to read them back.\n - Update connection factories to ensure schema changes are applied.\n- To add new columns or indexes:\n - Update both _SCHEMA (pg_adapter) and _SCHEMA (sqlite_adapter) to keep schemas aligned.\n - For PostgreSQL, consider adding migration tooling if schema changes become more complex.\n\nCall graph notes (selected)\n---------------------------\n- pg_connect -> _ensure_schema ensures the PostgreSQL schema exists at connection time.\n- PgUserRepository.get_by_username and get_by_id parse DB rows via _row_to_user, converting roles JSON into AppRole enums and timestamps into datetime.\n- Many repository methods use SELECT ... WHERE / ORDER BY subqueries to return the latest version per planning_id.\n\nFiles and main classes/functions\n-------------------------------\n- src/cm_plan/adapters/sqlite_adapter.py\n - connect(db_path: str | Path = \":memory:\")\n - SqlitePlanningRepository\n - SqliteSemesterContextRepository\n - SqliteRuleSetRepository\n - SqliteAuditLog\n - SqliteUserRepository\n - _ensure_schema, _serialize\n- src/cm_plan/adapters/pg_adapter.py\n - pg_connect(dsn: str)\n - PgPlanningRepository\n - PgSemesterContextRepository\n - PgRuleSetRepository\n - PgAuditLog\n - PgUserRepository\n - _ensure_schema, _serialize, _SCHEMA (Postgres DDL)\n\nIf you need to switch the runtime backend, instantiate repositories with either sqlite_adapter.connect(...) 
or pg_adapter.pg_connect(...). The repositories expose identical APIs so the rest of the application and tests can remain unchanged.","backend-domain-ports":"# Backend Domain & Ports\n\nBackend Domain & Ports\n======================\n\nOverview\n--------\nThis package defines the pure domain model and the persistence \"ports\" (Protocol interfaces) for the CM planning system. It is intentionally framework- and storage-agnostic: all domain concepts are Pydantic models or plain Python enums, and persistence boundaries are expressed as Protocols. The module is the stable core used by the API layer, the solver/engine, and any persistence/adapters.\n\nKey files\n---------\n- cm_plan/domain/types.py — domain data model (enums, value objects, entities, SemesterContext, Planning, activity catalog, permissions).\n- cm_plan/domain/rules.py — declarative constraint types, Rule and RuleSet models, validation and cloning utilities, plus build_default_ruleset().\n- cm_plan/domain/ports.py — Protocol interfaces for repository ports (PlanningRepository, SemesterContextRepository, RuleSetRepository) and audit logging (AuditLog + AuditEntry).\n- cm_plan/domain/auth.py — User and TokenPayload models (minimal auth domain types).\n\nWhy this layer exists\n---------------------\n- Encapsulates the domain concepts (SDS-1xxx .. 
SDS-13xxx): physicians, assignments, absences, room capacities, rules, plannings, semester contexts.\n- Provides stable types for the engine and API to depend on without coupling to any DB, web framework, or transport.\n- Defines abstract persistence contracts (ports) that concrete adapters must implement following hexagonal architecture.\n\nQuick architecture diagram\n-------------------------\nThis small diagram shows how the domain and ports fit between external layers (API, Engine) and storage adapters.\n\n```mermaid\nflowchart LR\n API[API routes] -->|build/read| Domain[Domain types & rules]\n Engine[Solver/Engine] -->|consume| Domain\n Domain -->|persist via| Ports[Ports (Protocols)]\n Ports -->|implemented by| Adapters[(Adapters: SQLite/Postgres/...)]\n Domain -->|audit via| Audit[AuditLog port]\n```\n\nCore concepts and types\n-----------------------\n\nEnums and basic types\n- TimeSlot, Role, Seniority, AssignmentState, AbsenceCategory, PlanningStatus, PlanningType, RoomType, Site, SolveMode, TrafficLight.\n - These are StrEnum subclasses; they define canonical string values used across the system.\n- DEFAULT_ACTIVITIES: a dict[str, ActivityDef] with the built-in activity catalog (codes and whether a room is required).\n- DEFAULT_ROLE_PERMISSIONS: default Permission sets for each AppRole; use user_permissions() to compute the effective permissions of a user.\n\nValue objects\n- HalfDay (frozen=True): immutable, comparable value object representing (date, slot). Implements __lt__ for ordering. 
Frozen so it can be used as a hashable key in collections.\n - Important for sorting and equality checks used by solver and import logic.\n- ActivityDef (frozen=True): immutable activity definition (code, description, requires_room).\n\nEntities and aggregates\n- Physician: schedulable physician with attributes such as id, role, seniority (for CDC), activity_rate, start/end dates, overtime_hours and recup_hs_target_date.\n - Validation: activity_rate in [0.0, 1.0]; overtime_hours can be negative.\n- Assignment: binding of (physician_id, half_day, activity_code). state defaults to AssignmentState.AUTO; justification is optional and preserved across operations (tests rely on this).\n- Absence: physician unavailability across a list of HalfDay values and a category (AbsenceCategory).\n- Guard: guard duty record used by recovery rules (RECUP_WE).\n- RoomCapacity: capacity per half-day and room_type, with optional site.\n- Planning: versioned set of assignments with id, planning_type, status, version, ruleset_id, assignments, published_at.\n - Semantic notes: versioning is important — each save should create a new version (see PlanningRepository semantics below).\n- SemesterContext: \"everything the engine needs to produce a planning\". Includes semester_start/end, physicians, room_capacities, absences, guards, holidays, activity_catalog, locked_assignments, closures, nominative_couplings, calendar_restrictions.\n - The solver and API build and pass SemesterContext to the engine. 
It is the canonical snapshot for a solve.\n\nRules and RuleSet\n-----------------\n- ConstraintNature — DURE (hard) or MOLLE (soft).\n- ConstraintType — enumerates supported constraint/check types (MIN_STAFFING, SUPERVISION, SINGLE_ASSIGNMENT, QUOTA, RECOVERY_WE, many more including soft-objective types).\n- Rule: a single declarative rule (id, nature, constraint_type, parameters, weight, description, enabled).\n- RuleSet: versioned collection of Rule objects with id (UUID), version (int), name, and rules[].\n\nImportant RuleSet methods:\n- hard_rules() / soft_rules(): convenience filters returning enabled rules by nature.\n- rule_by_id(rule_id): find a rule by its id.\n- validate_coherence() -> list[str]:\n - Runs a set of internal checks and returns a list of human-readable error messages (empty list = valid). Checks include duplicate rule ids, soft rules with non-positive weights, unknown constraint types (defensive), and presence of at least one enabled hard rule.\n- clone(new_name: str | None) -> RuleSet:\n - Produces a new RuleSet with a fresh UUID, version incremented by 1, an adjusted name, and deep-copied rules. Useful when evolving rules between semesters.\n- build_default_ruleset() -> RuleSet:\n - Constructs the initial rule-set used by tests and by typical solver invocations. It includes a comprehensive list of hard and soft rules representing the domain defaults (SDS-13003).\n\nPorts (persistence and audit interfaces)\n---------------------------------------\nDefined in cm_plan/domain/ports.py as typing.Protocols. Adapters implement these to connect the domain to a database, an external audit service, etc.\n\nPlanningRepository (Protocol)\n- save(planning: Planning) -> None\n - Persist a Planning. If the planning already exists, implementations must create a new version and keep previous versions accessible (versioning semantics).\n- get(planning_id: uuid.UUID, version: int | None = None) -> Planning | None\n - Retrieve a planning by id. 
If version is None, return the latest version.\n- list_versions(planning_id: uuid.UUID) -> list[int]\n - Return all version numbers for a planning, ascending.\n- set_status(planning_id: uuid.UUID, version: int, status: PlanningStatus) -> None\n - Update the status of a specific planning version. Deletion should be logical — e.g., ARCHIVE status replaces physical deletion.\n- list_plannings(status: PlanningStatus | None = None) -> list[Planning]\n - List plannings (one entry per planning id: the latest version). Optional status filter.\n\nSemesterContextRepository (Protocol)\n- save(ctx: SemesterContext, semester_id: str | None = None) -> str\n - Persist a SemesterContext; returns the semester_id (generated or provided).\n- get(semester_id: str) -> SemesterContext | None\n- list_semesters() -> list[str]\n\nRuleSetRepository (Protocol)\n- save(ruleset: RuleSet) -> None\n- get(ruleset_id: uuid.UUID) -> RuleSet | None\n- list_rulesets() -> list[RuleSet]\n - Implementations should preserve rule-set versions (id + version semantics) so clones and historical rule-sets are retrievable.\n\nAuditLog (Protocol) and AuditEntry\n- record(action: str, user: str, detail: str = \"\", payload_before: str = \"\", payload_after: str = \"\", planning_id: uuid.UUID | None = None) -> None\n - Record an auditable action. Fields are plain strings (JSON snapshots expected in payload_before/payload_after).\n- query(planning_id: uuid.UUID | None = None, user: str | None = None, action: str | None = None, since: datetime | None = None, limit: int = 100) -> list[AuditEntry]\n - Return matching AuditEntry objects.\n- AuditEntry: simple container with attributes (id, timestamp, action, user, detail, payload_before, payload_after, planning_id). 
Constructed directly by adapters or returned from query.\n\nAuth domain\n-----------\n- User (Pydantic model): id (UUID), username, hashed_password, roles (set[AppRole]), is_active, created_at.\n- TokenPayload: JWT payload model (sub, username, roles, exp).\n- Permissions mapping: DEFAULT_ROLE_PERMISSIONS and helper user_permissions(roles, role_permissions=None) -> frozenset[Permission] to compute effective permissions for a user given roles and an optional override mapping.\n\nHow this module is used by the rest of the codebase\n---------------------------------------------------\n- API routes and import utilities consume and produce domain types: HalfDay, AbsenceCategory, SemesterContext, Planning, etc. (see execution flows: import and solve endpoints).\n- The Engine / Solver consumes SemesterContext and RuleSet to produce Planning instances. Tests call build_default_ruleset() and SemesterContext generation routines.\n- Persistence adapters implement the Protocols in ports.py to store and retrieve entities; the API layer depends on these ports rather than any concrete database module.\n- Audit logging is invoked through the AuditLog Protocol in application flows (recording publishes, overrides, rule-set changes, etc.).\n\nDeveloper guidelines and notes\n-----------------------------\n- All domain models are Pydantic models — use their validation features to enforce domain invariants. Many tests rely on these validations (activity_rate range, default states, frozen HalfDay immutability).\n- When extending ConstraintType or adding new Rule attributes, update RuleSet.validate_coherence if necessary to include domain-specific checks.\n- RuleSet.clone increments version and generates a new id — adapters and UI should treat that as a new independent ruleset version.\n- Planning versioning: persistence implementations must respect the semantic that save() either inserts (new id) or appends a new version for existing id. 
list_plannings() returns latest versions per planning id.\n- Audit payloads are plain strings. The system expects JSON snapshots; adapters should not attempt domain-specific interpretation when recording entries.\n- SemesterContext is the canonical snapshot for a solve. If you add new fields used by engine/solver, ensure API importers and persistence adapters include them.\n- DEFAULT_ACTIVITIES and DEFAULT_ROLE_PERMISSIONS are extension points:\n - DEFAULT_ACTIVITIES can be copied into a new SemesterContext.activity_catalog to provide a custom catalog per semester.\n - user_permissions accepts a role_permissions override for tests or deployments with different permission matrices.\n\nCommon pitfalls\n---------------\n- Mutating frozen models: HalfDay and ActivityDef are frozen; do not attempt to modify them in-place; create new instances.\n- Relying on physical deletes: set_status with ARCHIVE is the intended pattern; adapters must avoid physically deleting historical planning versions.\n- Soft-rule weights: validate_coherence flags non-positive weights on MOLLE rules; ensure weights are > 0 for soft objectives to be meaningful.\n\nWhere to look next\n------------------\n- cm_plan/engine/solver.py — how SemesterContext and RuleSet are consumed to produce a Planning.\n- cm_plan/api/routes.py and cm_plan/api/routes_persistence.py — integration points that call into domain types and ports (import flows and the solve endpoint).\n- Tests under tests/ — they exercise many domain invariants (HalfDay ordering, locked assignments persistence, default rule-set usage).","other-agents-md":"# Other — AGENTS.md\n\n# AGENTS.md\n\nPurpose\nThis file documents repository-wide operational rules you must follow when changing versioning, persistent schemas, or when using our GitNexus code-intelligence tooling. 
It is not executable code — it defines governance and developer workflows that protect data integrity, prevent regressions, and keep the project indexable by GitNexus.\n\nQuick summary\n- Which files carry the project/versioning authorities.\n- Required steps and safeguards when changing on-disk or persisted schemas.\n- Mandatory GitNexus workflows (impact analysis, detect_changes, rename, etc.).\n- Pre-commit and PR checklists you must follow.\n\nVersioning — where to bump\nWhen releasing or changing public behavior, update the explicit version in the appropriate place(s):\n\n- ui/package.json — browser (frontend) package version (semver).\n- pyproject.toml — server variant and overall project version.\n- engine/Cargo.toml — Rust engine crate version.\n- ui/src/lib/db.ts — SCHEMA_VERSION constant used by the browser DB layer; increment whenever the browser DB schema changes.\n- docker-compose.yml / docker-compose.self-contained.yml — container image tags when publishing containers.\n\nDiagram — which artifacts carry versioning responsibility\n```mermaid\ngraph LR\n A[ui/package.json] -->|browser variant| RELEASE\n B[pyproject.toml] -->|server & overall| RELEASE\n C[engine/Cargo.toml] -->|Rust engine| RELEASE\n D[ui/src/lib/db.ts] -->|SCHEMA_VERSION| DB\n E[docker-compose*.yml] -->|image tags| CONTAINERS\n RELEASE[Release/Version]\n DB[Browser DB Schema]\n CONTAINERS[Container images]\n```\n\nDatabase schema governance rules — mandatory steps\nAny change that modifies a persistent schema or an on-disk database format must follow these rules exactly:\n\n1. Increment the explicit schema version\n - Update the authoritative version constant/file (e.g., ui/src/lib/db.ts: SCHEMA_VERSION) as part of the same change set.\n2. Add upward migrations from every still-supported older version\n - Provide migrations that transform older data to the new schema.\n3. 
Automatically migrate older databases when opened\n - The application must perform migrations at open/load time so users are not required to run manual scripts.\n4. Re-save in the current schema on next save/export\n - After migration, the app must re-save/export using the new schema so the on-disk format is consistent going forward.\n5. Explicitly refuse opening newer-database formats\n - If the app encounters a database created by a newer application version, it must fail with a clear message asking the user to update.\n6. Never silently lose user data during migration\n - No destructive or lossy behavior without explicit user consent and clear messaging.\n7. Add or update regression tests\n - Tests must cover at least one older-database-opening scenario and verify migration correctness.\n8. Update documentation\n - At minimum update docs/SDS_Planification_CM.md and docs/SDS-traceability-matrix.md when migration or versioning behavior changes.\n\nPractical rule for PRs that touch schemas\nNo pull request should modify a persistent schema unless it includes, in the same change set:\n- The version bump\n- The migration implementation(s)\n- Migration tests\n- Relevant SDS/traceability documentation updates (when visible behavior changes)\n\nWhere to watch for schema-related code\n- ui/src/lib/db.ts — SCHEMA_VERSION and open-time migration logic for the browser variant.\n- Browser export format — .hands-on files (used for import/export persistence).\n- Any server-side / hosted persistence code must follow the same discipline (explicit version, migrations, tests, docs).\n\nImplementing a migration — minimal checklist for the implementer\n- Identify current SCHEMA_VERSION value and all supported older versions.\n- Add migration functions that transform vN -> vN+1 and chain them where necessary.\n- Make open/load code run migrations automatically and fail safely if input is newer than supported.\n- Ensure saving/exporting writes the current schema.\n- Add tests that 
load a serialized older DB, run the load/migrate path, and assert parity after migration.\n- Update SCHEMA_VERSION at the end of the change set (not as a separate PR).\n- Update docs (SDS planification/traceability matrix) to reflect the change.\n\nGitNexus — code-intelligence and required developer workflows\nThis repository is indexed by GitNexus. The AGENTS doc prescribes mandatory use of GitNexus tools to assess impact and keep the index healthy.\n\nAlways run impact analysis before editing symbols\n- Use: gitnexus_impact({target: \"symbolName\", direction: \"upstream\"})\n- Report blast radius (direct callers, affected processes, risk level) to reviewers before edits.\n\nPre-commit / pre-merge checks\n- MUST run gitnexus_detect_changes() before committing. This verifies your edits affected only expected symbols and execution flows.\n- If gitnexus_impact returns HIGH or CRITICAL, you MUST warn reviewers and address the risk before proceeding.\n\nWhen exploring code and debugging\n- Use gitnexus_query({query: \"<term>\"}) instead of grepping; it returns process-grouped results ordered by relevance.\n- Use gitnexus_context({name: \"<symbol>\"}) to see callers, callees, and which processes use the symbol.\n- Trace execution flows: READ gitnexus://repo/cdim-plan/process/{processName}\n- For regressions: gitnexus_detect_changes({scope: \"compare\", base_ref: \"main\"}) to see what changed in your branch.\n\nWhen refactoring\n- Renaming: run gitnexus_rename({symbol_name: \"old\", new_name: \"new\", dry_run: true}) first. Inspect the graph preview. 
Then run with dry_run: false.\n- Before split/extract: run gitnexus_context({name: \"target\"}) and gitnexus_impact({target: \"target\", direction: \"upstream\"}) to know all dependencies.\n- After refactor: run gitnexus_detect_changes({scope: \"all\"}) to confirm only expected graph edits occurred.\n\nNever do (hard prohibitions)\n- NEVER edit a function, class, or method without first running gitnexus_impact on it.\n- NEVER ignore HIGH or CRITICAL risk warnings from impact analysis.\n- NEVER rename symbols with blind find-and-replace — use gitnexus_rename.\n- NEVER commit without running gitnexus_detect_changes() to confirm scope.\n\nTools quick reference (commands you will use daily)\n- gitnexus_query({query: \"auth validation\"}) — find concepts and relevant flows.\n- gitnexus_context({name: \"validateUser\"}) — 360° view of a symbol (callers/callees/processes).\n- gitnexus_impact({target: \"X\", direction: \"upstream\"}) — blast radius for edits.\n- gitnexus_detect_changes({scope: \"staged\"}) — pre-commit scope verification.\n- gitnexus_rename({symbol_name: \"old\", new_name: \"new\", dry_run: true}) — preview safe rename.\n- gitnexus_cypher({query: \"MATCH ...\"}) — custom graph queries.\n- npx gitnexus analyze — refresh the GitNexus index after commits.\n - npx gitnexus analyze --embeddings to preserve embeddings (if previously generated).\n\nImpact risk levels — how to interpret\n- d=1 (WILL BREAK) — direct callers/importers; MUST update these before merging.\n- d=2 (LIKELY AFFECTED) — indirect dependencies; should run tests and review.\n- d=3 (MAY NEED TESTING) — transitive; test if on critical path.\n\nResources (where to look inside repo)\n- gitnexus://repo/cdim-plan/context — overview and index status.\n- gitnexus://repo/cdim-plan/clusters — functional clusters.\n- gitnexus://repo/cdim-plan/processes — execution flows catalog.\n- gitnexus://repo/cdim-plan/process/{name} — step-by-step traces.\n\nKeeping the GitNexus index healthy\n- After commits, 
re-run npx gitnexus analyze to keep the index fresh.\n- If you previously generated embeddings, preserve them by adding --embeddings when reanalyzing.\n- Check .gitnexus/meta.json stats.embeddings to see whether embeddings exist (0 means none). Running analyze without --embeddings will delete previously generated embeddings.\n\nPre-merge checklist (required for reviewers and contributors)\n1. Run gitnexus_impact for each modified symbol; include the reported blast radius in PR description.\n2. Run gitnexus_detect_changes() and attach the report to the PR.\n3. Confirm no HIGH/CRITICAL risk was ignored.\n4. For schema changes: confirm version bump, migrations, migration tests, and docs were included in the same change set.\n5. For refactors/renames: confirm gitnexus_rename was used and its dry-run result reviewed.\n6. Run unit/integration tests, including migration tests that load older DB fixtures.\n\nExample commands (common sequences)\n- Before editing a symbol X:\n - gitnexus_impact({target: \"X\", direction: \"upstream\"})\n- Before commit:\n - gitnexus_detect_changes({scope: \"staged\"})\n- After merging or committing:\n - npx gitnexus analyze\n - (optional) npx gitnexus analyze --embeddings\n\nTesting migrations\n- Add a regression test that reconstructs an older-format DB (or import fixture), opens/loads it with the app code path, asserts migration succeeded, and verifies post-migration state matches expectations.\n- Tests must run in CI and be included in the same PR as the migration code.\n\nConnections to the rest of the codebase\n- SCHEMA_VERSION in ui/src/lib/db.ts is the single-browser-side sentinel that controls browser-side migration behaviour. 
Any UI change that alters stored structures must update it.\n- .hands-on files are the browser export/import artifact; changes to export shape must be reflected in migrations and version bumps.\n- Server-side persistence and hosted services must mirror the same discipline: explicit version numbers, upward migrations, tests, and documentation updates.\n- GitNexus tooling integrates with your local dev workflow and CI to provide safe, call-graph-aware editing and refactoring.\n\nLast notes\nTreat this document as mandatory policy. The rules exist to preserve user data, prevent accidental breakage in dependent processes, and to make large-scale refactors safe using the GitNexus tools. If any GitNexus tool reports a stale index or missing embeddings, run npx gitnexus analyze locally before continuing.","other-docker-backend":"# Other — docker-backend\n\nOther — docker-backend\n\nPurpose\n- Provides Docker images for running the backend ASGI application contained in this repository.\n- There are two Dockerfiles:\n - docker/backend/Dockerfile — production-oriented image\n - docker/backend/Dockerfile.dev — development image with live-reload and dev dependencies\n\nThese images use the \"uv\" tool (from ghcr.io/astral-sh/uv) to manage and install dependencies and to run the ASGI server.\n\nWhat the Dockerfiles do (high level)\n- Base image: python:3.12-slim\n- Workdir: /app\n- The uv runtime is copied from ghcr.io/astral-sh/uv into the image to provide dependency management CLI and a runner.\n- Dependency files (pyproject.toml and uv.lock) are copied first to leverage Docker layer caching.\n- Two-step install:\n 1. Run uv sync to install dependencies only (so layer can be cached when code changes but deps don’t).\n 2. 
Copy application source and run uv sync again to install the project itself.\n- Exposes port 8000.\n- The container entrypoint runs uv to start uvicorn and serve the ASGI app at cm_plan.api.app:app.\n\nKey files\n- docker/backend/Dockerfile (production)\n - Uses uv sync with flags: --no-dev, --frozen, --no-install-project for the initial step (install only dependencies, no dev deps, verify lockfile).\n - After copying source, runs uv sync --no-dev --frozen to install the project with locked dependencies (production).\n - CMD: uv run uvicorn cm_plan.api.app:app --host 0.0.0.0 --port 8000\n\n- docker/backend/Dockerfile.dev (development)\n - Uses uv sync with --extra dev to include development extras/dependencies.\n - First uv sync uses --no-install-project to cache dependency-only layer.\n - After copying source, runs uv sync --frozen --extra dev to install the project and dev extras.\n - CMD: uv run uvicorn cm_plan.api.app:app --reload --host 0.0.0.0 --port 8000 (reload enabled)\n\nWhy the copy-order and two-step uv sync?\n- Copying pyproject.toml and uv.lock before the source lets the dependency install layer be cached by Docker. When only application code changes, Docker reuses the layer that installed dependencies and only rebuilds the final layers, speeding up image rebuilds.\n- The first uv sync can be run without installing the project so it only installs dependencies (or dev extras for Dockerfile.dev). The second uv sync installs the project so the resulting image contains the installed package.\n\nHow the container starts the app\n- The CMD uses uv to run uvicorn:\n - Production: [\"uv\", \"run\", \"uvicorn\", \"cm_plan.api.app:app\", \"--host\", \"0.0.0.0\", \"--port\", \"8000\"]\n - Dev: [\"uv\", \"run\", \"uvicorn\", \"cm_plan.api.app:app\", \"--reload\", \"--host\", \"0.0.0.0\", \"--port\", \"8000\"]\n- The ASGI application object expected by uvicorn is cm_plan.api.app:app. 
This ties the image directly to that module path in the codebase.\n\nBuild and run examples\n- Build production image:\n docker build -t cm-plan-backend:latest -f docker/backend/Dockerfile .\n\n- Run production container:\n docker run --rm -p 8000:8000 cm-plan-backend:latest\n\n- Build development image:\n docker build -t cm-plan-backend:dev -f docker/backend/Dockerfile.dev .\n\n- Run development container with source mounted for iterative edit/reload:\n docker run --rm -p 8000:8000 -v \"$(pwd)/src:/app/src\" cm-plan-backend:dev\n - Mounting src lets uvicorn's --reload pick up changes without rebuilding the image.\n\nNotes on dependency flags used\n- --frozen: ensures the lockfile (uv.lock) matches the dependency specification; builds will fail if it would change the lock state.\n- --no-dev: excludes development dependencies (used in production Dockerfile).\n- --extra dev: installs extras named \"dev\" (used in Dockerfile.dev to include tooling for development).\n- --no-install-project: run dependency resolution/install without installing the project package itself — used to create a reusable dependency layer.\n\nPort and networking\n- Both Dockerfiles EXPOSE port 8000 and uvicorn is configured to bind to 0.0.0.0:8000.\n- In typical deployment you will map host port 8000 to container port 8000 (docker run -p 8000:8000) or put the container behind a reverse proxy/load-balancer.\n\nTroubleshooting\n- \"uv not found\" — Ensure the uv files are present in the referenced ghcr image or that the COPY source exists; the Dockerfiles copy /uv /uvx /bin/ from the UV image.\n- Dependency resolution fails — Check that uv.lock and pyproject.toml are synchronized; uv sync --frozen will error if they diverge.\n- Live reload not triggering in dev — If you mount source into the container for dev, confirm file modification notifications are seen by the container runtime and that uvicorn --reload is being used (Dockerfile.dev does that).\n\nConnection to the rest of the codebase\n- 
The backend Docker image runs the ASGI application object at cm_plan.api.app:app — that is the runtime entrypoint for backend HTTP requests.\n- The Dockerfiles depend on correct dependency specification in pyproject.toml and a matching uv.lock file.\n- The source code is expected under src/ and is copied into /app/src in the image. The installed project is produced by the second uv sync step.\n\nMermaid: Build & run flow\nNote: small diagram to visualize the build steps and run flow.\n\n```mermaid\nflowchart LR\n A[Copy pyproject.toml + uv.lock] --> B[\"uv sync (deps only)\"]\n B --> C[Copy src/]\n C --> D[\"uv sync (install project)\"]\n D --> E[Image created]\n E --> F[\"uv run uvicorn cm_plan.api.app:app (port 8000)\"]\n```\n\nBest practices and recommendations\n- Keep pyproject.toml and uv.lock updated and committed. The Docker build relies on uv.lock for reproducible builds.\n- Use Dockerfile.dev for local development to get live reload and dev dependencies; mount src/ to avoid frequent image rebuilds.\n- For production deployments, consider running the image behind a process manager or reverse proxy (e.g., ingress, nginx) and attach logging/monitoring as needed.\n- If image size becomes a concern, consider multi-stage builds (build/install artifacts in one stage, copy only runtime files to a smaller base). 
The current Dockerfile already minimizes layers by using slim python base and installing only necessary items.\n\nSummary\n- The docker/backend Dockerfiles produce an image that runs the ASGI app cm_plan.api.app:app using uv + uvicorn.\n- They are optimized for Docker layer caching by installing dependencies before copying source, and they provide a development variant with reload and dev dependencies.","other-docker-compose-postgres-yml":"# Other — docker-compose.postgres.yml\n\ndocker-compose.postgres.yml\n===========================\n\nPurpose\n-------\nThis compose fragment defines a minimal PostgreSQL service and a small integration hook for a backend container that depends on that database. It is intended for local development and testing: it brings up a Postgres 16 (alpine) container, exposes the database port to the host, persists data in a named Docker volume, and provides a healthcheck so other services (here, the backend) can wait until the database is ready.\n\nAt a glance\n-----------\n- Services:\n - postgres: PostgreSQL 16 (alpine) server with healthcheck and a named data volume.\n - backend: placeholder for the application/worker that needs the database; it receives a CM_PG_DSN environment variable and will wait for postgres to become healthy before starting.\n- Named volume:\n - pgdata: stores Postgres data at /var/lib/postgresql/data inside the postgres container.\n- Health & orchestration:\n - postgres exposes a healthcheck using pg_isready; backend uses depends_on with condition: service_healthy.\n\nKey parts and how they work\n---------------------------\n\npostgres service\n- image: postgres:16-alpine\n - Uses the official Postgres 16 image built on Alpine Linux.\n- environment:\n - POSTGRES_DB, POSTGRES_USER, POSTGRES_PASSWORD are set via shell-style substitution:\n - POSTGRES_DB: ${POSTGRES_DB:-cm_plan}\n - POSTGRES_USER: ${POSTGRES_USER:-cm_plan}\n - POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-cm_plan}\n - These values default to \"cm_plan\" if 
not provided by the environment or a .env file.\n- ports:\n - \"5432:5432\" — host port 5432 maps to container port 5432. Useful for connecting from host tools (psql, DBeaver).\n- volumes:\n - pgdata:/var/lib/postgresql/data — named volume ensures DB files persist between container restarts.\n- healthcheck:\n - test: [ \"CMD-SHELL\", \"pg_isready -U ${POSTGRES_USER:-cm_plan} -d ${POSTGRES_DB:-cm_plan}\" ]\n - Uses pg_isready with the same env var defaults to determine readiness.\n - interval: 5s\n - timeout: 3s\n - retries: 5\n - The healthcheck periodically probes the Postgres server; Docker exposes this state and other services can depend on it.\n\nbackend service\n- environment:\n - CM_PG_DSN: ${CM_PG_DSN:-postgresql://cm_plan:cm_plan@postgres:5432/cm_plan}\n - Default DSN uses the same defaults for DB/user/password and points to host \"postgres\" (the service name), port 5432, database \"cm_plan\".\n - In a compose network, \"postgres\" resolves to the postgres container via Docker's embedded DNS.\n- depends_on:\n - postgres:\n condition: service_healthy\n - This tells Docker Compose to wait until the postgres service reports healthy (per its healthcheck) before starting backend.\n\nNamed volume\n- pgdata is declared at the bottom of the file:\n - volumes:\n pgdata:\n - Docker manages its physical location. 
The named volume persists Postgres data across container recreation.\n\nTypical usage\n-------------\nFrom the project directory that contains this file:\n\n- Bring up services:\n - docker compose -f docker-compose.postgres.yml up\n - Add -d to run in background: docker compose -f docker-compose.postgres.yml up -d\n\n- Tear down and remove containers (preserves data in named volume):\n - docker compose -f docker-compose.postgres.yml down\n\n- Tear down and remove containers + named volumes (data reset):\n - docker compose -f docker-compose.postgres.yml down -v\n - Or remove the specific volume manually:\n - docker volume ls # find <project>_pgdata\n - docker volume rm <project>_pgdata\n\n- Connect from host with psql (using defaults):\n - psql -h localhost -p 5432 -U cm_plan -d cm_plan\n - If you changed POSTGRES_PASSWORD you will be prompted for it.\n\nOverriding environment values\n-----------------------------\nYou can override any default environment variable using:\n- a shell environment variable: export POSTGRES_PASSWORD=secret\n- a .env file in the same directory (Compose reads .env automatically)\n- explicit override in your own docker-compose.yml or docker-compose.override.yml\n\nExamples:\n- Provide a stronger password:\n - export POSTGRES_PASSWORD='SuperSecret'\n - docker compose -f docker-compose.postgres.yml up\n- Provide a different DB name and DSN:\n - POSTGRES_DB=other_db POSTGRES_USER=other_user POSTGRES_PASSWORD=other_pw CM_PG_DSN=\"postgresql://other_user:other_pw@postgres:5432/other_db\" docker compose -f docker-compose.postgres.yml up\n\nHow this connects to the rest of the codebase\n---------------------------------------------\n- The backend service receives CM_PG_DSN and is expected to use it to connect to Postgres. The default CM_PG_DSN points at the postgres service in the same Compose network (postgres:5432).\n- Because depends_on uses condition: service_healthy, the backend will not start until the postgres healthcheck succeeds. 
This reduces race conditions where the app starts before the DB is accepting connections.\n- The docker-compose fragment is intended to be included or used alongside other compose files that define the backend image and app-specific configuration. It focuses only on DB provisioning and the basic integration hook for the backend.\n\nSecurity and production notes\n-----------------------------\n- Never use the default credentials (cm_plan / cm_plan) in production.\n- For production deployments:\n - Use Docker secrets, environment management, or an external credential store instead of plain environment variables.\n - Avoid exposing Postgres on a public host port unless necessary. Remove the ports mapping or bind to localhost only if the DB is only for local dev.\n - Consider using a managed database with backups and high-availability rather than a single container.\n- The named volume persists data locally; ensure backups are in place if you need to preserve data across host failures.\n\nTroubleshooting\n---------------\n- Backend stuck starting while postgres seems up:\n - Check postgres health: docker compose -f docker-compose.postgres.yml ps or docker inspect --format '{{json .State.Health}}' <postgres-container-id>\n - View logs: docker compose -f docker-compose.postgres.yml logs postgres\n - Ensure pg_isready returns success inside the container:\n - docker compose -f docker-compose.postgres.yml exec postgres pg_isready -U ${POSTGRES_USER:-cm_plan} -d ${POSTGRES_DB:-cm_plan}\n- Cannot connect from host:\n - Ensure service is up and bound to 0.0.0.0:5432 in the container logs.\n - Check any firewall rules blocking 5432.\n- Data disappeared after recreating services:\n - Confirm whether docker compose down -v was used (that removes volumes).\n - Inspect volumes: docker volume ls | grep pgdata\n\nSmall architecture diagram\n--------------------------\nThis simple diagram shows the runtime relationship between components:\n\ngraph LR\n Host[Host] -->|5432| 
Postgres[postgres:5432]\n Postgres -->|/var/lib/postgresql/data| Volume[pgdata (named volume)]\n Backend[backend] -->|CM_PG_DSN -> postgresql://...@postgres:5432/cm_plan| Postgres\n\n(Compose provides an internal network so \"backend\" resolves to \"postgres\".)\n\nNotes about compatibility\n-------------------------\n- The compose file does not declare a version key. The use of depends_on with condition: service_healthy relies on a Compose implementation that supports healthcheck-based waiting. Use a recent Docker Compose (v2 or a compatible CLI) for expected behavior.\n\nSummary\n-------\ndocker-compose.postgres.yml provides a small, opinionated local Postgres setup with persistence and a healthcheck-driven dependency for a backend service. It's optimized for development workflows, easily overridable via environment variables or a .env file, and integrates with the backend by supplying a CM_PG_DSN default that points at the postgres service. Use it as-is for local testing or extend/secure it for staging/production.","other-docker-compose-self-contained-yml":"# Other — docker-compose.self-contained.yml\n\nOther — docker-compose.self-contained.yml\n\nPurpose\n- Defines a single, self-contained Docker Compose service to run the \"cdim-plan-self-contained\" application image.\n- Intended for local or single-host deployments where you want to run the plan service without the rest of the system.\n- Provides defaults that can be overridden with environment variables so the same file can be used in different environments (development, CI, testing).\n\nQuick overview of the service (as defined in the file)\n- Service name: cdim-plan-self-contained\n- Image: ${CDIM_PLAN_IMAGE:-ghcr.io/dgehriger/cdim-plan-self-contained}:${CDIM_PLAN_TAG:-latest}\n- Container name: ${CDIM_PLAN_CONTAINER_NAME:-cdim-plan-self-contained}\n- Restart policy: unless-stopped\n- Port mapping: host ${CDIM_PLAN_PORT:-8080} -> container 80\n- Healthcheck: HTTP probe against http://127.0.0.1/healthz 
inside the container\n- Pull policy: always (in this compose file)\n\nEnvironment variables and defaults\n- CDIM_PLAN_IMAGE\n - Default: ghcr.io/dgehriger/cdim-plan-self-contained\n - Purpose: image repository to pull\n- CDIM_PLAN_TAG\n - Default: latest\n - Purpose: image tag to use\n- CDIM_PLAN_CONTAINER_NAME\n - Default: cdim-plan-self-contained\n - Purpose: explicit Docker container name\n- CDIM_PLAN_PORT\n - Default: 8080\n - Purpose: host port bound to the container's port 80\n\nHow it works (behavioral details)\n- Image resolution\n - The final image used is built from CDIM_PLAN_IMAGE and CDIM_PLAN_TAG. Example: ghcr.io/dgehriger/cdim-plan-self-contained:latest.\n - pull_policy: always instructs Compose to pull the image before starting. (If your Compose implementation ignores this field, use docker compose pull manually.)\n- Container lifecycle\n - restart: unless-stopped causes Docker to restart the container when it exits or on daemon start, except when the container was explicitly stopped.\n - container_name pins a stable name for the container. This simplifies ad-hoc commands (docker logs cdim-plan-self-contained), but prevents running multiple instances on the same host with the same name.\n- Networking / ports\n - The service exposes container port 80 and maps it to the host port configured via CDIM_PLAN_PORT (default 8080).\n - No custom networks are defined, so the service attaches to the compose default network when started with docker compose.\n- Healthcheck\n - Executed inside the container:\n test: [\"CMD-SHELL\", \"wget -q -O - http://127.0.0.1/healthz || exit 1\"]\n - Timing:\n - interval: 30s (how often the check runs)\n - timeout: 5s (maximum duration of the check command)\n - retries: 3 (fail after 3 consecutive failures)\n - start_period: 10s (grace period after start before counting failures)\n - Notes:\n - The healthcheck command uses wget inside the container. If the image does not include wget, the healthcheck will fail. 
Replace or modify the command (e.g., using curl or a shell builtin) if your image lacks wget.\n - The health endpoint is /healthz on loopback; the service must bind to 127.0.0.1 inside the container for the check to reach it, or use 0.0.0.0 if appropriate.\n\nIntegration with the rest of the codebase\n- This file is a standalone Compose definition intended to run the plan component by itself.\n- It does not declare volumes, secrets, or networks shared with other compose files. If you need to integrate with other services (databases, shared caches, proxies), use an override compose file or compose project that defines networks and connects services together.\n- Typical use case in repository workflows:\n - Local development: run just the plan UI/backend to iterate quickly.\n - CI: bring up a single service for integration tests against a stubbed environment.\n\nRecommended usage and common commands\n- Start service (detached):\n - docker compose -f docker-compose.self-contained.yml up -d\n- Stop and remove containers:\n - docker compose -f docker-compose.self-contained.yml down\n- Pull the image explicitly (useful if your Compose ignores pull_policy):\n - docker compose -f docker-compose.self-contained.yml pull\n- View logs:\n - docker compose -f docker-compose.self-contained.yml logs -f\n - docker logs -f cdim-plan-self-contained\n- Exec into running container:\n - docker exec -it cdim-plan-self-contained /bin/sh\n- Override defaults on the command line:\n - CDIM_PLAN_PORT=9090 CDIM_PLAN_TAG=v1.2.3 docker compose -f docker-compose.self-contained.yml up -d\n- Use an env file:\n - Create a .env or custom file with the variables listed above. 
Compose will read .env by default or pass using --env-file.\n\nDebugging tips and common failure modes\n- Container not starting / crashloop:\n - Inspect logs: docker compose logs or docker logs <container_name>\n - Check docker inspect <container_name> for restart count and exit codes.\n- Healthcheck failing:\n - Confirm the application serves /healthz inside the container:\n - docker exec -it <container> wget -q -O - http://127.0.0.1/healthz\n - If wget is missing, either modify the healthcheck command to use curl or add wget to the image.\n - Increase start_period if the service needs more warm-up time.\n- Port conflicts on host:\n - If CDIM_PLAN_PORT is already used, docker compose will fail to bind. Change CDIM_PLAN_PORT or stop the conflicting process.\n- Image not pulled:\n - If your Compose ignores pull_policy, run docker compose pull manually or docker pull ${CDIM_PLAN_IMAGE}:${CDIM_PLAN_TAG}.\n\nPortability and recommendations\n- container_name\n - Useful for convenience but reduces portability: you cannot run the same compose file multiple times on one host without editing container_name. For multi-instance or dynamic deployments, remove container_name or make it unique per instance.\n- pull_policy\n - Not all Compose implementations support this key the same way. If reproducible builds/pulls are important in your CI, call docker compose pull in CI steps explicitly.\n- Healthcheck tooling\n - Prefer robust health checks that do not rely on external binaries unless you control the image contents. Consider using a small POSIX shell or curl-based probe if curl is available.\n\nMinimal architecture diagram\n- This service is simple and self-contained. 
The following small diagram illustrates the runtime mapping.\n\n```mermaid\nflowchart LR\n Host[\"Host:CDIM_PLAN_PORT (host)\"]\n Container[\"Container: cdim-plan-self-contained (port 80)\"]\n Registry[Image Registry<br/>ghcr.io/dgehriger/...]\n Host -->|host port -> container:80| Container\n Container -->|image pulled from| Registry\n Container -->|health probe /healthz| Container\n```\n\nChange management and contributions\n- To change the image or default tag: update CDIM_PLAN_IMAGE or CDIM_PLAN_TAG default values in the compose file.\n- To change the healthcheck behavior: edit the test command or adjust interval/timeout/retries/start_period to match service characteristics.\n- To add persistent storage, secrets, or networks: add volumes, secrets, or networks sections and connect them to the service. When doing so, ensure environment-variable-driven defaults are preserved to allow the file to remain usable standalone.\n\nFile location and naming\n- File: docker-compose.self-contained.yml\n- Purpose: lightweight single-service compose file for the plan component\n\nIf you need to run this service alongside other services in the repository, create a compose override or a top-level compose file that includes this service and defines shared networks, volumes, and secrets as required.","other-docker-compose-yml":"# Other — docker-compose.yml\n\nOther — docker-compose.yml\n\nOverview\n- Purpose: Defines a local multi-container setup for the project using Docker Compose. It builds three services (engine, backend, ui), wires them together, exposes ports for local access, and configures persistence for the backend's SQLite database.\n- Location of related build artifacts:\n - engine Dockerfile: docker/engine/Dockerfile\n - backend Dockerfile: docker/backend/Dockerfile\n - ui Dockerfile: docker/ui/Dockerfile\n\nServices and key settings\n1. 
engine\n- Build:\n - context: .\n - dockerfile: docker/engine/Dockerfile\n- Ports: \"8001:8001\" — engine listens on 8001 inside the container and is exposed on the host at 8001.\n- Restart: unless-stopped\n- Healthcheck:\n - test: python -c \"import urllib.request; urllib.request.urlopen('http://localhost:8001/api/health')\"\n - interval: 10s, timeout: 5s, retries: 3\n - Purpose: lets Compose (when supporting health-based waits) and humans know when the engine is ready to accept requests.\n\n2. backend\n- Build:\n - context: .\n - dockerfile: docker/backend/Dockerfile\n- Ports: \"8000:8000\" — backend API is exposed on host port 8000.\n- Environment variables (available to the container):\n - CM_DB_PATH: ${CM_DB_PATH:-/data/cm_plan.db}\n - Default maps the database file to the mounted volume path /data/cm_plan.db.\n - Can be overridden via environment or an .env file.\n - CM_ENGINE_URL: ${CM_ENGINE_URL:-http://engine:8001}\n - Default points at the engine service by Docker Compose service name. Inside the compose network, the hostname engine resolves to the engine container.\n - CM_JWT_SECRET: ${CM_JWT_SECRET:-dev-secret-change-in-production}\n - Default secret is insecure and must be changed in production.\n- depends_on:\n - engine with condition: service_healthy\n - Compose will wait until the engine passes its healthcheck before starting the backend (see caveat below about Compose implementations).\n- Volumes:\n - sqlite_data:/data — named volume mounted at /data inside the backend container to persist the SQLite DB file.\n- Restart: unless-stopped\n- Healthcheck:\n - test: python -c \"import urllib.request; urllib.request.urlopen('http://localhost:8000/api/health')\"\n - interval: 10s, timeout: 5s, retries: 3\n\n3. 
ui\n- Build:\n - context: .\n - dockerfile: docker/ui/Dockerfile\n- Ports: \"3000:80\" — UI served from container port 80 is available on host port 3000.\n- depends_on:\n - backend with condition: service_healthy\n - UI will wait for the backend to be healthy before starting (see caveat below).\n- Restart: unless-stopped\n\nVolumes\n- sqlite_data: a named Docker volume used to persist the backend SQLite database at /data inside the backend container.\n\nHow the pieces connect\n- Network: Compose creates a single default network for these services. Service names (engine, backend, ui) are DNS hostnames on that network.\n- Backend -> Engine: backend uses CM_ENGINE_URL which defaults to http://engine:8001. This resolves to the engine service on the Compose network, so HTTP requests from backend to engine are service-local.\n- UI -> Backend: The UI depends on backend and will use whatever backend URL is configured in its build/runtime environment or frontend configuration. Externally, you can reach backend at http://localhost:8000 and UI at http://localhost:3000.\n\nCompose behavior and compatibility notes\n- The compose file uses depends_on with condition: service_healthy. The conditional form causes Compose to wait for a service's healthcheck to report healthy before starting dependent services. Not all Compose implementations (or versions) support depends_on.condition — behavior varies between compose spec versions and the legacy docker-compose. 
If your tooling ignores the condition:\n - Services may start before dependencies are fully ready.\n - You can implement application-level retries or use simple start-time wait scripts (wait-for / curl loops) inside containers as an alternative.\n- The file does not declare a file format version explicitly; ensure your Docker Compose CLI and engine support the features used here (healthcheck, depends_on conditions, named volumes).\n\nTypical developer workflows\n- Build and start in background:\n - docker-compose build\n - docker-compose up -d\n- Tail logs:\n - docker-compose logs -f backend\n - docker-compose logs -f engine\n- Stop and remove containers:\n - docker-compose down\n- Remove containers and volumes (data loss):\n - docker-compose down -v\n- Rebuild a single service:\n - docker-compose up -d --build backend\n\nEnvironment overrides\n- Use a .env file in the same directory or set environment variables in the shell to override defaults:\n - CM_DB_PATH (e.g., /data/custom.db)\n - CM_ENGINE_URL (e.g., http://localhost:8001 for non-Compose networking)\n - CM_JWT_SECRET (must be set for production)\n- Example .env:\n - CM_JWT_SECRET=super-secret-value\n - CM_DB_PATH=/data/cm_plan.db\n\nAccessing services from host\n- Engine API: http://localhost:8001/api\n- Backend API: http://localhost:8000/api\n- UI: http://localhost:3000\n\nHealthchecks and debugging\n- Healthcheck details:\n - Each healthcheck runs a Python single-line script that performs an HTTP GET to /api/health on localhost inside the container.\n - interval: 10s, timeout: 5s, retries: 3 — if the check fails the container will be marked unhealthy.\n- Inspect a container's health state:\n - docker inspect --format='{{json .State.Health}}' <container_name_or_id>\n- Check logs if healthcheck repeatedly fails:\n - docker logs <container_name_or_id>\n - docker-compose logs backend\n- Inspect volume contents:\n - docker run --rm -v sqlite_data:/data alpine ls -la /data\n\nSecurity and production 
considerations\n- CM_JWT_SECRET defaults to dev-secret-change-in-production — replace with a secure secret before exposing to production.\n- SQLite persistence:\n - The backend uses a named Docker volume (sqlite_data) to persist data. In production you may want a managed database instead of SQLite for concurrency and reliability.\n- Exposed ports:\n - Current setup maps container ports directly to host. In production consider a reverse proxy or firewalling direct access.\n\nSmall architecture diagram\n- The following Mermaid diagram summarizes the service relationships and the persistent volume.\n\nflowchart LR\n Engine(engine:8001)\n Backend(backend:8000)\n UI(ui:80 -> host:3000)\n Volume(sqlite_data)\n Engine -->|depends_on/health| Backend\n Backend -->|depends_on/health| UI\n Volume --> Backend\n\nTroubleshooting checklist\n- If backend cannot reach engine:\n - Confirm engine container is running and healthy: docker ps, docker inspect health.\n - Confirm CM_ENGINE_URL is correctly set (should be http://engine:8001 inside Compose).\n- If UI shows no data:\n - Confirm backend is healthy and responding (curl http://localhost:8000/api/health).\n - Check frontend config for the backend URL.\n- If DB seems empty after restart:\n - Verify that sqlite_data volume exists: docker volume ls\n - Inspect files in the volume as shown above.\n\nNotes for contributors\n- Dockerfile changes:\n - Place changes in the referenced Dockerfiles (docker/engine/Dockerfile, docker/backend/Dockerfile, docker/ui/Dockerfile).\n- If you add services that must be ready before others, include a healthcheck and add depends_on with condition: service_healthy (and be aware of compose tooling compatibility).\n- When changing CM_DB_PATH default, update volume mount and any startup scripts that assume /data/cm_plan.db.\n\nThis compose configuration is optimized for local development and quick iteration. 
For production, review networking, secrets management, and data storage choices before deploying.","other-docker-engine":"# Other — docker-engine\n\ndocker/engine — Dockerfile\n==========================\n\nLocation\n- docker/engine/Dockerfile\n\nPurpose\n- Builds a container image that runs the \"engine\" service for this repository.\n- The image runs a Uvicorn ASGI server that serves the application object at cm_plan.api.engine_app:app on port 8001.\n- The Dockerfile is arranged to maximize Docker layer cache reuse for dependency installation while still installing the project source into the image.\n\nQuick summary of what it does\n- Base on python:3.12-slim.\n- Copy runtime helper binaries (uv, uvx, etc.) from ghcr.io/astral-sh/uv:latest into the image.\n- Use the uv tool to resolve and install dependencies twice:\n - First sync: install only pinned dependencies (no project install) — this enables caching of dependency layers when only source changes.\n - Second sync: after copying src/, install the project itself.\n- Expose port 8001 and run the app with uv -> uvicorn cm_plan.api.engine_app:app --host 0.0.0.0 --port 8001.\n\nFile contents (high level)\n- FROM python:3.12-slim AS base\n- WORKDIR /app\n- COPY --from=ghcr.io/astral-sh/uv:latest /uv /uvx /bin/\n- COPY pyproject.toml uv.lock README.md ./\n- RUN uv sync --no-dev --frozen --no-install-project\n- COPY src/ src/\n- RUN uv sync --no-dev --frozen\n- EXPOSE 8001\n- CMD [\"uv\", \"run\", \"uvicorn\", \"cm_plan.api.engine_app:app\", \"--host\", \"0.0.0.0\", \"--port\", \"8001\"]\n\nHow it works (layer-by-layer)\n1. Base image\n - Starts from python:3.12-slim. This is the Python runtime image used for the container.\n2. Add helper binaries\n - Copying /uv and /uvx from ghcr.io/astral-sh/uv:latest makes the uv tool (and likely uvx) available in the final image without installing it via pip.\n3. 
Dependency-only sync (cacheable)\n - COPY pyproject.toml uv.lock README.md ./\n - RUN uv sync --no-dev --frozen --no-install-project\n - This step uses uv to install all declared dependencies but explicitly avoids installing the project itself. Because only pyproject.toml/uv.lock are copied before this RUN, this layer will be reused when only source code changes.\n4. Add project source\n - COPY src/ src/\n5. Final sync (project install)\n - RUN uv sync --no-dev --frozen\n - This re-runs uv sync now that the project source exists in the image; uv will install the project package into the environment.\n6. Runtime\n - EXPOSE 8001 documents the port.\n - CMD runs the uv wrapper to launch uvicorn with the application import path cm_plan.api.engine_app:app bound to 0.0.0.0 on port 8001.\n\nWhy two uv sync runs?\n- The first run (with --no-install-project) installs pinned dependencies only and creates a stable layer that doesn't change when you edit source code. This speeds up rebuilds during development or CI when only application code changes.\n- The second run installs the actual project package into the image (so that imports like cm_plan.* work inside the container).\n\nEntrypoint and runtime behavior\n- The service starts via uv run uvicorn cm_plan.api.engine_app:app --host 0.0.0.0 --port 8001\n- The application module path is cm_plan.api.engine_app and the ASGI application object is named app.\n- Port mapping expectation: container listens on 8001; map that to a host port when running.\n\nBuild and run examples\n- Build:\n docker build -t cm-plan-engine:latest -f docker/engine/Dockerfile .\n- Run (default background):\n docker run -d --name cm-plan-engine -p 8001:8001 cm-plan-engine:latest\n- Run with logs to foreground:\n docker run --rm --name cm-plan-engine -p 8001:8001 cm-plan-engine:latest\n- Run with mounted code for iterative development (note: mounting src over the image copy may require you to run uv sync locally or in container to ensure 
dependencies/project installation):\n docker run --rm -it -p 8001:8001 -v \"$(pwd)/src:/app/src\" cm-plan-engine:latest\n\nNotes on uv and flags used\n- uv is a tool (from ghcr.io/astral-sh/uv) used in this Dockerfile to synchronize dependencies.\n- Flags used:\n - --no-dev: do not install development dependencies.\n - --frozen: do not allow dependency resolution changes (fail if lockfile and project disagree).\n - --no-install-project (first sync only): prevents installing the project code into site-packages; used for cache-friendly dependency install.\n\nConnection to the codebase\n- The Docker image runs cm_plan.api.engine_app:app — that is the concrete application object served by this service. Any change to the API implementation under cm_plan.api.* will affect what this image serves.\n- The Dockerfile depends on pyproject.toml and uv.lock for dependency resolution and on src/ for the project code.\n- This module does not itself call other services — it simply packages and runs the engine app. 
In runtime, the app may call other internal or external services according to application code (not visible in the Dockerfile).\n\nBuild cache tips\n- When you change pyproject.toml or uv.lock, the first dependency sync layer must be rebuilt; keep dependency changes minimal where possible.\n- When you change code in src/ only, Docker will reuse the dependency layer and only perform the later steps (COPY src/ and second uv sync).\n\nDevelopment/debugging tips\n- To iterate quickly on Python code without rebuilding the image each time:\n - Mount src/ into the container and run the container interactively.\n - If the project needs to be installed into site-packages for certain import behavior, run uv sync inside the running container after mounting source (or install in editable mode locally).\n- To run a different command for debugging, override CMD:\n docker run --rm -it -p 8001:8001 cm-plan-engine:latest uv run uvicorn cm_plan.api.engine_app:app --reload --port 8001\n Note: --reload may require source code present in container and is intended for development only.\n\nSecurity and production recommendations\n- Run as non-root inside the container: the Dockerfile does not currently switch to a non-root user. Consider adding a user and switching to it before CMD for production images.\n- Keep uv.lock, pyproject.toml consistent and audited.\n- Consider adding a HEALTHCHECK to the Dockerfile that probes the served HTTP health endpoint (if the app exposes one).\n- Minimize image size where possible (e.g., use slim-but-appropriate base, remove build tools if present).\n\nTroubleshooting\n- \"ModuleNotFoundError: cm_plan\": Ensure the second uv sync step completes successfully and that src/ was copied into /app/src and installed. 
If you mount src/ at runtime without installing, imports may fail depending on sys.path.\n- Dependency discrepancies: If uv sync --frozen fails, update uv.lock to match pyproject.toml (run uv lock/resolve locally) and rebuild.\n- Permission errors: If you run as non-root host user and mount volumes, check UID/GID mapping and file permissions.\n\nMermaid diagram (build + runtime, simplified)\ngraph TD\n A[python:3.12-slim] --> B[Copy /uv /uvx binaries]\n B --> C[Copy pyproject.toml + uv.lock]\n C --> D[uv sync --no-install-project (deps only)]\n D --> E[Copy src/]\n E --> F[uv sync (install project)]\n F --> G[Runtime: uv -> uvicorn cm_plan.api.engine_app:app (port 8001)]\n\nWhat this module does NOT do\n- It does not run tests, perform migrations, or run any multi-service orchestration; it only builds the runtime image for the engine service.\n- It does not embed secrets; environment variables and secrets must be provided at container runtime or via orchestration (Kubernetes, docker-compose, etc).\n\nWhere to change things\n- To change the served app path: edit CMD (cm_plan.api.engine_app:app).\n- To change dependencies: update pyproject.toml and regenerate uv.lock, then rebuild.\n- To change Python version: change FROM python:3.12-slim to the desired tag; be prepared to validate dependency compatibility.\n\nThis Dockerfile is the canonical packaging for the engine service and should be used by CI/CD and local development to produce the runnable engine container.","other-docker-ui":"# Other — docker-ui\n\ndocker/ui — Dockerized UI (web client) for the project\n\nOverview\n- Purpose: Build and package the repository's web UI (located in ui/) into a Docker image that serves the single-page application (SPA) with nginx. 
There are three image variants provided:\n - docker/ui/Dockerfile — production build served by nginx, expects a backend service reachable at hostname backend:8000 for API proxying.\n - docker/ui/Dockerfile.dev — development image that runs the UI dev server (npm run dev) and exposes port 3000.\n - docker/ui/Dockerfile.self-contained — produces a self-contained static build (single HTML artifact) served by nginx; includes a /healthz endpoint and image metadata labels.\n\nKey files\n- docker/ui/Dockerfile\n- docker/ui/Dockerfile.dev\n- docker/ui/Dockerfile.self-contained\n- docker/ui/nginx.conf\n- docker/ui/nginx.self-contained.conf\n- ui/ (source code of the frontend — not included here, but copied into the images by the Dockerfiles)\n\nHow the images work\n\n1) Production image (docker/ui/Dockerfile)\n- Multistage build:\n - Build stage: FROM node:22-slim\n - WORKDIR /app\n - COPY ui/package.json ui/package-lock.json* ./\n - RUN npm ci\n - COPY ui/ ./\n - RUN npm run build\n - Output: the static build artifacts are expected to be in /app/build\n - Runtime stage: FROM nginx:alpine\n - COPY --from=build /app/build /usr/share/nginx/html\n - COPY docker/ui/nginx.conf /etc/nginx/conf.d/default.conf\n - EXPOSE 80\n- nginx.conf behavior:\n - Serves the SPA static files from /usr/share/nginx/html\n - SPA fallback: any unknown path is served index.html via try_files $uri $uri/ /index.html; this allows client-side routing to work.\n - Proxying API calls: requests under /api/ are proxied to http://backend:8000/api/ with standard proxy headers (Host, X-Real-IP, X-Forwarded-For, X-Forwarded-Proto).\n - Implication: when this container is run inside a Docker network, the backend must be reachable at hostname backend on port 8000 (e.g., a docker-compose service named backend).\n\n2) Development image (docker/ui/Dockerfile.dev)\n- Single-stage image based on node:22-slim:\n - Installs dependencies via npm ci\n - Copies ui/ source into the container\n - EXPOSE 3000\n - CMD npm run 
dev -- --host 0.0.0.0 --port 3000\n- Intended usage: run a container for local development of the frontend. The dev server listens on 0.0.0.0:3000 inside the container, so map port 3000 to host.\n\n3) Self-contained image (docker/ui/Dockerfile.self-contained)\n- Multistage build similar to production, but:\n - Build runs npm run build:single which is expected to emit a single-file artifact at /app/dist/hands-on.html\n - Runtime uses nginx:alpine but copies docker/ui/nginx.self-contained.conf and sets the single HTML as the served index:\n - COPY --from=build /app/dist/hands-on.html /usr/share/nginx/html/index.html\n - Adds image labels using ARG VERSION and ARG VCS_REF:\n - org.opencontainers.image.title\n - org.opencontainers.image.description\n - org.opencontainers.image.version\n - org.opencontainers.image.revision\n - Exposes 80 and configures a HEALTHCHECK:\n - HEALTHCHECK --interval=30s --timeout=5s --start-period=10s --retries=3 CMD wget -q -O - http://127.0.0.1/healthz || exit 1\n- nginx.self-contained.conf behavior:\n - Provides a /healthz endpoint returning 'ok' (text/plain)\n - Serves the index at / and has SPA fallback as in production\n - Does not proxy /api/ — the self-contained build is intended to be fully static (no backend proxying)\n\nHow this module connects to the rest of the codebase\n- ui/: The Dockerfiles depend on the frontend source tree under ui/. The build stage runs npm scripts defined in ui/package.json (npm run build or npm run build:single).\n- backend service: The production nginx.conf expects a backend service available at backend:8000 to handle API requests under /api/. In a multi-container deployment (for example docker-compose), the backend service should be named backend and listen on port 8000 to match the proxy_pass target.\n- CI/release: docker/ui/Dockerfile.self-contained optionally picks up VERSION and VCS_REF build-time args to tag the image with metadata labels. 
CI can supply these args when building release images.\n\nBuild and run examples\n\nBuild production image\n- From repository root:\n - docker build -f docker/ui/Dockerfile -t myrepo/ui:latest .\n- Run:\n - docker run --rm -p 80:80 --network my-app-network myrepo/ui:latest\n - Ensure a backend service reachable at backend:8000 exists on the same Docker network if API calls are required.\n\nBuild development image (local dev)\n- docker build -f docker/ui/Dockerfile.dev -t myrepo/ui:dev .\n- Run:\n - docker run --rm -p 3000:3000 -v \"$(pwd)/ui:/app\" myrepo/ui:dev\n - The volume mount is optional but useful to edit source on host and have dev server pick up changes.\n\nBuild self-contained image (release)\n- With version and VCS ref:\n - docker build -f docker/ui/Dockerfile.self-contained --build-arg VERSION=1.2.3 --build-arg VCS_REF=abcdef -t myrepo/ui-self-contained:1.2.3 .\n- Run:\n - docker run --rm -p 80:80 myrepo/ui-self-contained:1.2.3\n - Health endpoint: GET http://<host>:80/healthz should return 'ok'\n\nImportant implementation details and behaviors\n- Dependency installation order (optimization):\n - Each Dockerfile copies package.json and package-lock.json* and runs npm ci before copying the rest of the source. This leverages Docker layer caching to avoid reinstalling dependencies when only application source changes.\n- Build output locations:\n - docker/ui/Dockerfile assumes static build output is at /app/build.\n - docker/ui/Dockerfile.self-contained expects /app/dist/hands-on.html and copies it to /usr/share/nginx/html/index.html. 
Ensure ui/package.json scripts produce these locations (npm run build vs npm run build:single).\n- nginx API proxy mapping:\n - nginx.conf maps location /api/ to proxy_pass http://backend:8000/api/ — a trailing slash is intentionally present on both location and proxy_pass to preserve the sub-path mapping (requests to /api/foo -> backend:8000/api/foo).\n - Common proxy headers are set (Host, X-Real-IP, X-Forwarded-For, X-Forwarded-Proto).\n- SPA fallback:\n - try_files $uri $uri/ /index.html ensures client-side routing works by returning index.html for non-file routes.\n- Healthcheck (self-contained):\n - Uses wget inside the container to poll /healthz. If wget is not available in a custom runtime, the healthcheck will fail; nginx:alpine includes busybox/wget in typical images used here.\n\nTroubleshooting & tips\n- 404s for client-side routes: If you see 404s when navigating directly to client routes, confirm nginx.conf is being copied into /etc/nginx/conf.d/default.conf and that try_files fallback is active.\n- API 502/host unreachable: If nginx returns upstream errors for /api/ requests, confirm the backend container/service is reachable as hostname backend on port 8000 from the nginx container (check Docker network and service names).\n- Dev server not accessible from host: Ensure npm dev server is started with --host 0.0.0.0 and port 3000 (Dockerfile.dev’s CMD sets these flags). 
Map ports properly when running the container.\n- Build artifacts not found: If the build stage does not generate /app/build or /app/dist/hands-on.html as expected, confirm the npm scripts in ui/package.json match the Dockerfile (npm run build and npm run build:single).\n- Caching and CDN headers: nginx config provided is minimal; if you need long-lived caching headers for static assets in production, extend nginx.conf accordingly.\n\nSmall architecture diagram\n- The diagram below is a high-level view of the two main runtime patterns: production (nginx + backend) and self-contained (static only).\n\ngraph LR\n A[ui/ source] --> B[Build stage (node:22-slim)]\n B --> C[Production image (nginx serving /usr/share/nginx/html)]\n C --> D[Proxies /api/* to backend:8000]\n B --> E[Self-contained image (nginx serving single file)]\n E --> F[exposes /healthz]\n\n(If using diagrams in Markdown viewers, the above shows the intended flow: source -> build -> nginx image; production nginx proxies to backend, self-contained serves static file + healthz.)\n\nContributing and changes\n- If you change frontend build output paths or npm scripts, update the corresponding Dockerfile to COPY the correct artifact paths.\n- If the backend service name or port changes, update docker/ui/nginx.conf proxy_pass target to match the service discovery in your orchestration (docker-compose service name, Kubernetes Service, etc.).\n- When updating base images (node:22-slim or nginx:alpine), test builds and runtime behavior (especially for wget in healthcheck and compatibility of built assets).\n\nThis module contains only containerization artifacts; the actual UI implementation is in ui/. 
The Dockerfiles and nginx configurations are the authoritative sources for how the UI is built and served in different deployment scenarios.","other-docs":"# Other — docs\n\nModule: Other — docs\n\nOverview\n- This module contains the system-level design and the traceability artifacts for \"Planification Centre de la main\". It is documentation, not executable code. The primary sources are:\n - docs/SDS_Planification_CM.md — the Software Design Specification (SDS) describing architecture, data model, constraints, optimization, UI, workflows and deployment.\n - docs/SDS-traceability-matrix.md — a mapping between SDS requirements (SDS-1xxx … SDS-13xxx) and their implementation status in the codebase.\n\nPurpose\n- Provide a single authoritative normative description of the system's:\n - functional requirements and non‑functional constraints,\n - data model and domain concepts,\n - optimization model and solver modes,\n - UI behaviors and permission model,\n - architecture choices for both browser-only and hosted deployments.\n- Record and maintain traceability between requirements and concrete code locations, test coverage and operational gaps (PoC limitations).\n- Serve as the reference for reviewers, implementers and auditors to validate whether code changes satisfy a requirement and to guide future design decisions.\n\nFiles\n- docs/SDS_Planification_CM.md\n - Full SDS (French). Sections include introduction, problem description, scope, domain entities, use cases and workflows, detailed specifications per SDS id, architecture (browser + hosted), component diagrams, and appendices (glossary, requirements list).\n - Use it as the canonical spec when adding or changing behavior. 
It contains the IDs (SDS-####) used by the traceability matrix.\n- docs/SDS-traceability-matrix.md\n - A per-requirement status table (Implémenté / Partiel / Non implémenté) with pointers to where in the repository the requirement is satisfied (function names, components, tests) or notes on partial coverage or missing pieces.\n - Includes the reading legend and assumptions (which variant — [BROWSER] vs [HOSTED] — the evaluation relates to).\n\nHow to read the traceability matrix\n- Each SDS requirement (SDS-1xxx … SDS-13xxx) is one entry. For each:\n - Status keys:\n - Implémenté — verified behavior in code and wired to a real execution flow.\n - Partiel — capability exists but incomplete, limited, or not wired to primary UI flows.\n - Non implémenté — no verified implementation.\n - Scope tags:\n - [BROWSER] — PoC/browser-only evaluation.\n - [HOSTED] — hosted/backend evaluation.\n- The matrix often references concrete code symbols and components (example entries include deduplicateColumns(), addOpRow(), convertAbsentConsultToClosed(), deriveWeeklyFromTemplate(), RoomManager, ActivityCatalog.svelte, GuardRoster, engine/types.ts, ui/src/lib/engine.ts, ui/src/lib/solver.worker.ts). 
Use these strings as search targets when validating the claim.\n- When a requirement is marked Implémenté, the entry typically lists the component and sometimes the tests that exercise it (for example test_max_staffing_urg_cap).\n\nKey components referenced by the docs (quick map)\n- Frontend UI artifacts (Svelte):\n - RoomManager, ActivityCatalog.svelte, CellModal, VersionDiffViewer, GuardRoster, HsGauge, PhysicianManager, RuleEditor, Topbar — UI components named in the SDS that implement interaction patterns described in the spec.\n- Bridge / types:\n - engine/types.ts and planningBridge.ts — the contract between UI and solver; types and serialization helpers are defined here.\n- Solver integration:\n - ui/src/lib/engine.ts — TypeScript facade to the engine (Rust/WASM).\n - ui/src/lib/solver.worker.ts — Web Worker wrapper for off‑UI-thread solver execution (SDS-3011).\n- Engine internals (described by SDS):\n - rust engine modules (bindings.rs, solver.rs, cpsat.rs, equity.rs, resilience.rs, diff.rs, repair.rs) are referenced in the SDS architecture diagrams and behaviors.\n- Persistence:\n - sql.js (SQLite WASM), .hands-on export/import — described as browser persistence (SDS-10010).\n - PostgreSQL schema and hosted persistence referenced for the hosted variant (SDS-10011).\n- Helpers and domain functions named in the matrix:\n - deduplicateColumns(), addOpRow(), removeOpRow(), convertAbsentConsultToClosed(), deriveWeeklyFromTemplate(), syncAbsencesToGrid(), estimateHs(), pinRecupHs(), applySelectedSolverChanges(), impactReport(), exportPdfA3(), replaceGuards(), purgeOldVersionsSql(), VersionDiffViewer.\n - These identifiers are entry points to check implementation coverage and are useful when updating the traceability status.\n\nHow the SDS and the traceability matrix are expected to be used by developers\n- Reference for design decisions\n - Before implementing a feature or changing behavior, consult SDS_Planification_CM.md to ensure the change fits the 
specified model (entities, rules, constraints and permitted exceptions).\n- Mapping code to requirements\n - When a developer implements or modifies behavior tied to an SDS ID, update docs/SDS-traceability-matrix.md:\n - Add the code locations (file path + function/component name) that implement or change fulfillment of the SDS entry.\n - Use the status fields to reflect current verification: Implémenté / Partiel / Non implémenté.\n - When an SDS entry transitions from Partiel/Non implémenté to Implémenté, add a short note with the PR or commit that proves the implementation.\n- Validation and review\n - Reviewers should cross-check PRs against the SDS IDs affected. A change that claims to satisfy an SDS must list where tests and code implement the requirement.\n - Automated checks (if present) should surface missing updates to docs/SDS-traceability-matrix.md for PRs that touch code paths referenced by an SDS.\n\nMaintaining traceability: practical workflow\n1. Implement or change code for a requirement.\n2. Add/modify unit or integration tests that demonstrate the required behavior (refer to test names from the matrix when present).\n3. Update docs/SDS-traceability-matrix.md:\n - add the code path(s) (component/file/function/test) that realize the requirement,\n - set the status to Implémenté (or Partiel) and add a one-line justification referencing a commit or PR.\n4. Update SDS_Planification_CM.md only when the design in the SDS itself must change (architectural or conceptual change). For implementation details, prefer the traceability matrix to record mapping of code to requirement.\n5. 
In the PR description include links to the SDS IDs that the change addresses and the updated traceability lines.\n\nConnecting the docs to the repository and tests\n- Use repo search to find where the SDS concepts are implemented:\n - Search for exact SDS identifiers or function names mentioned in the matrix (e.g., deduplicateColumns, convertAbsentConsultToClosed).\n - For rule/constraint implementations, look under engine/rules.ts and solver modules; for UI behavior, search ui/src/components and ui/src/lib.\n - Tests: the traceability matrix calls out tests when available (e.g., test_max_staffing_urg_cap); when adding tests, follow existing naming patterns to make automated discovery straightforward.\n- Keep the SDS conservative: the matrix is explicit that \"Implémenté\" means both code presence and wiring to a real user flow — a unit test alone is not sufficient evidence without integration or UI wiring where required.\n\nLocalization, tone and ownership\n- The SDS and traceability matrix are written in French. Keep language consistent when editing them. Add English summaries only if a project-localized copy is added; do not edit the canonical French files unless you are the owner of the doc change.\n- Ownership: the docs do not assert a single code owner. 
When updating content that asserts an implementation, include an author and date in the matrix line to track who validated the mapping.\n\nSmall architecture diagram (docs ↔ code)\n- The diagram below helps visualize how these documents relate to the code areas they describe:\n\n```mermaid\nflowchart LR\n Docs[docs/ (SDS and Traceability)] --> UI[\"ui/ (SvelteKit, components)\"]\n Docs --> Engine[\"engine/ (Rust/WASM, solver)\"]\n Docs --> Persistence[\"persistence (sql.js / postgres)\"]\n Docs --> Hosted[\"backend/ (FastAPI, adapters)\"]\n UI --> Engine\n Engine --> Persistence\n Hosted --> Engine\n```\n\nNotes and caveats recorded in the docs\n- The traceability matrix includes explicit PoC limitations (features missing on the browser variant, hosted-only features not implemented in PoC). When you implement a hosted feature or fill a PoC gap, update both:\n - the SDS-traceability-matrix.md entry to reflect new status,\n - possibly SDS_Planification_CM.md if the design assumption changes.\n- The matrix and SDS are living artifacts: treat them as part of the codebase; changes to code that alter system behavior must be reflected here.\n\nQuick checklist for contributors\n- Does your change affect a requirement referenced by an SDS id? If yes:\n - Add/extend tests exercising the behavior.\n - Update docs/SDS-traceability-matrix.md with code locations and status justification.\n- Does your change alter the architecture, data model or external contract? If yes:\n - Propose an update to docs/SDS_Planification_CM.md explaining the rationale and the exact changes to the spec.\n - Notify reviewers and ops (for deployment impact, migration or persistence schema changes).\n- For imports/exports, solver behavior, persistence schema or auth changes, cross-check the SDS sections (SDS-9xxx, SDS-3xxx, SDS-10xxx, SDS-11xxx) to ensure consistency.\n\nCall graph / execution flows for this module\n- This module is documentation-only. 
There are no runtime call graph or execution flows originating from the docs files themselves. The \"Call Graph & Execution Flows\" section at the end of docs/SDS-traceability-matrix.md correctly shows \"None\".\n\nWhere to make future docs edits\n- Small changes or clarifications: edit the corresponding file under docs/.\n- Large design changes: open a design RFC or PR that updates docs/SDS_Planification_CM.md and align the traceability matrix concurrently.\n- When adding new SDS entries: follow the existing ID pattern (SDS-1xxx … SDS-13xxx) and add a corresponding entry to the matrix with initial status Non implémenté or Partiel.\n\nContact and ownership practices\n- When you assert \"Implémenté\" in the traceability matrix, include:\n - file paths and exported symbols (component/function names),\n - test names and/or PR/commit references,\n - short note on the scope of validation (unit, integration, UI flow).\n- Prefer small, incremental doc updates included in the same PR as the code change that implements the requirement.\n\nSummary\n- docs/SDS_Planification_CM.md is the canonical design and requirements spec.\n- docs/SDS-traceability-matrix.md connects requirements to the codebase and indicates implementation status.\n- Treat both as first-class repository artifacts: keep them updated whenever code or behavior changes, reference code symbols when claiming implementation, and add tests to substantiate claims.","other-engine":"# Other — engine\n\nengine — cm-plan-engine\n=======================\n\nSummary\n-------\nThe cm-plan-engine crate is the WebAssembly-compiled planning engine used by the application. It is intended to run entirely in the browser and to interoperate with JavaScript through wasm-bindgen. 
The crate packaging and build configuration are declared in engine/Cargo.toml.\n\nKey points:\n- Package name: cm-plan-engine (version 0.6.2)\n- Rust edition: 2021\n- Crate types: cdylib and rlib (cdylib is the WebAssembly entry artifact used with wasm-bindgen)\n- Purpose: compile Rust engine logic to WebAssembly for use in a browser environment\n\nCargo configuration (what matters)\n---------------------------------\nThe Cargo.toml in this crate declares the package metadata, crate types, dependencies, and a release optimization profile that is tuned for small wasm binaries.\n\nImportant fields:\n- [package]\n - name: \"cm-plan-engine\"\n - version: \"0.6.2\"\n - edition: \"2021\"\n - description: \"CM Planning engine compiled to WebAssembly — runs entirely in the browser\"\n- [lib]\n - crate-type = [\"cdylib\", \"rlib\"]\n - cdylib: produces a wasm-compatible dynamic library for use with wasm-bindgen\n - rlib: allows linkage as a Rust library if needed in other Rust crates\n- [profile.release]\n - opt-level = \"s\" (optimize for size)\n - lto = true (link-time optimization enabled)\n\nDependencies and why they’re present\n-----------------------------------\nThe crate is configured with dependencies focused on WebAssembly/browser interop, serialization, time handling, unique IDs, randomness suitable for JS, and ergonomic error types.\n\n- wasm-bindgen = \"0.2\"\n - Primary bridge between Rust and JavaScript. Exposes Rust functions, types and structs to JS and allows calling JS from Rust as needed.\n\n- serde = { version = \"1\", features = [\"derive\"] }\n - Serialization / deserialization for data sent to/from JS or persisted as JSON. The derive feature enables serde_derive macros.\n\n- serde_json = \"1\"\n - JSON parsing and rendering. Useful for exchanging structured data with JavaScript or for diagnostics.\n\n- js-sys = \"0.3\"\n - Low-level bindings to JavaScript global objects and types (Array, Object, Promise, etc.). 
Use for fine-grained JS interop when wasm-bindgen’s higher-level helpers aren’t sufficient.\n\n- web-sys = { version = \"0.3\", features = [\"console\"] }\n - Web APIs bindings. The \"console\" feature is enabled to use browser console logging (web_sys::console::log_1 and friends) from Rust.\n\n- chrono = { version = \"0.4\", default-features = false, features = [\"alloc\", \"serde\", \"wasmbind\", \"clock\"] }\n - Date/time utilities. The configured features indicate:\n - alloc: allow usage without std (useful for no_std-ish builds in Wasm)\n - serde: enable (de)serialization of chrono types\n - wasmbind: enable any wasm-specific bindings (when chrono exposes them)\n - clock: enable the clock feature needed for retrieving time in Wasm-friendly environments\n\n- uuid = { version = \"1\", features = [\"v4\", \"serde\", \"js\"] }\n - Universally unique identifiers. Enabled features:\n - v4: generate random UUIDv4 values\n - serde: serialize/deserialize UUIDs\n - js: JS-friendly support where required\n\n- getrandom = { version = \"0.2\", features = [\"js\"] }\n - Secure randomness source that is wired to the browser’s JavaScript randomness API (crypto.getRandomValues) when compiled for wasm32 targets (feature \"js\").\n\n- thiserror = \"2\"\n - Convenient derive-based error types for Rust library errors (used to implement Error/Display for crate-specific errors).\n\nDev-dependencies\n----------------\n- wasm-bindgen-test = \"0.3\"\n - Test harness for running Rust tests in a wasm environment (e.g., headless browser runners). Use this for integration-style tests that need to execute wasm in JS.\n\nHow the crate is intended to be built and used\n----------------------------------------------\nHigh-level build flow (typical for wasm Rust crates):\n1. Build the crate for the wasm32-unknown-unknown target:\n - cargo build --release --target wasm32-unknown-unknown\n2. 
Run wasm-bindgen on the produced .wasm file to generate JS bindings for the browser:\n - wasm-bindgen target/wasm32-unknown-unknown/release/cm-plan-engine.wasm --out-dir pkg --target web\n (or use wasm-pack which wraps this flow)\n3. Import the generated JS module in browser code to load and call into the engine.\n\nNotes:\n- The presence of wasm-bindgen and web-sys/js-sys implies bindings are expected between Rust and JavaScript. The crate-type cdylib ensures the produced artifact is compatible with wasm-bindgen’s expectations.\n- The chronologically oriented dependency (chrono) and the uuid/getrandom set indicate the engine performs time-based operations and requires secure randomness in the browser.\n- Size is prioritized in the release profile to keep wasm download size small (opt-level = \"s\", lto = true).\n\nTesting\n-------\n- wasm-bindgen-test is declared as a dev-dependency. Use wasm-bindgen-test to run tests in a wasm environment:\n - cargo test --target wasm32-unknown-unknown --release (plus a test runner or wasm-bindgen test harness)\n- For local development, wasm-pack test or headless browser test runners (e.g., wasm-pack test --headless --chrome) are commonly used to run wasm-bindgen-test suites.\n\nIntegration points with the rest of the codebase\n------------------------------------------------\n- The engine crate compiles to WebAssembly and is intended to be loaded from the frontend (browser). 
It is the computational core that performs planning work on the client side.\n- Communication with the rest of the application is expected to occur via:\n - JavaScript/CSS/HTML UI code that imports the generated wasm-bindgen module and sends/receives JSON or structured data\n - Serialization through serde/serde_json for passing structured data across the boundary\n - Console logging via web-sys::console for browser-side logging and diagnostics\n- Because the crate exposes a wasm-bindgen interface, its public Rust exports (annotated with #[wasm_bindgen]) form the API surface that the frontend will call. (Those functions/types are defined in the Rust source — not shown in Cargo.toml.)\n\nPerformance and size considerations\n-----------------------------------\n- opt-level = \"s\" and LTO = true are used to reduce the size of the produced wasm. Expect builds to emphasize download size over raw CPU performance.\n- Be mindful of which Rust dependencies are pulled into the final wasm binary. Enabling or disabling features on dependencies like chrono and uuid can materially affect binary size.\n\nCommon developer tasks and tips\n-------------------------------\n- Building for development:\n - Use cargo build --target wasm32-unknown-unknown for iterative builds, but be aware of wasm-bindgen step required to use resulting .wasm in the browser.\n - Consider wasm-pack for a smoother developer experience (it runs cargo build + wasm-bindgen and produces an npm-style package).\n- Debugging:\n - Use web-sys::console::log_1 (console feature enabled) for logging from Rust to browser console.\n - For panics, enable console_error_panic_hook in Rust code (not declared in Cargo.toml). 
If you add it, ensure it is compiled in debug builds to get readable stack traces in the browser console.\n- Randomness and UUIDs:\n - getrandom feature \"js\" routes randomness to the browser’s crypto.getRandomValues; this is preferable for generating UUIDv4 (uuid crate) in the browser.\n- Time:\n - chrono features include wasmbind/clock to ensure chrono’s time functions behave correctly in wasm environments. If you manipulate time heavily, validate chrono’s behavior in the browser target.\n\nMermaid diagram — high-level integration\n---------------------------------------\nThis small diagram shows the logical relationships relevant to this crate.\n\n```mermaid\nflowchart LR\n A[\"cm-plan-engine (Rust/wasm)\"] --> B[wasm-bindgen-generated JS glue]\n B --> C[\"Browser JS (frontend)\"]\n A --> D[serde / serde_json]\n A --> E[chrono]\n A --> F[uuid / getrandom]\n A --> G[web-sys / js-sys]\n```\n\nTroubleshooting\n---------------\n- If wasm-bindgen step fails:\n - Confirm the target build artifact path and wasm-bindgen CLI version match the wasm-bindgen crate version used by the crate.\n- If Unicode, date, or randomness behavior differs between native and wasm:\n - Check features on chrono, uuid, and getrandom to ensure wasm-compatible options are enabled (these are already enabled in this Cargo.toml).\n- If binary size is too large:\n - Audit dependency features, remove unnecessary features, or evaluate replacing heavy dependencies with smaller alternatives.\n - Keep opt-level = \"s\" and lto = true in release; consider enabling linker dead-code stripping or further wasm optimizers (wasm-opt) post-build.\n\nVersioning and publishing\n-------------------------\n- Current crate version is 0.6.2 in Cargo.toml.\n- This crate is intended as a WebAssembly artifact consumed by the frontend; publishing to crates.io is possible (rlib produced), but the primary consumption model is via the generated wasm + JS glue for web deployment.\n\nAppendix: Files to check in the 
repository\n-----------------------------------------\n- engine/Cargo.toml (this file) — dependency and package config\n- engine/src/lib.rs (typical location for wasm-bindgen exports) — implement wasm interface here (not shown)\n- Build scripts or CI config (CI may contain wasm-pack, wasm-bindgen, wasm-opt steps)\n\nIf you are contributing code: ensure any changes to public exports intended for JavaScript use are annotated with wasm-bindgen attributes and that you run the wasm-bindgen generation step to validate the JS glue. Adjust Cargo.toml dependency features only when you understand the effect on wasm size and behavior in the browser.","other-pyproject-toml":"# Other — pyproject.toml\n\nOther — pyproject.toml\n\nPurpose\n- Acts as the canonical, machine- and human-readable project manifest for the cm-plan repository.\n- Declares package metadata (name, version, description, authors), runtime and optional dependencies, the build-backend, and tool configurations used by linters, type-checkers, and test runners.\n- Drives packaging, installation, CI checks, and local developer workflows. Tools that honor PEP 621 and PEP 517 read settings from this file.\n\nKey metadata fields\n- project.name: \"cm-plan\" — package name.\n- project.version: \"0.6.2\" — current package version.\n- project.description: \"Planning engine for Centre de la main (CM) — constraint-based scheduling\" — brief purpose of the project.\n- project.readme: \"README.md\" — path to long description.\n- project.authors: single author entry with name and email.\n- project.requires-python: \">=3.12\" — minimum Python version required. 
Developers and CI must run Python 3.12 or newer.\n\nRuntime dependencies (project.dependencies)\nThese dependencies are required at runtime for the planning engine and associated APIs:\n- pydantic>=2.10 — data validation and configuration models.\n- ortools>=9.12 — core constraint solver / scheduling engine.\n- pyyaml>=6.0 — YAML parsing (likely for configuration or import/export).\n- fastapi>=0.115 — HTTP API framework used by the service.\n- uvicorn[standard]>=0.34 — ASGI server for running the FastAPI app.\n- bcrypt>=4.0 — password hashing (authentication).\n- python-jose[cryptography]>=3.3 — JWT handling and JWT-related crypto.\n- python-multipart>=0.0.20 — multipart form data parsing (file uploads).\n- icalendar>=6.0 — iCalendar (.ics) read/write (export/import schedules).\n- openpyxl>=3.1 — Excel read/write support.\n- pypdf>=5.4 — PDF reading/manipulation.\n- psycopg[binary]>=3.2 — PostgreSQL driver (binary extension) for DB access.\n- httpx>=0.27 — HTTP client for external requests (sync/async).\n\nNotes:\n- Version specifiers use a minimum version strategy (>=), except for the build-system where a range is pinned. 
The minimums reflect the features and fixes required by the codebase.\n- The listed libraries indicate runtime responsibilities: scheduling/constraint solving (ortools), API/server (FastAPI + Uvicorn), data models (pydantic), persistence (psycopg), and import/export helpers (icalendar, openpyxl, pypdf).\n\nOptional (development) dependencies\n- project.optional-dependencies.dev contains developer tooling:\n - pytest>=8.0 — test runner.\n - pytest-cov>=6.0 — coverage reporting for pytest.\n - anyio[trio]>=4.0 — async testing and concurrency backend helper.\n - ruff>=0.11 — fast linter/formatter for Python.\n - mypy>=1.14 — static type checker.\n\nInstall tips:\n- Install runtime deps (packaged install): python -m pip install .\n- Install editable + dev extras: python -m pip install -e '.[dev]'\n - Quoting may be necessary depending on the shell.\n\nBuild system\n- build-system.requires = [\"uv_build>=0.8.22,<0.9.0\"]\n- build-system.build-backend = \"uv_build\"\n\nImplications:\n- Packaging and building are delegated to the uv_build PEP 517 backend. 
The version range is pinned to avoid incompatible changes from major uv_build releases.\n- Standard PEP 517-build tools (pip, python -m build) will invoke uv_build to produce wheels/sdist when building or installing.\n\nTool configuration\n- [tool.ruff]\n - target-version = \"py312\" — ruff treats code as Python 3.12.\n - line-length = 100 — maximum allowed line length (overrides ruff default).\n- [tool.ruff.lint]\n - select = [\"E\", \"F\", \"W\", \"I\", \"UP\", \"B\", \"SIM\", \"RUF\"]\n - This narrows ruff's checks to a curated set of error/warning categories used by the project.\n- [tool.mypy]\n - python_version = \"3.12\"\n - strict = true — enables mypy's strict mode (enforces a high bar for annotations, optionality, and correctness).\n- [tool.pytest.ini_options]\n - testpaths = [\"tests\"] — pytest will look for tests in the tests/ directory.\n - addopts = \"-v --tb=short\" — default pytest CLI options (verbose, short tracebacks).\n\nDeveloper workflows (common commands)\n- Install development environment\n - python -m pip install -e '.[dev]'\n- Run test suite\n - pytest\n - or python -m pytest\n - (pytest picks up addopts from pyproject)\n- Run linter (ruff)\n - ruff check .\n - ruff will use target-version and lint selection from pyproject.toml.\n- Run static type checking (mypy)\n - mypy .\n - or python -m mypy .\n - mypy will use python_version and strictness from pyproject.toml.\n- Build distributions (PEP 517)\n - python -m build\n - This triggers the uv_build backend specified by build-system.\n- Install package for runtime usage\n - python -m pip install .\n - For editable development: python -m pip install -e .\n\nHow pyproject.toml connects to the rest of the codebase\n- Packaging and distribution: metadata (name, version, readme) and build-backend determine how the repository is packaged for release and installation.\n- Runtime behavior: runtime dependencies enumerate the external libraries the code imports at runtime (e.g., ortools used inside 
solver modules, fastapi/uvicorn for the API layer, psycopg for database access). Installing the package (pip install .) resolves and installs these dependencies.\n- Development and CI: tool configuration sections (ruff, mypy, pytest) centralize the static-analysis and test settings used by local developers and CI pipelines. CI pipelines should rely on the entries here to ensure consistent checks.\n- Tests: pytest configuration points pytest at the tests/ directory; test authors should place tests there to be discovered.\n- Type-safety and linting: mypy strict mode and ruff settings enforce project-wide style and correctness expectations; contributors should run these tools and fix violations prior to submitting changes.\n\nMini architecture diagram\n- Helps visualize the role of pyproject.toml in the developer/CI toolchain:\n\n```mermaid\nflowchart LR\n A[pyproject.toml] --> B[build-system: uv_build]\n A --> C[project.dependencies]\n A --> D[project.optional-dependencies.dev]\n A --> E[tool.ruff / tool.mypy / tool.pytest]\n B --> F[\"packaging (wheel, sdist)\"]\n C --> G[\"runtime environment (FastAPI, ortools, DB)\"]\n E --> H[\"developer & CI checks\"]\n```\n\nTroubleshooting and notes\n- Verify Python version: The project requires Python 3.12+. Running tools or installing with older Python will fail.\n- Editable install gotchas: On some shells the extras bracket syntax needs quoting: python -m pip install -e '.[dev]'\n- Tool versions: The dev extras list minimum versions. If your local ruff/mypy are older than the specified minimum, upgrade them; CI should install the dev extras to match the environment.\n- Build backend errors: If a build fails referencing uv_build, ensure pip and build metadata are up-to-date and that uv_build version matching the range is available in your environment.\n- Lint/type-check failures: The project enables strict mypy and a curated ruff rule set. 
Expect to run mypy/ruff and iteratively resolve issues while contributing.\n\nSummary\npyproject.toml is the single source of truth for packaging, dependencies, and developer tooling in cm-plan. It defines the runtime requirements (constraint solver, API server, DB driver, import/export helpers), a dedicated build-backend (uv_build), and developer tooling configuration (ruff, mypy, pytest). Follow the commands in the Developer workflows section to install, test, lint, type-check, and build the project consistently with CI and other contributors.","other-readme-md":"# Other — README.md\n\n# Planification CM (PoC) — README\n\nRésumé\n- Projet preuve de concept pour la planification du Centre de la main (CM).\n- UI principale en SvelteKit (réutilisée pour deux variantes de déploiement).\n- Deux variantes :\n - Variante serveur (par défaut) : Python / OR-Tools CP-SAT via FastAPI + SQLite/PostgreSQL.\n - Variante navigateur : moteur Rust/WASM CP-SAT + sql.js pour persistance, tout dans le navigateur.\n\nBut développeur : ce README décrit l'architecture, les composants clés, les commandes de développement/build/test, le mécanisme de variante et les points d'attention pour contribuer.\n\nPrincipales fonctionnalités (courte)\n- Génération et réparation de plannings, simulation, analyse d'équité et résilience.\n- Édition cellule par cellule d'une grille hebdomadaire, standard semestriel, absences/guardes avec persistance locale dans la variante navigateur.\n- Auth locale multi-utilisateurs, historique local (undo), audit local, export PDF A3 (impression navigateur).\n\nArborescence importante\n- src/cm_plan/ — domaine, backend Python, API FastAPI, persistance.\n- tests/ — tests unitaires et d'intégration backend (Python).\n- engine/ — moteur Rust/WASM, bindings et solveur CP-SAT.\n- ui/ — frontend SvelteKit (partagé entre les variantes).\n - ui/src/lib/api-server.ts — façade HTTP → backend Python (variante serveur).\n - ui/src/lib/api-browser.ts — façade locale → sql.js + 
moteur Rust/WASM (variante navigateur).\n - ui/src/lib/api.ts — barrel re-export résolu par Vite selon la variante.\n - ui/src/lib/engine.ts — façade TS → moteur Rust/WASM.\n - ui/src/lib/solver.worker.ts — Web Worker autour du solveur WASM.\n - ui/src/lib/engine/ — types, règles et helpers TS partagés.\n - ui/src/lib/db.ts — couche sql.js (variante navigateur).\n - ui/src/lib/planningBridge.ts — pont serveur pour plannings (variante serveur).\n - ui/src/lib/components/ — composants Svelte partagés.\n - ui/src/lib/stores/ — stores Svelte partagés.\n- docs/ — spécifications, matrice de traçabilité.\n- docker/ — Dockerfiles backend, engine et UI (prod/dev).\n\nMécanisme de variante (essentiel pour contributeurs)\n- Le plugin Vite cmVariantPlugin redirige les imports de $lib/api vers ui/src/lib/api-server.ts ou ui/src/lib/api-browser.ts.\n- Sélection de la variante :\n - Par variable d'environnement CM_VARIANT lors du build.\n - Ou via le flag de build Vite/NPM --mode browser qui active la variante navigateur.\n- Conséquence : le code UI consomme toujours $lib/api et ne connaît pas la variante ; le plugin gère l'implémentation concrète.\n\nArchitecture (vue simplifiée)\n\n```mermaid\nflowchart LR\n UI[\"ui (SvelteKit)\"] -->|import $lib/api →| ApiImpl\n subgraph Variant\n ApiServer[\"ui/src/lib/api-server.ts\"]\n ApiBrowser[\"ui/src/lib/api-browser.ts\"]\n end\n ApiServer -->|HTTP| Backend[\"FastAPI (src/cm_plan)\"]\n Backend -->|calls| EngineServer[\"Rust engine (Docker or native)\"]\n ApiBrowser -->|local| SqlJs[\"sql.js (ui/src/lib/db.ts)\"]\n ApiBrowser -->|WASM| EngineWasm[\"engine/ (Rust→WASM)\"]\n```\n\nDémarrage / déploiement\n\nVariante serveur (Docker)\n- Prérequis : Docker Desktop (ou équivalent).\n- Builder et lancer :\n - docker compose build\n - docker compose up -d\n- Par défaut la persistance est SQLite avec volume Docker sqlite_data.\n- Ports :\n - UI : http://localhost:3000\n - Backend API health : http://localhost:8000/api/health\n- Stop :\n - 
docker compose down\n\nVariante PostgreSQL (override)\n- docker compose -f docker-compose.yml -f docker-compose.postgres.yml up -d\n\nFichiers .env\n- Un fichier .env local peut surcharger :\n - CM_DB_PATH\n - CM_JWT_SECRET\n - CM_PG_DSN\n - Paramètres OIDC\n\nDéveloppement local\n\nBackend (serveur)\n- Installation / exécution typique :\n - uv sync --extra dev\n - uv run uvicorn cm_plan.api.app:app --reload --host 0.0.0.0 --port 8000\n- Tests/qualité :\n - uv run pytest\n - uv run ruff check .\n - uv run mypy src/\n\nFrontend\n- Entrer dans ui/ :\n - cd ui && npm install\n- Mode développement (variante serveur) :\n - npm run dev\n- Mode développement navigateur (WASM + sql.js) :\n - npm run dev:browser\n\nBuilds\n\nVariante serveur (build classique SvelteKit)\n- cd ui && npm run build\n\nVariante navigateur (site statique autonome)\n- cd ui && npm install\n- npm run build:browser\n- Résultat : ui/build/ (site statique déployable sur n'importe quel serveur de fichiers)\n\nBuild single-file (fichier HTML autonome)\n- cd ui\n- npm run build:single\n- Résultat : ui/dist/hands-on.html (HTML unique embarquant l'app)\n\nTests Frontend\n- cd ui\n- npm run check # svelte-check\n- npm test # tests unitaires moteur TS (ex. 69 tests dans PoC)\n\nPoints techniques importants pour développeurs\n\n1. Interface API abstraction\n- ui/src/lib/api-server.ts et ui/src/lib/api-browser.ts implémentent la même surface consommée par l'UI.\n- Les adaptations spécifiques à la persistance et au transport (HTTP vs sql.js) restent isolées dans ces modules.\n\n2. Moteur de contraintes\n- Deux modes :\n - Serveur : le solveur peut tourner côté backend (Rust exécutable/CGI ou service) ou Python OR-Tools selon la configuration.\n - Navigateur : moteur Rust compilé en WASM, utilisé via engine.ts et solver.worker.ts.\n- Le dossier engine/ contient le code Rust → WASM ainsi que les bindings.\n\n3. 
Persistance\n- Variante serveur : sqlite (fichier) ou PostgreSQL (via override docker-compose.postgres.yml).\n- Variante navigateur : sql.js (SQLite in-memory / persisted via IndexedDB/file fallback) implémentée dans ui/src/lib/db.ts.\n\n4. Authentification et sécurité\n- PoC fournit une auth locale multi-utilisateurs (stockée localement pour navigateur ; JWT possible côté serveur).\n- Attention : sécurité production non couverte (hachage de mot de passe, vérification JWT, contrôle d'autorisation backend).\n\n5. Build-time plugin\n- cmVariantPlugin (Vite) est responsable de la redirection d'import $lib/api. Pour ajouter un point d'injection similaire, modifier le plugin dans la config Vite du UI.\n\nValidation et qualité\n- Backend :\n - Tests unitaires/integration : pytest (uv run pytest)\n - Lint/type : ruff, mypy\n- Frontend :\n - svelte-check (npm run check)\n - Tests TS (npm test)\n - Builds pour les variantes : npm run build / npm run build:browser / npm run build:single\n\nDépendances Docker & images\n- docker/ contient Dockerfiles pour backend, engine et UI (images de prod/dev).\n- L'image self-contained utilise hands-on.html pour la variante single-file.\n\nOù trouver la documentation plus détaillée\n- docs/SDS_Planification_CM.md — spécification fonctionnelle et technique.\n- docs/SDS-traceability-matrix.md — matrice de traçabilité des exigences SDS.\n\nLacunes connues (à prioriser si vous contribuez)\n- Imports structurés PDF/XLSX non disponibles dans la variante navigateur.\n- Exports avancés métier (.ics, rapports d'impact, journaux de salles fermées).\n- Sécurité production (hachage mot de passe, JWT, autorisations serveur).\n- Planning standard par postes abstraits + rotation cyclique hebdomadaire.\n- Workflows avancés de configuration des heures sup, règles et salles.\n\nConseils pour contributeurs\n- Ajouter ou modifier une API côté UI :\n - Modifier ui/src/lib/api-server.ts et ui/src/lib/api-browser.ts en parallèle pour garder la parité.\n - 
Tester les deux variantes (npm run dev et npm run dev:browser).\n- Étendre le moteur :\n - Travailler dans engine/ ; tenir compte des bindings vers ui/src/lib/engine.ts et du worker solver.worker.ts.\n - Valider les tests TS (npm test) et reconstruire le WASM si nécessaire.\n- Ajout d'une table ou migration DB :\n - Pour le backend, adapter src/cm_plan/ et ajouter tests correspondants dans tests/.\n - Pour la variante navigateur, tenir compte de ui/src/lib/db.ts (sql.js schema).\n- Pour débogage rapide :\n - Backend : lancer uvicorn en mode --reload.\n - Frontend : npm run dev (mode serveur) ou npm run dev:browser (mode WASM).\n\nAnnexes rapides — commandes fréquemment utilisées\n- Docker compose (build + up) :\n - docker compose build\n - docker compose up -d\n- Docker compose Postgres override :\n - docker compose -f docker-compose.yml -f docker-compose.postgres.yml up -d\n- Arrêt Docker :\n - docker compose down\n- Backend dev :\n - uv run uvicorn cm_plan.api.app:app --reload --host 0.0.0.0 --port 8000\n- Frontend dev :\n - cd ui && npm run dev\n - cd ui && npm run dev:browser\n- Builds UI :\n - cd ui && npm run build\n - cd ui && npm run build:browser\n - cd ui && npm run build:single\n- Tests / checks :\n - uv run pytest\n - uv run ruff check .\n - uv run mypy src/\n - cd ui && npm run check\n - cd ui && npm test\n\nSi vous contribuez, commencez par lire docs/SDS_Planification_CM.md pour comprendre les exigences fonctionnelles et la matrice de traçabilité. 
Pour toute modification touchant les imports d'API dans le UI, soyez conscient du rôle du plugin Vite cmVariantPlugin afin de maintenir les deux variantes fonctionnelles.","other-src-cm-plan":"# Other — src-cm_plan\n\nModule: src/cm_plan/api\n\nSummary\n- Purpose: This package is intended to house the FastAPI backend for the CM planning engine (ticket/feature SDS-10004).\n- Current state: The package contains only an __init__.py with the single docstring \"FastAPI backend for the CM planning engine (SDS-10004).\" There are currently no functions, classes, routes, or integration points implemented in this package.\n- Impact: As shipped, this package is a placeholder. It does not register any HTTP endpoints, create an ASGI application, or call into other parts of the codebase.\n\nWhy this package exists\n- Logical separation: The planning engine needs an HTTP/REST surface to expose planning operations to other systems and UI clients. Placing FastAPI code under src/cm_plan/api keeps the HTTP layer separated from planning logic, domain models, and persistence.\n- Ticket alignment: The docstring ties the package to SDS-10004; the package is the intended location for implementing the functionality referenced by that ticket.\n\nHow it should fit into the codebase\n- Responsibility: Implement an ASGI/HTTP layer that:\n - exposes REST endpoints (or HTTP/JSON RPC) for planning operations,\n - validates and marshals requests/responses (Pydantic schemas),\n - delegates business logic to the planning engine modules elsewhere in the repository,\n - handles authentication/authorization, metrics, and request-scoped instrumentation.\n- Integration points:\n - Domain / service layer: The API package should import and call into the planning engine code (not the other way around). 
The planning engine remains framework-agnostic.\n - Configuration: Read environment / config layer to drive ports, CORS, feature flags, and logging.\n - Deployment: Expose a top-level ASGI callable so WSGI/ASGI servers (uvicorn/gunicorn+uvicorn workers) can mount it.\n\nRecommended structure and conventions\n- Keep the package focused on HTTP concerns: routing, request/response schemas, HTTP-specific middleware, and lifecycle events.\n- Prefer small routers organized by resource or capability (e.g., /plans, /workflows). Each router file should:\n - define Pydantic request/response models local to that API surface,\n - import a thin service interface from the planning engine (avoid importing implementation details),\n - register endpoints on a FastAPI APIRouter instance.\n- Application bootstrap:\n - Provide a single place that composes routers, middleware, exception handlers, and startup/shutdown hooks, and exposes the ASGI application to the process manager.\n - The package-level __init__.py may remain minimal; the ASGI callable is typically exported from a dedicated module so the deployment runner can import it (for example, module-level variable named by convention in your repo, though this package does not currently declare one).\n- Dependency injection:\n - Use FastAPI dependency injection for request-scoped dependencies (database sessions, authenticated user extraction).\n - Keep heavy initialization (database pools, long-lived clients) in startup lifecycle hooks, and inject lightweight handles into request dependencies.\n- Error handling:\n - Implement structured exception handlers that translate domain errors into appropriate HTTP status codes and payloads.\n - Normalize validation, authentication, and unexpected error responses for clients.\n\nPractical checklist for contributors\n- Add ASGI entrypoint: create a module under this package that constructs the FastAPI app and mounts routers and middleware.\n- Register routers: implement routers that map to 
planning engine capabilities; import only service interfaces from the domain layer.\n- Add schemas: define Pydantic models local to the API package for request and response validation; keep domain models mapped to/from these schemas in a thin adapter layer.\n- Implement lifecycle hooks: perform expensive initialization (DB, external clients) in startup events and cleanup in shutdown events.\n- Instrumentation: add Prometheus metrics or tracing middleware as required by the platform.\n- Tests: write unit tests for routers using FastAPI's TestClient and integration tests that exercise startup and shutdown behavior.\n- CI/deployment: ensure the ASGI callable is reachable from the deployment configuration used by the infra team (uvicorn/gunicorn entrypoint).\n\nOperational concerns\n- Configuration: keep secrets and environment-specific settings out of code. Read them from the existing configuration system used in the repo (env vars, config files). If no standard exists yet, follow repo conventions.\n- CORS and security headers: enable CORS only for the required origins, and set security headers via middleware.\n- Logging: route FastAPI logs through the repo's logging configuration. 
Use structured logs for request tracing (correlation IDs).\n- Observability: add tracing and metrics at the HTTP layer to trace requests into the planning engine.\n\nTesting guidance\n- Unit tests: test routers in isolation by injecting mock service implementations for the planning engine interfaces.\n- Integration tests: start the app with a test configuration and run end-to-end scenarios against an in-memory or test database/backends.\n- Contract tests: publish OpenAPI schema (FastAPI auto-generates one) and ensure clients rely on that produced schema for contract verification.\n\nMigration and contribution notes\n- Because the package is currently empty of executable behavior, introducing the API should not create breaking changes elsewhere as long as:\n - imports are added from the planning domain layer into the API package (one-way dependency).\n - no circular imports are introduced. Keep the planning engine free of FastAPI imports.\n- When adding files, follow existing repository conventions (naming, linting, packaging).\n\nFiles you might add (suggestions)\n- api/bootstrap.py or api/app.py — application composition and ASGI callable\n- api/routers/*.py — individual APIRouter modules per resource\n- api/schemas/*.py — request/response Pydantic schemas\n- api/dependencies.py — shared dependency providers for request lifecycle\n- api/middleware.py — cross-cutting HTTP middleware\n- api/exceptions.py — exception to HTTP mapping\n\nNotes from static analysis\n- No incoming or outgoing calls were detected for this module; no execution flows are present. 
This is consistent with the package being an implementation placeholder.\n\nIf you are implementing the FastAPI backend here\n- Keep the HTTP layer thin: orchestrate rather than implement business rules.\n- Avoid importing FastAPI into domain/service modules — the dependency direction must remain from API -> domain.\n- Add comprehensive tests and CI checks as you introduce behavior.","other-tests":"# Other — tests\n\ntests — Test suite overview and contributor guide\n\nPurpose\nThis package contains the automated test suite for the CM planning engine. Tests are organized to exercise:\n- Domain logic (rules, types, constraint evaluation)\n- Engine components (solver, simulation, equity/resilience analysers, diffing)\n- API layer (FastAPI route handlers, schemas)\n- Import/export utilities and file parsers\n- Persistence adapters (SQLite repositories and audit log)\n- Authentication/authorization utilities\nTests are written as lightweight unit and functional tests (pytest), designed to be fast and deterministic for core behaviour; some filters/tests that depend on real CHUV files are intentionally skipped when test data is absent.\n\nTop-level layout (files of interest)\n- tests/conftest.py\n - Shared fixtures and helpers used across many tests\n- tests/test_api.py\n - Route-level tests for FastAPI endpoints (solve, evaluate, simulate, equity, resilience)\n- tests/test_auth.py\n - Authentication, JWT, password hashing, user repository behaviour\n- tests/test_constraints.py\n - Constraint evaluation tests that exercise rule ids and expected violations\n- tests/test_diff.py\n - Planning diff tests (added / removed / modified)\n- tests/test_equity.py\n - Equity metric tests (weighted stddev, activity weighting)\n- tests/test_exports.py\n - Export/import helpers, .ics/.html/.json exports, anonymization, impact reports, ruleset validation/clone\n- tests/test_filters.py\n - Import filter detection and parsing against real CHUV files (skipped if data missing)\n- 
tests/test_imports.py\n - CSV/XLSX parsers for guards and absences; helper functions\n- tests/test_persistence.py\n - SQLite adapter tests for planning, semester context, ruleset repositories and audit log\n- tests/test_resilience.py\n - Resilience analyzer tests (traffic light indicators and shock categories)\n- additional test modules (solver, simulation, types) exercise solver integration and domain models (not exhaustively listed here)\n\nKey shared fixtures and helpers (tests/conftest.py)\nThe package-level fixtures centralize common test data to keep tests concise and consistent.\n\nImportant definitions:\n- make_physician(name: str, role: Role, seniority: Seniority | None = None, activity_rate: float = 1.0, start: date | None = None, end: date | None = None, overtime_hours: float = 0.0) -> Physician\n - Factory used everywhere to create Physician objects with sane defaults (semester 2026-02-01 → 2026-07-31 by default).\n- cadres, cdcs, mas, all_physicians\n - pytest fixtures returning small standardized teams (cadres: Role.CADRE; cdcs: Role.CDC with Seniority variants; mas: Role.MA).\n- WEEK_HALF_DAYS\n - A list of HalfDay values covering a single Mon-Fri week: 2026-02-02 → 2026-02-06, both AM and PM slots. Many tests use WEEK_HALF_DAYS[0] or other indices for deterministic dates/slots.\n- WEEK_ROOM_CAPACITIES\n - Standard room capacities for WEEK_HALF_DAYS: each half-day has two RoomCapacity entries: RoomType.OP capacity=3 and RoomType.CONSULT capacity=6. Used by many one-week context helpers.\n- week_context(all_physicians: list[Physician]) -> SemesterContext\n - One-week SemesterContext fixture for focused tests.\n- quick_week_context(physicians, absences=None, guards=None, holidays=None) -> SemesterContext\n - Helper to build a week-scoped SemesterContext with optional absences/guards/holidays for solver/evaluation tests.\n\nCommon patterns when writing tests in this suite\n- Use make_physician to create minimal Physician instances. 
This ensures consistent start/end dates and avoids boilerplate.\n- For single-week focused tests, use quick_week_context or the week_context fixture to obtain a SemesterContext that includes WEEK_ROOM_CAPACITIES and the standard week dates.\n- Create assignments via cm_plan.domain.types.Assignment with explicit physician_id, half_day (HalfDay), and activity_code (string).\n- Use build_default_ruleset() from cm_plan.domain.rules to get the canonical RuleSet used by many constraint tests.\n- When asserting constraint behaviour, tests often check rule identifiers and/or specific message substrings:\n - Example rule ids referenced by tests: \"H-URG-MIN\", \"H-URG-SUP\", \"H-ELIG-URG\", \"H-QUOTA-CONSULT-NOUVEAU\", \"H-HOLIDAY-BRIDGE\", \"H-CALENDAR\", \"H-CLOSURE\", \"H-NOMINATIVE\", \"H-ABSENT-OP\", \"H-RECUP-WE\"\n- For API-level tests (tests/test_api.py) the endpoints are invoked directly as callables (not via HTTP) using the pydantic request schemas (e.g., SolveRequest, EvaluateRequest, SimulateRequest). This keeps tests fast and focused on the contract.\n\nRepresentative inter-module calls and responsibilities\n- Constraint evaluation: tests call cm_plan.engine.constraint.evaluate(assignments, ruleset, ctx) and assert on returned .violations, .hard_violations, and .is_feasible.\n- Solver/integration: tests call cm_plan.engine.solver.solve(...) using quick_week_context and build_default_ruleset; assertions inspect result.is_feasible, result.status_name, and result.assignments.\n- Equity/resilience: tests call cm_plan.engine.equity.compute_equity(...) 
and cm_plan.engine.resilience.compute_resilience(...), asserting on returned report.scores and per-role/category indicators.\n- Import/export filters and helpers: tests call parse_guards_csv / parse_guards_xlsx / parse_absences_csv / parse_absences_xlsx, as well as higher-level import_file/detect/registered_filters in cm_plan.engine.imports and cm_plan.engine.filters.\n- Persistence: tests use connect(\":memory:\") from cm_plan.adapters.sqlite_adapter to get an in-memory SQLite connection and exercise SqlitePlanningRepository, SqliteSemesterContextRepository, SqliteRuleSetRepository, and SqliteAuditLog.\n- Auth: tests use cm_plan.api.auth_service.create_user, create_access_token, decode_access_token, hash_password, verify_password and the SqliteUserRepository adapter.\n\nTests that require external data and how they are handled\n- tests/test_filters.py includes tests that parse real CHUV planning and absence XLSX/PDFs.\n- DATA_DIR is set to a local path; tests are guarded with:\n skip_no_data = pytest.mark.skipif(not DATA_DIR.is_dir(), reason=\"Real CHUV data folder not available\")\n- If you want to run those tests locally, set up the folder and data files as expected by the test suite; CI runs without them because tests are skipped if the folder is absent.\n\nUseful test idioms and examples (patterns already used in the suite)\n- Build a minimal context and evaluate a single constraint:\n - ctx = quick_week_context([make_physician(\"MA\", Role.MA)])\n - assignments = [Assignment(physician_id=ma.id, half_day=WEEK_HALF_DAYS[0], activity_code=\"URG\")]\n - result = evaluate(assignments, build_default_ruleset(), ctx)\n - assert any(v.rule_id == \"H-URG-SUP\" for v in result.violations)\n- Create a one-week solve that acts as a reproducible reference:\n - See test_api._solve_reference() which calls solve(..., week_start=date(2026,2,2), week_end=date(2026,2,6)) and asserts result.is_feasible before returning assignments for further API/analysis tests.\n\nRuleset and 
constraint testing guidance\n- Tests often inspect specific rule ids and rule parameters. New rules should be:\n - Added to the build_default_ruleset (or tested separately by constructing a RuleSet).\n - Covered by constraint tests in tests/test_constraints.py that construct minimal contexts and assignments to trip or satisfy the rule.\n- RuleSet coherence tests use RuleSet.validate_coherence() and assert on error messages (duplicate ids, negative weights, presence/absence of hard rules).\n\nPersistence tests patterns\n- Use connect(\":memory:\") to get an ephemeral DB for each test.\n- Repositories under test:\n - SqlitePlanningRepository: save, get (latest and specific version), list_versions, set_status, list_plannings (with optional status filtering).\n - SqliteSemesterContextRepository: save(ctx, semester_id=...), get(sid), list_semesters()\n - SqliteRuleSetRepository: save, get, list_rulesets; tests verify round-trip of rule parameters and upsert behaviour.\n - SqliteAuditLog: record(...) and query(...) 
semantics (filters by planning_id, user, action, since, limit) and purge(old_than).\n\nImport/export tests and examples\n- XLSX helpers in tests create in-memory workbooks using openpyxl (see tests/test_imports.py._build_xlsx).\n- Export tests exercise:\n - export_planning_json(planning, ctx[, ruleset]) -> dict with \"format_version\"\n - anonymize_planning(planning, ctx) -> anonymized planning + mapping\n - physician_ics(physician_id, name, assignments) -> ICS string with readable activity and AM/PM phrasing\n - export_planning_html(planning, ctx, variant=None, include_alerts=False)\n - build_impact_report(before, after, ctx)\n - detect_locked_conflicts(ctx, ruleset)\n\nConstraint and domain coverage highlights\n- Constraint categories covered with dedicated tests:\n - Single-assignment per half-day (SDS-2004)\n - Absence blockers and locked-assignment overrides (SDS-4001)\n - Minimum staffing (H-URG-MIN) and supervision (H-URG-SUP)\n - Activity eligibility by role (H-ELIG-*)\n - Monthly quotas (H-QUOTA-*)\n - Holiday bridging rules (H-HOLIDAY-BRIDGE)\n - Calendar restrictions (H-CALENDAR)\n - Closures (H-CLOSURE)\n - Nominative couplings (H-NOMINATIVE)\n - Absent operator handling (H-ABSENT-OP)\n- Many tests assert on semantics in messages (e.g., presence of \"absent\", \"nominatif\", \"fermer ou réaffecter\") in addition to rule ids, ensuring that human-readable explanations are preserved.\n\nTest isolation and deterministic data\n- Dates and weeks are fixed to Feb–Jul 2026 in many fixtures (semester range and WEEK_HALF_DAYS), so tests are not affected by the current date/time.\n- Shared constants (WEEK_HALF_DAYS, WEEK_ROOM_CAPACITIES) are defined once in conftest.py and reused to keep semantics consistent across tests.\n\nRunning and extending tests — practical tips for contributors\n- Fast iteration:\n - Unit tests that don't touch SQLite or heavy parsing run quickly. 
Use pytest -k <substring> to target a specific test class or function.\n- Adding a new constraint test:\n - Reuse quick_week_context or _simple_context pattern (see tests/test_constraints.py).\n - Create minimal physicians with make_physician and build assignments to exercise the rule.\n - Use build_default_ruleset() unless testing a custom ruleset.\n- Adding persistence tests:\n - Use connect(\":memory:\") fixture pattern to keep DB scoped to the test and automatically closed.\n- Adding import filter tests that require XLSX:\n - Use the in-memory XLSX builder pattern in tests/test_imports.py._build_xlsx to create controlled XLSX content without external files.\n- When tests assert on rule ids or messages, prefer rule_id checks for stability; messages are fine for human-readable validation but can change during refactors.\n\nSmall architecture diagram (high-level test call relationships)\nThis diagram shows how common fixtures feed test modules and how tests exercise domain/engine/adapters. 
It is intentionally small and high-level.\n\ngraph LR\n A[conftest fixtures\\n(make_physician, WEEK_HALF_DAYS,\\nquick_week_context)] --> B[test modules]\n B --> C[cm_plan.domain.*\\n(build_default_ruleset, types)]\n B --> D[cm_plan.engine.*\\n(evaluate, solve,\\ncompute_equity/resilience, diff)]\n B --> E[cm_plan.adapters.sqlite_adapter\\n(Sqlite*Repository, AuditLog)]\n B --> F[cm_plan.api.*\\n(routes, auth_service)]\n B --> G[cm_plan.engine.imports/filters\\n(parse CSV/XLSX, detect)]\n style A fill:#f9f,stroke:#333,stroke-width:1px\n\nWhere tests assert behavior (examples)\n- Constraint check: result = evaluate(assignments, ruleset, ctx); inspect result.violations and result.hard_violations.\n- Solver invocation: result = solve(ctx, ruleset, mode=SolveMode.FRESH, time_limit_seconds=5.0, week_start=..., week_end=...); inspect result.is_feasible and result.assignments.\n- API endpoint test: response = await solve_endpoint(SolveRequest(...)); assert response[\"is_feasible\"] is True and response contains \"assignments\".\n- Equity/resilience: report = compute_equity(...)/compute_resilience(...); inspect report.scores for per-role/category metrics and TrafficLight values.\n\nWhere to look when tests fail\n- Constraint failure expecting specific rule_id → check cm_plan.domain.rules and cm_plan.engine.constraint for changes to rule ids, parameter names, or evaluation logic.\n- Solver regressions (different feasibility/status) → inspect solver inputs (ctx, room capacities, guards, absences) and rule weights/parameters; use the reference solve pattern in tests/test_api.py::_solve_reference to reproduce.\n- Persistence round-trip failures → inspect SQL schema migration logic in cm_plan.adapters.sqlite_adapter and the serialization/deserialization of nested domain objects.\n\nContributing tests\n- Keep tests focused, deterministic, and avoid flakiness: prefer explicit dates, use provided fixtures, avoid reliance on network or local-only data unless guarded with skip 
marks.\n- For heavy integration tests (real CHUV files), keep them skipped by default and document required data paths if they are to be run locally.\n- Add new rule ids to tests/test_constraints.py when adding new hard/soft constraints. Use message checks only to validate human-facing strings that must be stable.\n\nThis file-level documentation is intended to help contributors quickly find where to add tests, how to reuse fixtures, and how the test suite exercises the important modules of the CM planning engine.","other-ui-scripts":"# Other — ui-scripts\n\nUI Scripts — ui/scripts\n=======================\n\nSummary\n-------\nThis package contains small, focused Node scripts that tie the Rust-based engine build into the browser UI and produce a single-file distributable HTML. They are intended to be invoked from the ui package build/CI tasks (via npm scripts or directly with node). The three scripts are:\n\n- build-engine-wasm.mjs — runs wasm-pack in the Rust engine workspace and writes the JS/WASM artifacts into ui/src/lib/wasm.\n- verify-engine-wasm-release.mjs — loads the generated wasm glue code and verifies the Rust build profile is \"release\".\n- bundle-single.js — post-processes the SvelteKit build/index.html to create a self-contained dist/hands-on.html (inlines icon, strips modulepreload hints).\n\nLocation\n--------\nFiles live under ui/scripts:\n- ui/scripts/build-engine-wasm.mjs\n- ui/scripts/verify-engine-wasm-release.mjs\n- ui/scripts/bundle-single.js\n\nThese scripts assume the ui package layout where the UI source is in ui/, the Rust engine is in engine/, and the wasm artifacts are consumed from ui/src/lib/wasm.\n\nWhy these scripts exist\n-----------------------\n- The Rust engine is built into WebAssembly (WASM) using wasm-pack; the output must be placed inside the UI package so the browser app can import it.\n- CI and local development must ensure that the WASM artifact is a release build (optimised) and not a debug/dev build.\n- For producing 
an offline-friendly single-file demo (hands-on.html) we extract SvelteKit's inlined index.html and further inline the app icon and remove modulepreload hints that are invalid when loading from file://.\n\nScript details\n--------------\n\n1) build-engine-wasm.mjs\n------------------------\nPurpose\n- Invoke wasm-pack to build the Rust engine into a web-targeted WASM package and emit files into ui/src/lib/wasm.\n\nHow it works (key code patterns)\n- Resolves paths:\n - here = path.dirname(fileURLToPath(import.meta.url))\n - engineDir = path.resolve(here, '../../engine')\n - outDir = path.resolve(here, '../src/lib/wasm')\n- Calls wasm-pack via spawnSync:\n - executable: 'wasm-pack' on POSIX, 'wasm-pack.exe' on Windows\n - args: ['build', '--release', '--target', 'web', '--out-dir', outDir]\n - options: { cwd: engineDir, stdio: 'inherit' } so stdout/stderr appear in the invoking terminal\n- Error handling:\n - If spawnSync returns result.error, prints \"Failed to run wasm-pack:\" and exits with 1.\n - Otherwise exits with result.status ?? 1 (so a non-zero status from wasm-pack propagates).\n\nWhen to run\n- Run before building browser artifacts that import the wasm package (e.g., before building SvelteKit that references ui/src/lib/wasm/cm_plan_engine.js).\n- Commonly run via npm scripts such as npm run wasm:build (project-specific).\n\nImportant notes\n- Requires the wasm-pack binary in PATH and a working Rust toolchain.\n- The script uses spawnSync and inherits stdio, so wasm-pack build progress and errors are visible directly.\n- Cross-platform: accounts for Windows executable name 'wasm-pack.exe'.\n\n2) verify-engine-wasm-release.mjs\n---------------------------------\nPurpose\n- Double-check that the wasm artifacts present in ui/src/lib/wasm are built in release mode and loadable in Node. 
This detects accidental debug builds (which can be much larger or slower).\n\nHow it works (key code patterns)\n- Resolves file paths to the wasm JS glue and the wasm binary:\n - wasmJsPath = ui/src/lib/wasm/cm_plan_engine.js\n - wasmBinaryPath = ui/src/lib/wasm/cm_plan_engine_bg.wasm\n- Dynamically imports the glue JS via import(pathToFileURL(wasmJsPath).href).\n- Reads the binary bytes with readFile(wasmBinaryPath).\n- Calls wasmModule.default({ module_or_path: wasmBytes }) — this follows the wasm-bindgen/wasm-pack instantiated pattern where the default export accepts either a path or bytes to instantiate the wasm module.\n- Attempts to query wasmModule.wasm_build_profile() if that function exists (some wasm-pack setups export a helper to report the build profile). If the function is missing, profile falls back to 'unknown'.\n- If the detected profile is not 'release', prints an error message and exits with code 1; otherwise logs verification success.\n\nWhen to run\n- Run after build-engine-wasm.mjs (or after whatever produced ui/src/lib/wasm) to ensure the produced artifacts are release builds.\n- Useful as a CI check before packaging browser builds or publishing.\n\nImportant notes\n- This script uses top-level await and ESM import; run with Node versions that support ESM and top-level await (Node 14.8+ supports top-level await, Node 16+ recommended).\n- The script expects the wasm glue and the _bg.wasm name pattern used by wasm-pack (cm_plan_engine.js and cm_plan_engine_bg.wasm).\n\n3) bundle-single.js\n--------------------\nPurpose\n- Produce a single-file HTML distribution file (dist/hands-on.html) from SvelteKit's build/index.html. 
This makes the app runnable from file:// (offline or local sample distribution) by embedding all resources that would otherwise be fetched from disk/network.\n\nHow it works (key code patterns)\n- Paths:\n - buildDir = ui/build\n - src = build/index.html\n - distDir = ui/dist\n - dest = ui/dist/hands-on.html\n- Validates build/index.html exists; if not, prints an error and exits 1: \"build/index.html not found — run `npm run build:browser` first.\"\n- Reads index.html content to string.\n- Detects modulepreload hints that reference _app/immutable (a sign this is NOT an inline/browser build) using regex:\n - detection regex: /<link[^>]+modulepreload[^>]+_app\\/immutable/\n - If detected, it strips modulepreload tags with html.replace(/<link[^>]+rel=\"modulepreload\"[^>]*>/g, '') and logs a warning.\n - The detection helps identify builds that still reference external JS chunks, which will fail when opened via file:// due to CORS.\n- Inlines the app icon (hands-on.png) as a base64 data URI:\n - iconPath = static/hands-on.png\n - Reads file, encodes base64, replaces occurrences of href=\"...hands-on.png\" via regex /href=\"[^\"]*hands-on\\.png\"/g with href=\"data:image/png;base64,...\"\n - Logs success if icon inlined.\n- Ensures distDir exists, writes dest with the modified HTML, and logs the generated file size.\n\nWhen to run\n- After a browser build (SvelteKit) that produced build/index.html with inlined JS/CSS.\n- Typical npm script: npm run build:browser then node ui/scripts/bundle-single.js (or integrated into the npm script).\n\nImportant notes and caveats\n- The script assumes SvelteKit was built with a bundling strategy that inlines JS and CSS (e.g., bundleStrategy: 'inline') and expects that wasm is already inlined (the project uses sqlWasmInlinePlugin to inline wasm where necessary). 
If the build still references _app/immutable/*.js chunks, the generated single file will not work when opened via file://; the script strips modulepreload hints but cannot inline those chunks.\n- Inlining the icon increases the single-file size. The script logs the final file size in KB.\n- The script is a small convenience for demos/distribution, not a replacement for a proper packaging pipeline.\n\nExecution examples\n------------------\nRun wasm-pack build:\n- node ui/scripts/build-engine-wasm.mjs\n\nVerify wasm release build:\n- node ui/scripts/verify-engine-wasm-release.mjs\n\nCreate single-file HTML (after building browser):\n- node ui/scripts/bundle-single.js\n\n(If bundle-single.js is executable with a shebang, you can also run ./ui/scripts/bundle-single.js on POSIX systems.)\n\nTypical output snippets\n-----------------------\n- build-engine-wasm.mjs: wasm-pack stdout/stderr streamed directly; on spawn error:\n - \"Failed to run wasm-pack: <message>\"\n- verify-engine-wasm-release.mjs: on success:\n - \"Verified browser WASM build profile: release\"\n On mismatch:\n - \"Expected a release Rust WASM artifact in ui/src/lib/wasm, but detected '<profile>'. Run `npm run wasm:build` before using the browser variant.\"\n- bundle-single.js:\n - \"⚠ Stripped modulepreload hints (build with `npm run build:browser` for best results)\" (only if modulepreload links were present)\n - \"✓ Icon inlined as data URI\" (if icon found)\n - \"✓ dist/hands-on.html (NN KB)\" (size in KB)\n\nIntegration points with the rest of the repository\n--------------------------------------------------\n- engine/ (Rust workspace) — build-engine-wasm.mjs invokes wasm-pack in this directory and emits the result to ui/src/lib/wasm.\n- ui/src/lib/wasm — destination for wasm-pack output. 
Files expected include cm_plan_engine.js and cm_plan_engine_bg.wasm (the wasm-pack naming pattern).\n- SvelteKit UI build — bundle-single.js consumes build/index.html created by SvelteKit and writes dist/hands-on.html; it relies on SvelteKit being configured to inline JS/CSS for a single-file bundle or the build to already inline resources.\n- CI — verify-engine-wasm-release.mjs is suitable as a CI gate to ensure the UI is using a release WASM build.\n\nTroubleshooting & FAQs\n----------------------\nQ: wasm-pack is not found or spawnSync throws an error\n- Error: \"Failed to run wasm-pack: spawnSync ... ENOENT\" indicates wasm-pack is not in PATH. Install wasm-pack (https://rustwasm.github.io/wasm-pack/installer/) or include it in CI images.\n\nQ: verify-engine-wasm-release.mjs fails with profile 'debug' or 'unknown'\n- Ensure you ran build-engine-wasm.mjs (with --release) or ran wasm-pack build --release from the engine/ directory. The script expects a release profile; CI should fail if the profile is not 'release'.\n\nQ: bundle-single.js generates an HTML that fails to run from file:// or shows missing resources\n- If index.html references external _app/immutable/*.js assets, it means the UI build did not inline JS. Rebuild the browser with the configuration that inlines JS/CSS (e.g., bundleStrategy: 'inline') or avoid serving via file://. The script strips preload hints but cannot inline arbitrary chunks.\n\nQ: Data URI inlined icon increases output too much\n- Inlining the icon is intentional for file:// portability. 
If size is a concern, omit inlining or use a smaller icon; the regex that replaces href=\"...hands-on.png\" can be adjusted if you need a different behavior.\n\nImplementation notes & regex explanation\n---------------------------------------\n- modulepreload detection:\n - /<link[^>]+modulepreload[^>]+_app\\/immutable/ checks whether any modulepreload link references _app/immutable, which indicates the SvelteKit build left external chunks.\n- modulepreload stripping:\n - html.replace(/<link[^>]+rel=\"modulepreload\"[^>]*>/g, '') removes <link rel=\"modulepreload\" ...> elements to prevent unreachable preload hints in a single-file distribution.\n- icon inlining:\n - html.replace(/href=\"[^\"]*hands-on\\.png\"/g, `href=\"${dataUri}\"`) replaces both the favicon and apple-touch-icon references. It assumes the asset filename is \"hands-on.png\".\n\nSmall architecture diagram\n--------------------------\nThe following diagram shows the relationship among the scripts, the engine, and the UI build outputs.\n\ngraph TD\n A[engine/ (Rust)] -->|wasm-pack -> ui/src/lib/wasm| B[ui/src/lib/wasm]\n B -->|verify| C[verify-engine-wasm-release.mjs]\n D[SvelteKit build -> build/index.html] -->|post-process| E[bundle-single.js]\n E --> F[ui/dist/hands-on.html]\n\n(If you modify paths or wasm-pack behaviour, update the scripts accordingly.)","other-ui-src":"# Other — ui-src\n\nui/src/lib/wasm — cm-plan-engine (package.json)\n================================================\n\nPurpose\n-------\nThis directory publishes a small npm package that contains the CM Planning engine compiled to WebAssembly and the JavaScript \"glue\" needed to run it in the browser.\n\nThe package is intended to be consumed by the UI layer so the entire planning engine can execute client-side, without server calls. 
The package artifacts are:\n\n- cm_plan_engine_bg.wasm — the WebAssembly binary\n- cm_plan_engine.js — the JavaScript glue/runtime wrapper\n- cm_plan_engine.d.ts — TypeScript type definitions\n\nPackage metadata\n----------------\nThis module's package.json exposes these key fields:\n\n- \"name\": cm-plan-engine\n- \"type\": module (ESM)\n- \"version\": 0.6.2\n- \"files\": [\"cm_plan_engine_bg.wasm\", \"cm_plan_engine.js\", \"cm_plan_engine.d.ts\"]\n- \"main\": \"cm_plan_engine.js\" (entry-point for consumers)\n- \"types\": \"cm_plan_engine.d.ts\" (TypeScript definitions)\n- \"sideEffects\": [\"./snippets/*\"] (preserve snippet files from tree-shaking)\n\nHow it is structured\n--------------------\nThe package is a small distribution composed of three files. The JS file is the coordinator that exposes functions and bindings to the Wasm module; the .wasm file is the raw compiled engine; and the .d.ts file documents the public API for TypeScript consumers.\n\nBecause package.json sets \"type\": \"module\", consumers should treat cm_plan_engine.js as an ES module.\n\nHow to discover the public API\n------------------------------\nThis package does not hard-code any runtime initialization in the UI codebase. To know what functions/classes to call, inspect cm_plan_engine.d.ts (included in this package). The .d.ts lists exported members and any exported initialization function(s). The typical workflow is:\n\n1. Import the JS module (cm_plan_engine.js).\n2. If the module exports an async initializer, call it before using other API members.\n3. Use the exported functions/classes exactly as declared in cm_plan_engine.d.ts.\n\nExample usage patterns\n----------------------\nBelow are safe, generic usage patterns. 
Because specific exported function names are declared in cm_plan_engine.d.ts, treat these examples as templates — check the .d.ts for exact names.\n\nStatic import (ESM)\n- Import the module at the top of a file and then call any provided initializer (if present) before using other exports.\n\n Example pattern:\n - import * as CmPlanEngine from './cm_plan_engine.js';\n - if the package exposes an async initializer (often named init or similar), call it: await CmPlanEngine.init?.();\n - then call other exported functions.\n\nDynamic import\n- Use dynamic import / await import(...) when you want to lazy-load the engine only when needed.\n\n Example pattern:\n - const CmPlanEngine = await import('./cm_plan_engine.js');\n - await CmPlanEngine.init?.();\n - use API.\n\nNote: The exact initializer name (if any) and other exports are provided in cm_plan_engine.d.ts — inspect that file for precise names and signatures.\n\nTypeScript support\n------------------\nThe package includes cm_plan_engine.d.ts and sets \"types\" in package.json. TypeScript projects importing this package will get type information automatically. If your editor/IDE does not pick up the types, ensure your import path resolves to the package (or to the local file that contains the .d.ts).\n\nBundling and deployment considerations\n-------------------------------------\nWASM files have special handling requirements in build systems and in deployment:\n\nServing .wasm\n- The .wasm file must be available to the browser at runtime and served with the correct MIME type (application/wasm). Ensure your static file server or hosting config provides this.\n\nPath resolution\n- The JS glue code will attempt to load the .wasm file relative to the script or via a URL. When bundling, the bundler may not automatically copy the .wasm file to the output directory. 
Common strategies:\n - Copy cm_plan_engine_bg.wasm into your app's public/static folder so it is served at a known URL.\n - Configure your bundler to treat the .wasm file as an asset/resource (webpack asset/resource, Vite static import, etc.), and pass the resolved URL into initialization if required.\n - Use dynamic import or fetch to load the .wasm manually if the glue supports that.\n\nBundler-specific notes (common approaches)\n- Webpack:\n - Configure asset/resource (or file-loader) for *.wasm so the wasm file is emitted to the build output and its URL is available at runtime.\n- Vite:\n - Place cm_plan_engine_bg.wasm in public/ or import it with ?url (e.g., import wasmUrl from './cm_plan_engine_bg.wasm?url').\n- Rollup:\n - Use rollup-plugin-copy or asset handling to ensure the .wasm file is copied to the output folder and reachable by the JS glue at runtime.\n\nSide effects and snippets\n------------------------\nThe package.json declares a sideEffects entry: \"./snippets/*\". This indicates there are snippet files packaged alongside the main artifacts that should not be removed by bundler tree-shaking, because they may perform registration or other global effects when imported. If your build system aggressively prunes unused modules, keep this in mind: importing any snippet files intentionally, or configuring the bundler to preserve them, is necessary if your app relies on them.\n\nVersioning and releases\n-----------------------\n- The package version is 0.6.2 in this snapshot. When changing the compiled wasm or the JS glue (or the API surface described by cm_plan_engine.d.ts), update the package version and release the new artifact set together.\n- The \"files\" array in package.json ensures only the three artifacts are published (plus the snippets folder referenced by sideEffects). 
This minimizes package size and reduces accidental publishing of build artifacts.\n\nTroubleshooting\n---------------\n- \"Failed to fetch wasm\" or MIME type errors:\n - Confirm the wasm file is being served with application/wasm and reachable at the URL the JS glue expects.\n - If using a CDN or subpath, ensure correct base paths or pass an explicit wasm URL to the initializer if the glue supports it.\n- \"Missing export\" errors in TypeScript:\n - Open cm_plan_engine.d.ts to verify which names are exported and use those exact names.\n- \"Module not found\" during bundling:\n - Ensure the import path points to the package entry (cm_plan_engine.js) or to the local file included in your source tree, and ensure the bundler is configured to resolve .js and .wasm assets.\n\nHow this connects to the rest of the codebase\n---------------------------------------------\n- The UI consumes the cm-plan-engine package to run the planning engine entirely in the browser. The UI code imports the package's cm_plan_engine.js at runtime and uses the exported API (per cm_plan_engine.d.ts) to perform planning tasks.\n- The UI is responsible for ensuring the .wasm file is available to the browser (via static assets, bundler asset handling, or server configuration).\n- No internal runtime calls are defined in this repository snapshot (the package is self-contained distribution artifacts). 
To change behavior, update the compiled artifacts (typically from the upstream engine build), then publish an updated package with the new .wasm, .js, and .d.ts files.\n\nChecklist for contributors\n--------------------------\n- If you rebuild the engine:\n - Replace cm_plan_engine_bg.wasm, cm_plan_engine.js, and cm_plan_engine.d.ts with the new build outputs.\n - Increment the version in package.json.\n - If you add any global snippets, put them under snippets/ and ensure sideEffects lists them as needed.\n- If you change the JS glue API:\n - Update cm_plan_engine.d.ts accordingly.\n - Update UI code that imports and uses the package to match the new API.\n- Validate the package by importing it in a sample UI page and verifying initialization and basic calls succeed in the browser.\n\nSummary\n-------\nThis package is a focused distribution of the CM Planning engine as WebAssembly plus its JS glue and TypeScript types. Consumers should import cm_plan_engine.js, consult cm_plan_engine.d.ts for the exact API and any initialization requirements, and ensure the wasm binary is served and reachable at runtime. Packaging decisions (files array, sideEffects) are already tuned for minimal, predictable publishing.","other-ui":"# Other — ui\n\nui — UI (SvelteKit) module\n==========================\n\nPurpose\n-------\nThis directory contains the front-end application built with SvelteKit and Vite. It is a small single-page app that can be built and run in two distinct deployment modes:\n\n- server (default): served via a backend (development proxy to http://localhost:8000). 
This is used for normal development and server-hosted deployments.\n- browser: a fully client-side variant that runs without a backend and is suitable for file:// installs (e.g., an offline single-file distribution).\n\nThe UI module includes build scripts, SvelteKit configuration, Vite configuration (with two custom plugins), TypeScript settings, and a Vitest test configuration.\n\nKey files\n---------\n- package.json — npm scripts and dependencies for the UI package.\n- svelte.config.js — SvelteKit configuration with special adjustments for the browser variant.\n- vite.config.ts — Vite configuration. Defines:\n - cmVariantPlugin(isBrowser) — replaces $lib/api imports with the appropriate variant.\n - sqlWasmInlinePlugin() — inlines the sql-wasm WASM binary as a virtual module for the browser variant.\n - variant-aware Vite settings (optimizeDeps, server proxy, define).\n- tsconfig.json — TypeScript settings (extends SvelteKit-generated config).\n- vitest.config.ts — test runner config (aliases $lib and test includes).\n- static/sql-wasm/sql-wasm-browser.wasm — WASM file that may be inlined by the sqlWasmInlinePlugin (referenced by the plugin).\n\nHow the module is used\n----------------------\nScripts (package.json)\n\n- npm run dev — start Vite dev server (server variant).\n- npm run dev:browser — verify WASM and start Vite dev server in browser variant.\n- npm run build — build (server variant).\n- npm run build:browser — verify WASM and build browser variant.\n- npm run build:single — verify WASM, build browser variant and run node scripts/bundle-single.js to create a single-file distribution.\n- npm run wasm:build — helper script to build the engine WASM and verify the release.\n- npm run wasm:verify — verify the engine WASM release (used by browser build paths).\n- npm run test / npm run test:watch — run tests with Vitest.\n\nNotes:\n- To explicitly select the browser variant for CLI scripts, set CM_VARIANT=browser or use the dedicated :browser scripts 
(dev:browser, build:browser).\n- The build pipeline sets a global __CM_VARIANT__ define (string) so client code can branch at build time.\n\nSvelteKit configuration (svelte.config.js)\n------------------------------------------\nsvelte.config.js sets the adapter-static fallback to index.html and toggles a few SvelteKit options when building the browser variant (detected either via CM_VARIANT=browser or the presence of \"browser\" in process.argv):\n\nBehavior when browser variant is active:\n- paths.relative = true — enables relative asset paths so the app can run from file://\n- router.type = 'hash' — uses hash routing to avoid server-side routing requirements\n- output.bundleStrategy = 'inline' — attempts to inline resources for a compact distribution\n\nThis file centralizes the minimal runtime changes required to make the same app run either server-hosted or stand-alone in a browser.\n\nVite configuration (vite.config.ts)\n-----------------------------------\n\nOverview\n- The file exports defineConfig and composes three plugins:\n - cmVariantPlugin(isBrowser)\n - sqlWasmInlinePlugin()\n - sveltekit()\n- It sets worker.format to 'es' and defines a compile-time constant __CM_VARIANT__.\n- Variant-specific settings:\n - browser variant:\n - optimizeDeps: include 'sql.js' (pre-bundle to avoid runtime fetch of its WASM).\n - server.fs.allow: ['..'] (allow Vite dev server to serve files from parent directories).\n - server variant:\n - configure a dev-time proxy for '/api' → http://localhost:8000 (backend server).\n\ncmVariantPlugin(isBrowser)\n- Purpose: Swap imports of the app's \"api\" barrel ($lib/api) to point at a variant-specific implementation:\n - src/lib/api-browser.ts for the browser-only implementation\n - src/lib/api-server.ts for the server-backed implementation\n- Key behavior:\n - The plugin runs with enforce: 'pre' so it acts before other resolvers.\n - If an import source exactly equals '$lib/api', resolveId returns the chosen target path (absolute 
path to api-browser.ts or api-server.ts).\n - The plugin also tries to match resolved absolute or relative paths that end in /api or /api.ts after SvelteKit expands the $lib alias to src/lib. This handles import paths that are already resolved by another resolver but still point to the api barrel file.\n - The plugin intentionally avoids interfering with entry chunks (options?.isEntry).\n- Why this exists: The application has two different implementations of the API surface: one that communicates with a remote Python backend and another that implements the same interface in-browser (e.g., using sql.js). This plugin ensures imports resolve to the correct module for the chosen build variant.\n- Implementation details (from the file):\n - target is resolved to path.resolve(__dirname, isBrowser ? 'src/lib/api-browser.ts' : 'src/lib/api-server.ts')\n - apiBarrel is resolved to path.resolve(__dirname, 'src/lib/api.ts')\n - The plugin’s resolveId performs:\n - immediate match for literal '$lib/api'\n - otherwise, builds a resolved path (handles relative imports) and compares it (after removing .ts extension) with apiBarrel (also normalizing .ts)\n - Returns target path when matching.\n\nsqlWasmInlinePlugin()\n- Purpose: Provide a virtual module that exports the sql-wasm binary as a Uint8Array so the browser-only variant can initialize sql.js without fetching a separate WASM file (necessary when loading from file:// where fetch is blocked by CORS).\n- Virtual module identity:\n - virtualId = 'virtual:sql-wasm-binary'\n - resolvedId = '\\0' + virtualId (the leading \\0 prevents other plugins from resolving it).\n- Behavior:\n - resolveId(source) returns resolvedId when source === virtualId.\n - load(id) returns a JS module string when id === resolvedId. 
That module:\n - reads static/sql-wasm/sql-wasm-browser.wasm from the repository at build time and base64-encodes it (the plugin runs in Node during build).\n - returns a small script that decodes the base64 into a Uint8Array named bytes and exports it as default. The generated source looks like:\n const b = atob(\"<base64 string>\");\n const bytes = new Uint8Array(b.length);\n for (let i = 0; i < b.length; i++) bytes[i] = b.charCodeAt(i);\n export default bytes;\n- How to consume:\n - Import 'virtual:sql-wasm-binary' from client code; it will provide the binary bytes that can be passed to sql.js initialization APIs.\n- Why this exists:\n - sql.js normally loads an external WASM file via fetch. For a single-file browser distribution or file:// contexts, an inlined WASM avoids cross-origin fetch restrictions and simplifies distribution.\n\nBuild-time define and optimization\n- The config sets define: { __CM_VARIANT__: JSON.stringify(variant) }, so client code can use __CM_VARIANT__ at build time.\n- For the browser variant, optimizeDeps include: ['sql.js'] — pre-bundles sql.js to avoid dynamic loading problems.\n- server.proxy is configured for the server variant so that during dev the front-end can call /api and have it proxied to the backend (localhost:8000). This keeps front-end code unchanged between variants.\n\nTypeScript (tsconfig.json)\n--------------------------\n- Extends .svelte-kit/tsconfig.json generated by SvelteKit, and enables strict TypeScript options plus moduleResolution: 'bundler'.\n- checkJs and allowJs are enabled so JS files in the project get type-checking.\n\nTesting (vitest.config.ts)\n--------------------------\n- Adds an alias $lib → src/lib for test-time resolution (keeps imports consistent with app code).\n- test.include is restricted to src/lib/engine/__tests__/**/*.test.ts (the engine tests). 
Adjust this config to add additional test folders.\n\nTypical development workflows\n-----------------------------\nLocal development (server variant)\n1. Start local backend (if needed): ensure the Python backend is listening on http://localhost:8000.\n2. npm run dev — starts Vite dev server. The UI will proxy /api to the backend.\n\nLocal development (browser variant)\n1. npm run wasm:verify — ensure the engine WASM is present/verified.\n2. npm run dev:browser — starts Vite dev server in browser variant (hash router, inline bundle).\n3. Open the dev server URL. The app will use the in-browser API implementation.\n\nBuilding releases\n- Server-backed build: npm run build\n- Browser build (stand-alone): npm run build:browser\n- Single-file bundle (browser): npm run build:single (runs wasm:verify, builds in browser mode, then bundles into a single file via scripts/bundle-single.js)\n\nGuidance for contributors\n-------------------------\n- Adding a new API implementation:\n - Implement the shape defined in src/lib/api.ts and provide either src/lib/api-server.ts or src/lib/api-browser.ts.\n - The cmVariantPlugin will resolve $lib/api imports to the correct implementation automatically.\n - Avoid importing the concrete file paths directly (importing '$lib/api' ensures the variant switch works).\n- Updating the WASM binary:\n - The inline plugin reads static/sql-wasm/sql-wasm-browser.wasm at build time.\n - To update the inlined WASM used by the browser variant, replace that file and verify it with npm run wasm:verify or run npm run wasm:build to rebuild and verify.\n - After updating the file, the plugin will pick up the new bytes at the next build.\n- Writing tests:\n - Vitest is configured in vitest.config.ts. Add tests under src/lib and update test.include to cover new areas if needed.\n - The alias $lib resolves to src/lib both during app builds and tests.\n- Adding new Vite behavior:\n - Register plugins in vite.config.ts. 
Keep cmVariantPlugin behavior limited to swapping only the api barrel — broad path rewriting makes debugging harder.\n - If you need to consume other static binaries as virtual modules, follow the pattern in sqlWasmInlinePlugin: create a virtual module id, use resolveId to claim it, and provide a load() return string that exports the bytes.\n\nSmall architecture diagram\n--------------------------\nThis diagram shows how Vite and the two custom plugins interact at build time.\n\n```mermaid\nflowchart LR\n DevCmd[\"npm run dev / build\"]\n Vite[\"Vite (vite.config.ts)\"]\n CM[\"cmVariantPlugin(isBrowser)\"]\n WASM[\"sqlWasmInlinePlugin()\"]\n SvelteKit[\"@sveltejs/kit plugin\"]\n Output[\"Built assets (variant-specific)\"]\n\n DevCmd --> Vite\n Vite --> CM\n Vite --> WASM\n Vite --> SvelteKit\n CM --> Output\n WASM --> Output\n SvelteKit --> Output\n```\n\nImplementation caveats and gotchas\n---------------------------------\n- cmVariantPlugin only resolves known api barrel locations. If you import the API using a path not matching '$lib/api' or the resolved api barrel path, the plugin will not swap it.\n- The sqlWasmInlinePlugin always embeds the WASM in the build output by inlining it as a JS module; for large WASM files this increases bundle size. The browser variant deliberately favors a single-file, offline-capable distribution over smaller chunks.\n- The browser variant sets output.bundleStrategy = 'inline' in svelte.config.js; that can affect performance and build time. Use the browser variant only when you need an install-less distribution.\n- __CM_VARIANT__ is a build-time constant that is stringified into the bundle. 
If you rely on it at runtime for branching dynamic behavior, ensure you understand that it is baked into the compiled code.\n\nWhere this module connects to the rest of the repository\n--------------------------------------------------------\n- Backend API: server variant front-end proxies /api to the Python backend at http://localhost:8000 (configured in vite.config.ts server.proxy). The app's network requests are made against the API contract implemented by src/lib/api-server.ts (development) and the production backend.\n- Engine WASM: the browser variant and sql.js integration depend on the WASM engine artifact under static/sql-wasm/sql-wasm-browser.wasm. Build/verify scripts in package.json interact with scripts/build-engine-wasm.mjs and scripts/verify-engine-wasm-release.mjs in the repo root.\n- src/lib: The lib directory exposes an api barrel (src/lib/api.ts) and two implementations. Importing $lib/api in application code will be resolved to the appropriate implementation by cmVariantPlugin at build time.\n\nQuick checklist for changes\n--------------------------\n- Changing API surface: update src/lib/api.ts and both implementations (api-server.ts, api-browser.ts) to keep parity.\n- Adding a new virtual asset: follow sqlWasmInlinePlugin pattern (virtual id, resolveId, load).\n- Changing variant behavior: adjust svelte.config.js and vite.config.ts, and ensure __CM_VARIANT__ is used consistently.\n- Updating tests: modify vitest.config.ts include/alias settings when adding new test locations.\n\nThis document should provide enough detail to navigate, modify, and extend the UI module, including how the two Vite plugins work and how the browser/server build variants are selected and produced.","other":"# Other\n\nOther — Overview\n\nPurpose\nThe Other module is the repository-level glue: documentation, build artifacts, container recipes, UI integration helpers, and governance rules that make the planning system reproducible, buildable in multiple deployment 
modes, and safe to change. Its sub-modules cover three broad concerns that work together: system governance and specs, build/runtime artifacts and images, and developer tooling for the browser/native engine and UI.\n\nHow the pieces fit together\n- Governance & system spec\n - [AGENTS](AGENTS.md) defines developer workflows and mandatory checks (version bumps, schema-change steps, GitNexus impact-analysis flows) that protect data integrity and the repository indexability. Follow these rules whenever changing any persisted schema, version fields (see [pyproject.toml](pyproject.toml) and UI package locations), or automation that affects cross-module contracts.\n - [docs](docs) contains the authoritative SDS and traceability matrix that drive implementation and test priorities.\n\n- Build artifacts and packaging\n - [engine](engine) produces the Rust planning engine crate and is the source of the WebAssembly artifacts.\n - [ui-scripts](ui-scripts) contains the orchestration scripts that build the engine to WASM (build-engine-wasm.mjs), verify release builds, and produce a single-file UI bundle. Those scripts move artifacts into [ui-src](ui-src) (the published npm package under ui/src/lib/wasm).\n - [pyproject.toml](pyproject.toml) is the canonical Python project manifest used for packaging the backend components and for CI checks. It, together with UI package metadata, is the single source of truth for versions referenced by [AGENTS](AGENTS.md).\n\n- Runtime and deployment options\n - Two runtime modes are supported:\n - Browser-only: UI consumes the WASM package from [ui-src](ui-src) and runs the engine client-side (the \"browser\" variant described in [README](README.md)). This path is driven by the wasm build + [ui](ui) build and the ui-scripts bundle flow.\n - Server-backed: services run as containers. 
Images are produced and/or assembled by:\n - [docker-engine](docker-engine) — image for the engine service (ASGI server exposing cm_plan.api.engine_app:app).\n - [docker-backend](docker-backend) — backend images (production/dev variants).\n - [docker-ui](docker-ui) — UI images (production, dev, self-contained).\n - Compose orchestration:\n - [docker-compose.yml](docker-compose.yml) wires engine, backend and ui for local multi-container development.\n - [docker-compose.postgres.yml](docker-compose.postgres.yml) adds a Postgres service and an integration hook for backends that require it.\n - [docker-compose.self-contained.yml](docker-compose.self-contained.yml) runs a single self-contained plan service image for lightweight deployments or CI.\n\n- Development, testing and placeholders\n - [ui](ui) is the SvelteKit app with both server and client build modes; Vite plugins in its config integrate sqlite/sql.js and inlined wasm when producing self-contained artifacts.\n - [src-cm_plan](src-cm_plan) currently hosts the FastAPI backend package (placeholder). Its presence marks the expected integration point for server APIs referenced by the UI and engine container builds.\n - [tests](tests) contains the test suite that spans domain logic, solver behaviour, API-layer expectations and persistence adapters. Tests reveal many cross-module interactions (engine solving, wasm integration, UI bundling, backend persistence).\n\nKey cross‑module workflows\n- Build-and-embed WASM workflow (browser-first/self-contained):\n 1. Build Rust crate in [engine](engine).\n 2. Run [ui-scripts](ui-scripts)/build-engine-wasm.mjs to write artifacts into [ui-src](ui-src).\n 3. Build [ui](ui); optional bundle via ui-scripts to produce a self-contained HTML.\n 4. Produce [docker-ui](docker-ui) self-contained image or serve static assets.\n\n- Containerized dev/devops workflow (server-backed):\n 1. 
Build images: [docker-engine](docker-engine), [docker-backend](docker-backend), [docker-ui](docker-ui).\n 2. Compose them with [docker-compose.yml](docker-compose.yml) for local integration; add [docker-compose.postgres.yml](docker-compose.postgres.yml) if a real Postgres is required.\n 3. Use [docker-compose.self-contained.yml](docker-compose.self-contained.yml) to run a single-image lightweight service where appropriate.\n\n- Governance & release workflow\n - Before any change to persistence, public API, or released artifacts follow [AGENTS](AGENTS.md): bump versions in [pyproject.toml](pyproject.toml) and UI package locations, run GitNexus detection workflows, perform impact analysis, and follow the prescribed pre-commit/PR checklist.\n\nQuick references\n- System specification and requirements: [docs](docs)\n- Developer and contributor README: [README](README.md)\n- Rust engine crate: [engine](engine)\n- WASM packaging & UI integration: [ui-scripts](ui-scripts), [ui-src](ui-src), [ui](ui)\n- Containers and compose files: [docker-engine](docker-engine), [docker-backend](docker-backend), [docker-ui](docker-ui), [docker-compose.yml](docker-compose.yml), [docker-compose.postgres.yml](docker-compose.postgres.yml), [docker-compose.self-contained.yml](docker-compose.self-contained.yml)\n- Tests and CI: [tests](tests)\n- Governance & version/schema workflows: [AGENTS](AGENTS.md)\n- Python project manifest: [pyproject.toml](pyproject.toml)\n- Backend API placeholder: [src-cm_plan](src-cm_plan)\n\nMermaid diagram (high‑level relationships)\n```mermaid\nflowchart LR\n EngineSrc[engine crate] -->|wasm build| UIScripts[ui-scripts]\n UIScripts --> UIWasm[\"ui-src (WASM package)\"]\n UIWasm --> UI[\"ui (SvelteKit)\"]\n UI -->|browser mode| Browser[Browser-only runtime]\n UI -->|static image| DockerUI[docker-ui image]\n EngineSrc -->|container build| DockerEngine[docker-engine image]\n BackendSrc[src-cm_plan / backend code] --> DockerBackend[docker-backend image]\n DockerEngine --> Compose[docker-compose.yml]\n DockerBackend --> Compose\n DockerUI --> Compose\n 
Postgres[postgres service] --> ComposePostgres[docker-compose.postgres.yml]\n ComposePostgres --> Compose\n Docs[docs / AGENTS] -. governs .-> EngineSrc\n Docs -. governs .-> UI\n Tests[tests] -->|integration| Compose\n```","overview":"# cdim-plan — Wiki\n\ncm-plan — Planning engine for Centre de la main (CM)\n=====================================================\n\nWelcome — this page is your entry point to the cm-plan repository (Centre de la main planning). It explains what the project does, how the pieces fit together, the key end-to-end flows you will encounter when working on the codebase, and where to look first.\n\nWhat this project does\n----------------------\ncm-plan is a constraint-based scheduling and planning engine with two deployment variants:\n\n- Server mode: a Python FastAPI backend using OR-Tools for solving, persisting data to SQLite or PostgreSQL, and serving a SvelteKit UI (Docker Compose deployment is the reference).\n- Browser-only mode: a Rust/WASM solver compiled to WebAssembly with a TypeScript façade and an in-browser sql.js persistence layer — the UI runs without a server.\n\nFeatures include generation, local repair, simulation, equity/resilience analysis, and an editable weekly grid for semester planning.\n\nHigh-level architecture\n-----------------------\nBelow is a compact view of the most important modules and how they interact. 
It should give you a one-glance mental map.\n\n```mermaid\nflowchart LR\n UI[Web UI]\n WASM[\"WASM & JS Engine Integration\"]\n Rust[Rust Engine]\n API[Python Backend API]\n Biz[\"Python Engine (Business Logic)\"]\n Domain[\"Backend Domain & Ports\"]\n Adapters[\"Backend Adapters & Persistence\"]\n DB[(SQLite / Postgres)]\n Other[\"Other (build, tests, tooling)\"]\n\n UI --> WASM\n UI --> API\n WASM --> Rust\n API --> Biz\n Biz --> Domain\n API --> Adapters\n Adapters --> DB\n Adapters --> Domain\n Other --> UI\n Other --> Biz\n```\n\nModule entry points (where to read next)\n----------------------------------------\n- Read the domain first to understand the stable concepts: [Backend Domain & Ports](backend-domain-ports.md) — start at cm_plan/domain/types.py and the Protocols that define repository ports.\n- See the core business logic in [Python Engine (Business Logic)](python-engine.md) — this is where imports, constraint evaluation, diffs, equity metrics, and local repair live; it calls into the solver and uses domain types.\n- Two solver choices:\n - The Python path uses OR-Tools via the python engine and is exposed through the API.\n - The Rust solver is in [Rust Engine](rust-engine.md) and is compiled to WASM for browser-only deployments.\n- The HTTP surface is [Python Backend API](python-backend-api.md) — see src/cm_plan/api/app.py (full backend) and src/cm_plan/api/engine_app.py (compute-only, stateless).\n- Persistence adapters live in [Backend Adapters & Persistence](backend-adapters-persistence.md) (SQLite and Postgres implementations; they implement the domain ports).\n- The client and browser integration are in [Web UI](web-ui.md) and [WASM & JS Engine Integration](wasm-js-engine-integration.md).\n- Repo-wide build, tests and developer tools are under [Other](other.md).\n\nKey end-to-end flows (what you will trace in the code)\n-----------------------------------------------------\nSeveral flows are used frequently in development and testing; these are good to follow with 
the debugger or by reading the call stack.\n\n- Loading a semester database in the browser:\n - UI store calls openDatabaseFile/openDatabaseBytes → db migration steps → normalize person marks → normalizeMarkValue (client-side types).\n - See ui/src/lib/db.ts and src/lib/types.ts in the UI code for the normalization chain.\n\n- Applying an import to a semester (server-side path):\n - API route apply_import_to_semester → engine.imports.import_file → engine filters detect/can_handle → parsing and conversion logic.\n - See cm_plan/api/routes_persistence.py and cm_plan/engine/imports.py.\n\n- Exporting ICS from the browser-only store:\n - api-browser.ts exportIcs → query DB → getDb → produce ICS bytes.\n - See ui/src/lib/api-browser.ts and ui/src/lib/db.ts.\n\n- Backend request lifecycle:\n - HTTP request to [Python Backend API](python-backend-api.md) → thin route handler → [Python Engine (Business Logic)](python-engine.md) for heavy work → domain models and ports → selected adapter in [Backend Adapters & Persistence](backend-adapters-persistence.md) for storage.\n\nQuick setup (developer)\n-----------------------\nPrerequisites:\n- Python >= 3.12\n- Rust + wasm32 target if working on the WASM engine\n- Docker / Docker Compose for the server reference deployment\n\nCommon dev workflows:\n- Install Python dev environment:\n - python -m venv .venv\n - source .venv/bin/activate\n - python -m pip install -e .\n - (Dependencies are declared in pyproject.toml: pydantic, ortools, fastapi, uvicorn, etc.)\n\n- Run the API locally:\n - uvicorn cm_plan.api.app:app --reload --port 8000\n - Or use Docker Compose (reference deployment): docker compose up --build\n\n- Run the UI locally:\n - cd ui\n - npm install\n - npm run dev (SvelteKit dev server)\n - For browser-only mode, build the WASM and use the browser API façade.\n\nWhere to start as a new contributor\n----------------------------------\n1. Read the domain model: [Backend Domain & Ports](backend-domain-ports.md).\n2. 
Run the test suite under [Other](other.md) to ensure your environment works.\n3. Explore a common end-to-end: open the UI, load a sample semester, trace normalizeMarkValue in the client-side types, then trigger an import and follow server-side import flow.\n4. If you plan to work on solving logic, read the Python engine path first and then the [Rust Engine](rust-engine.md) if you touch WASM.\n\nSupport and conventions\n-----------------------\n- The repository emphasizes clear separation: domain (pure models and ports), adapters (persistence), business logic (engine), and API/UI. Follow that separation when adding features or tests.\n- Tests and CI live under the project tooling — see [Other](other.md) for test runners and CI rules.\n- If you are changing data shapes, update both the domain models and all persistence adapters.\n\nUseful files to open immediately\n- src/cm_plan/domain/types.py\n- src/cm_plan/api/app.py and engine_app.py\n- src/cm_plan/engine (imports, solver interface)\n- src/cm_plan/adapters/*_adapter.py\n- ui/src/lib/db.ts and ui/src/lib/types.ts\n- ui/src/lib/wasm/ and ui/src/lib/engine.ts\n\nWelcome aboard — follow the reading order above, run the tests, and don’t hesitate to search for the function names mentioned on this page. 
The rest of the wiki has module-specific documentation linked from the headings above.","python-backend-api":"# Python Backend API\n\nPython Backend API — cm_plan.api\n\nPurpose\n- Provides the HTTP surface for the CM Planning engine.\n- Exposes two FastAPI applications:\n - app (src/cm_plan/api/app.py): full backend with persistence, authentication and audit.\n - engine_app (src/cm_plan/api/engine_app.py): compute-only, stateless service exposing solver/evaluator/simulator endpoints.\n- Routes are thin wrappers around the pure planning engine and domain logic; most heavy work is delegated to cm_plan.engine and cm_plan.domain packages.\n- Persistence is pluggable via repository adapters (SQLite by default, PostgreSQL when CM_PG_DSN is set).\n\nEnvironment configuration\n- CM_DB_PATH (default \"cm_plan.db\") — path for SQLite backend.\n- CM_PG_DSN (empty by default) — when set, selects PostgreSQL adapter.\n- CM_JWT_SECRET (default \"dev-secret-change-in-production\") — secret for JWT signing.\n- CM_JWT_EXPIRE_MINUTES (default \"480\") — token lifetime in minutes.\n- CM_OIDC_ISSUER, CM_OIDC_CLIENT_ID, CM_OIDC_CLIENT_SECRET, CM_OIDC_REDIRECT_URI, CM_OIDC_SCOPES — OpenID Connect configuration used by the OIDC integration.\n\nHow the app starts (app.py)\n- FastAPI app is configured with a lifespan asynccontextmanager _lifespan(application).\n - At startup it chooses an adapter: if CM_PG_DSN is set, it imports cm_plan.adapters.pg_adapter.pg_connect; otherwise cm_plan.adapters.sqlite_adapter.connect.\n - The connection is stored on application.state as db_conn; the backend kind is stored as db_backend (\"pg\" or \"sqlite\").\n - At shutdown it closes the connection via conn.close().\n- The app includes three routers:\n - routes (stateless engine endpoints)\n - routes_persistence (persistence-aware endpoints)\n - routes_auth (authentication & user management)\n- CORS middleware is enabled permissively.\n\nDependency injection (deps.py)\n- get_conn(request: Request) -> 
returns request.app.state.db_conn.\n- _is_pg(request: Request) -> checks request.app.state.db_backend == \"pg\".\n- Repository factory functions choose concrete repository implementations at request time:\n - get_planning_repo -> PgPlanningRepository or SqlitePlanningRepository\n - get_context_repo -> PgSemesterContextRepository or SqliteSemesterContextRepository\n - get_ruleset_repo -> PgRuleSetRepository or SqliteRuleSetRepository\n - get_audit_log -> PgAuditLog or SqliteAuditLog\n - get_user_repo -> PgUserRepository or SqliteUserRepository\n- This pattern avoids circular imports and centralizes backend selection.\n\nAuthentication (auth_service.py and routes_auth.py)\n- Password hashing\n - hash_password(plain: str) -> str: bcrypt.hashpw with gensalt. Each call produces a unique salt.\n - verify_password(plain: str, hashed: str) -> bool: bcrypt.checkpw.\n- JWT tokens\n - create_access_token(user: User) -> str:\n - Uses CM_JWT_SECRET and HS256.\n - Payload: sub (user.id), username, roles (list of role values), exp (UTC expiry).\n - decode_access_token(token: str) -> TokenPayload | None:\n - Returns TokenPayload on success; returns None on decode/validation failure.\n- User factory\n - create_user(username, password, roles) -> User: convenience for seeded users; will hash the provided password and default role to AppRole.LECTEUR.\n- Routes and dependencies (routes_auth.py)\n - _oauth2_scheme = OAuth2PasswordBearer(tokenUrl=\"/api/auth/login\", auto_error=False)\n - get_current_user(token, request) -> User | None:\n - Calls decode_access_token; then loads user via repo.get_by_id; returns None if token invalid or user inactive.\n - require_user(...) 
-> User: raises 401 if no authenticated user.\n - require_permission(permission: Permission) -> callable dependency:\n - The callable resolves current user and enforces permission using user_permissions(user.roles); raises 403 if missing.\n - Endpoints\n - POST /api/auth/login: verifies credentials using get_user_repo and verify_password; returns TokenResponse (access_token: str).\n - GET /api/auth/me: returns current user info.\n - POST /api/users: create_user_endpoint — requires Permission.MANAGE_USERS; creates user via auth_service.create_user and repo.save, records audit.\n - GET /api/users: list_users — requires manage users permission.\n - PUT /api/users/{user_id}/roles: update_user_roles — requires MANAGE_ROLES.\n - DELETE /api/users/{user_id}: delete_user — requires MANAGE_USERS.\n - POST /api/auth/seed: seed_admin — idempotent endpoint that creates an admin user when user table is empty (username \"admin\", password \"admin\").\n - OIDC endpoints\n - /auth/oidc/login -> returns an authorization URL and a state token (calls oidc_service.get_authorization_url).\n - /auth/oidc/callback -> exchanges code for tokens, validates id_token, creates or updates a local user, returns local JWT token.\n - /auth/oidc/status -> reports whether OIDC is configured.\n - Patterns: endpoints use FastAPI Depends to get repos and audit logger; audit.record() is called for security-sensitive operations (login, user create/delete, OIDC events).\n\nOIDC integration (oidc_service.py)\n- Enabled if CM_OIDC_ISSUER and CM_OIDC_CLIENT_ID are set (oidc_enabled()).\n- Discovery and JWKS\n - _discover() fetches and caches the provider discovery document (/.well-known/openid-configuration).\n - get_jwks() fetches the keys via discovery.jwks_uri.\n- Flows\n - get_authorization_url(state): constructs authorization_endpoint URL with required query params.\n - exchange_code(code): POST to token_endpoint to exchange authorization code for tokens.\n - validate_id_token(id_token): finds 
appropriate JWKS key by kid and verifies token with jose.jwt.decode (audience=_OIDC_CLIENT_ID, issuer=_OIDC_ISSUER).\n - extract_user_info(claims) -> dict with username, email, name, sub: used to create/find local user records.\n\nStateless engine routes (routes.py)\n- Purpose: expose pure planning engine functions as HTTP endpoints. Each request carries full context; engine is stateless.\n- Endpoints\n - GET /api/health -> health()\n - POST /api/solve -> solve_endpoint(req: SolveRequest) -> delegates to cm_plan.engine.solver.solve. Converts dataclasses/Pydantic nested objects to plain dicts via internal _dc_to_dict.\n - POST /api/evaluate -> evaluate_endpoint -> cm_plan.engine.constraint.evaluate.\n - POST /api/evaluate/detailed -> evaluate_detailed_endpoint:\n - Runs evaluate and returns grouped violations by rule_id with per-rule metadata, counts and nature/weight.\n - POST /api/equity -> equity_endpoint -> cm_plan.engine.equity.compute_equity.\n - POST /api/resilience -> resilience_endpoint -> cm_plan.engine.resilience.compute_resilience.\n - POST /api/simulate -> simulate_endpoint -> cm_plan.engine.simulation.simulate.\n - POST /api/overtime-check -> overtime_check_endpoint -> cm_plan.engine.exports.check_overtime_horizon (stateless variant).\n- Utility: _dc_to_dict is used to convert dataclasses (and nested Pydantic models) recursively into JSON-serializable dictionaries.\n\nPersistence-aware routes (routes_persistence.py)\n- Purpose: provide CRUD for persisted domain objects (plannings, semesters/contexts, rulesets), import flows, exports and reports, audit queries.\n- DI aliases are used to attach repository dependencies (PlanningRepo, ContextRepo, RuleSetRepo, Audit).\n- Key areas:\n - Planning CRUD\n - POST /plannings -> save_planning(planning); repo.save then audit.record(SAVE_PLANNING).\n - GET /plannings/{id} -> get_planning; supports version selection.\n - GET /plannings/{id}/versions -> list versions.\n - POST 
/plannings/{id}/versions/{version}/status -> update_planning_status; updates status and records audit.\n - GET /plannings -> list_plannings(status?) returning JSON dumps.\n - GET /plannings/{id}/diff?v1=&v2= -> diff_versions: uses engine.diff.diff_plannings and emits structured change lists.\n - Semester Context CRUD\n - POST /semesters -> save_semester(ctx) returns semester_id.\n - GET /semesters/{semester_id} and GET /semesters -> get/list context(s).\n - Absences/Guards\n - List/replace/add endpoints for absences and guards under /semesters/{semester_id}/...\n - replace_absences replaces context.absences and saves; add_absence appends.\n - Rulesets\n - CRUD endpoints for rulesets; validate_ruleset calls RuleSet.validate_coherence(); clone_ruleset calls RuleSet.clone and saves.\n - POST /rulesets/default builds and saves build_default_ruleset().\n - POST /rulesets/{ruleset_id}/preview: evaluates the ruleset against a saved planning and context and returns violation summary.\n - Audit log\n - GET /audit supports filtering by planning_id, user, action, since, limit.\n - POST /audit/purge deletes older entries before a given datetime.\n - Exports\n - GET /holidays/{year} returns vaud_holidays(year).\n - POST /plannings/{id}/export/json and /anonymized, GET /plannings/{id}/impact-report, /ics/{physician_id}, /plannings/{id}/overtime-alerts and /plannings/{id}/export/html use engine.exports helpers.\n - export_anonymized returns anonymized planning plus mapping.\n - Import helpers and endpoints\n - Low-level parse endpoints: /import/guards/csv, /import/guards/xlsx, /import/absences/csv, /import/absences/xlsx accept CSV text or base64-encoded XLSX and call parse_* functions from cm_plan.engine.imports.\n - POST /import/file: auto-detect filter via import_file(filename, bytes) and return parsed assignments/absences/guards/warnings.\n - POST /semesters/{semester_id}/import/file: apply_import_to_semester:\n - Calls import_file to parse content.\n - Ensures physicians 
exist for imported names via _ensure_physicians_for_import which:\n - Normalizes names with _normalize_physician_name (NFKD fold, strip combining marks, whitespace normalization, uppercase).\n - Infers roles from imported guards/assignments using _infer_roles_from_import which calls _merge_role_hint to prefer higher-ranked roles (rank: MA=0, CDC=1, CADRE=2).\n - Creates new Physician objects if needed and appends them to ctx.physicians.\n - Builds Absence and Guard objects via _build_absences_from_import and _build_guards_from_import and replaces existing data where applicable (_replace_absences_for_import, _replace_guards_for_import).\n - Important behavior: only absences and guards are automatically applied in this \"import as priming\" flow; detected weekly assignments will produce a warning in the returned result.\n- Error handling: endpoints raise fastapi.HTTPException with appropriate status codes (400, 404, 409, 401, 403) following consistent patterns.\n\nSchemas (schemas.py)\n- Request models use domain types directly when possible (SemesterContext, RuleSet, Assignment, Absence).\n- Main request models:\n - SolveRequest: ctx, ruleset, mode, reference_assignments, time_limit_seconds, week_start, week_end.\n - EvaluateRequest: assignments, ruleset, ctx.\n - EquityRequest, ResilienceRequest: similar shape.\n - SimulateRequest: ctx, ruleset, current_assignments, hypothetical_absences, time_limit_seconds.\n\nAudit and logging\n- The audit log is injected using get_audit_log and used pervasively for actions that change state or record security events. 
Typical pattern: audit.record(action=..., user=..., detail=..., planning_id=...).\n\nSecurity and permission model\n- Tokens: OAuth2 Password flow with bearer tokens; tokenUrl configured to /api/auth/login.\n- Permissions are enforced by require_permission(Permission.*) dependencies in routes_auth and elsewhere.\n- get_current_user returns None when no or invalid token is supplied; require_user wraps this to enforce authentication when needed.\n\nInteracting with the engine and domain packages\n- Routes in routes.py and routes_persistence.py delegate to:\n - cm_plan.engine.solver.solve\n - cm_plan.engine.constraint.evaluate\n - cm_plan.engine.equity.compute_equity\n - cm_plan.engine.resilience.compute_resilience\n - cm_plan.engine.simulation.simulate\n - cm_plan.engine.exports.* for various exports and checks\n - cm_plan.engine.imports.import_file / parse_* for file parsing\n - cm_plan.domain.rules and cm_plan.domain.types for RuleSet manipulation and type definitions\n- Keep routes thin: they translate incoming Pydantic models to the engine API, then translate results to JSON-friendly structures (strings for UUIDs, ISO dates, enum .value strings).\n\nTesting pointers and notable behaviors\n- Authentication helpers are unit-tested in tests/test_auth.py via direct calls to hash_password, verify_password, create_user, create_access_token and decode_access_token.\n- Engine endpoints are exercised in tests/test_api.py using the FastAPI test client against solve/evaluate/simulate/equity/resilience endpoints.\n- Important behaviors to preserve when changing code:\n - _dc_to_dict converts dataclasses and nested Pydantic models recursively for endpoints that return engine objects (solve/evaluate/etc.).\n - seed_admin endpoint is intentionally idempotent and only available when user table is empty.\n - apply_import_to_semester creates physicians inferred from import content and only auto-applies absences and guards (assignments cause a warning).\n - OIDC discovery and JWKS 
retrieval are cached at module-level in oidc_service._discovery for discovery document only.\n\nExtending the API\n- Adding a new persisted resource:\n - Add repo interface implementation in adapters (sqlite_adapter and pg_adapter).\n - Expose repository through deps.py factory for DI selection.\n - Add router endpoint in routes_persistence.py; use the same patterns: repo.save/get/list, raise HTTPException on missing resources, and call audit.record for state changes.\n- Adding a new stateless engine endpoint:\n - Add a Pydantic schema in schemas.py if needed.\n - Create a handler in routes.py that delegates to the engine, then returns a JSON-serializable result (use _dc_to_dict when returning dataclasses).\n- Adding permissions:\n - Extend domain types Permission and user_permissions logic (cm_plan.domain.types).\n - Enforce via require_permission(Permission.X) dependency on endpoints.\n\nOperational notes\n- To run the compute-only engine: use the engine_app FastAPI instance (src/cm_plan/api/engine_app.py) — useful for splitting compute and persistence in deployments.\n- DB backend switching:\n - For SQLite: set CM_DB_PATH or use default.\n - For PostgreSQL: set CM_PG_DSN; app._lifespan will import and call pg_connect.\n- CORS: configured to allow all origins/headers/methods by default; adjust in app.py if needed for production.\n\nArchitecture overview (high level)\n- The following diagram summarizes key components and runtime wiring:\n\n```mermaid\nflowchart LR\n A[\"FastAPI app (app / engine_app)\"]\n A --> B[\"Routers: routes, routes_persistence, routes_auth\"]\n A --> C[\"Lifespan → DB connection on app.state\"]\n C --> D[\"Adapters: sqlite_adapter | pg_adapter\"]\n B --> E[\"Dependency factories (deps.py)\"] \n E --> D\n B --> F[\"Engine & Domain (cm_plan.engine, cm_plan.domain)\"]\n B --> G[\"OIDC & Auth services (auth_service, oidc_service)\"]\n```\n\nCommon pitfalls and gotchas\n- Do not assume returned objects from engine are JSON-serializable; use _dc_to_dict or 
.model_dump(mode=\"json\") as appropriate.\n- decode_access_token returns None on failure — calling code must handle None to avoid crashes.\n- OIDC flows require valid issuer and client configuration; missing CM_OIDC_* env vars cause oidc_enabled() to return False and endpoints to return 501.\n- import endpoints accept base64-encoded file contents for binary formats; forgetting base64 encoding will result in malformed input.\n- Role inference from imported files is heuristic-based (see _infer_roles_from_import and _merge_role_hint); review if import formats change.\n\nFiles of interest\n- src/cm_plan/api/app.py — main FastAPI app and lifespan DB wiring.\n- src/cm_plan/api/engine_app.py — compute-only FastAPI app.\n- src/cm_plan/api/deps.py — DI factory functions for repositories and audit log selection.\n- src/cm_plan/api/auth_service.py — password hashing and JWT utilities.\n- src/cm_plan/api/oidc_service.py — OpenID Connect helper functions.\n- src/cm_plan/api/routes.py — stateless engine endpoints (solve/evaluate/simulate/etc.).\n- src/cm_plan/api/routes_auth.py — authentication endpoints and permission dependencies.\n- src/cm_plan/api/routes_persistence.py — persistence-aware endpoints (CRUDs, imports, exports, audit).\n- src/cm_plan/api/schemas.py — Pydantic request models used by API handlers.\n\nIf you will modify or add endpoints, follow existing patterns: keep business logic in engine/domain packages, keep routes thin, use DI functions in deps.py to obtain repositories, and record audit events for state-changing actions.","python-engine-business-logic":"# Python Engine (Business Logic)\n\nPython Engine (Business Logic)\n=============================\n\nSummary\n-------\nThe cm_plan.engine package implements the core business logic for the planning system: importing external spreadsheets/PDFs, evaluating constraints, producing diffs and impact reports, computing equity/resilience metrics, and performing local \"repair\" replanning. 
It is the layer that understands domain rules and planning semantics and coordinates with the solver (cm_plan.engine.solver) and domain models (cm_plan.domain.*).\n\nThis documentation describes the main responsibilities, public API surface, key internal data structures, and extension points relevant for a developer who needs to understand or contribute to the engine code.\n\nHigh-level architecture\n----------------------\n- Import pipeline: engine.filters.* provide pluggable parsers (XLSX, CSV, PDF) that produce ImportResult records. imports.import_file auto-detects and runs a filter.\n- Evaluation: constraint.evaluate checks a Planning/assignment set against a RuleSet and returns violations.\n- Metrics: equity.compute_equity and resilience.compute_resilience produce fairness and robustness reports.\n- Repair: repair.repair prepares a localized re-solve using the solver (solve) with a restricted window and reference assignments.\n- Exports: exports contains utilities for anonymization, JSON/HTML/.ics export, impact reports and other helper functions (holiday calendars, overtime checks).\n- Diff: diff.diff_plannings compares two Planning versions into a structured PlanningDiff.\n\nMermaid overview (small)\n------------------------\nThis tiny diagram shows major flows and components.\n\n```mermaid\nflowchart LR\n Filters[Filters Registry] -->|parse_auto| ImportFile[imports.import_file]\n ImportFile -->|ImportResult| Domain[\"Domain objects (Absence/Assignment/Planning)\"]\n Domain --> Constraint[constraint.evaluate]\n Domain --> Equity[equity.compute_equity]\n Domain --> Resilience[resilience.compute_resilience]\n Domain --> Repair[\"repair.repair --> solver.solve\"]\n Exports[exports.*] -->|uses| Domain\n```\n\nCore modules & public APIs\n-------------------------\n\n1) cm_plan.engine.constraint\n- Purpose: stateless constraint evaluator. 
It does not solve; it inspects an assignment set and returns violations per rule.\n- Key public symbol:\n - evaluate(assignments: list[Assignment], ruleset: RuleSet, ctx: SemesterContext) -> EvaluationResult\n - Builds a fast index (_build_index), iterates enabled rules and dispatches each rule to a checker via _dispatch_rule.\n - Returns EvaluationResult which contains Violation entries and convenience properties:\n - EvaluationResult.hard_violations (ConstraintNature.DURE)\n - EvaluationResult.soft_violations (ConstraintNature.MOLLE)\n - EvaluationResult.is_feasible (no hard violations)\n- Important classes:\n - Violation(rule_id: str, nature: ConstraintNature, message: str, half_day: HalfDay|None, physician_id: PhysicianId|None)\n - EvaluationResult\n- Implementation notes:\n - A precomputed _PlanningIndex is built by _build_index. Index fields include by_half_day, by_physician, by_physician_halfday, physician_map, absence_set, room_cap, guard_dates, holiday_set, closure_set, nominative_map, and calendar_restrictions. 
This index is designed for O(1) lookups used by multiple checkers.\n - Checkers implemented (examples): _check_single_assignment, _check_min_staffing, _check_supervision, _check_eligibility, _check_quota, _check_min_operators_per_or, _check_holiday_bridging, _check_calendar_coherence, _check_closure, _check_nominative_coupling, _check_absent_operator.\n - _dispatch_rule maps ConstraintType to the checker function; unknown constraint types return no violations (empty list).\n- Extension: add a new constraint checker and register it in _dispatch_rule mapping.\n\n2) cm_plan.engine.diff\n- Purpose: produce a structured diff between two Planning objects.\n- Public API:\n - diff_plannings(before: Planning, after: Planning) -> PlanningDiff\n- Key dataclasses:\n - AssignmentChange(half_day, physician_id, old_activity: Optional[str], new_activity: Optional[str])\n - PlanningDiff(added, removed, modified) with helpers total_changes and is_empty\n- Behavior: builds maps keyed by (physician_id, HalfDay) and computes added/removed/modified changes.\n\n3) cm_plan.engine.equity\n- Purpose: compute fairness metrics (SDS-3003) — how workload distribution compares to activity rates.\n- Public API:\n - compute_equity(assignments: list[Assignment], ctx: SemesterContext, ruleset: RuleSet) -> EquityReport\n- Data models:\n - EquityScore(role, activities, weighted_stddev, per_physician: dict[name, weighted_count])\n - EquityReport(scores: list[EquityScore]) with overall_deviation property\n- Behavior:\n - Locates an enabled Rule of ConstraintType.EQUITY in the ruleset and uses its \"categories\" parameter to compute per-role, per-activity-category weighted counts.\n - Weighting uses physician.activity_rate (ensures rate >= 0.01) and computes normalized counts, standard deviation and rounds values for readability.\n - Absence categories in _EQUITY_EXCLUDED_ABSENCES are not penalized in the counts.\n\n4) cm_plan.engine.exports\n- Purpose: various planning export/import helpers and 
reporting utilities:\n - anonymize_planning(planning, ctx) -> (anonymized_planning, mapping)\n - Replaces physician UUIDs with synthetic UUIDs and returns a mapping from original IDs to labels like \"Médecin-001\".\n - build_impact_report(before, after, ctx) -> ImpactReport\n - Uses diff_plannings internally to produce ImpactEntry records for each change; summarizes totals and affected physicians/half-days.\n - closed_or_journal(ctx) -> list[ClosedOREntry]\n - Extract closures relevant to OR activities.\n - physician_ics(physician_id, physician_name, assignments) -> str\n - Produces a minimal .ics calendar with VEVENT entries per half-day assignment.\n - export_planning_json(planning, ctx, ruleset=None) -> dict\n - Produces self-contained JSON document including planning, semester_context and optionally ruleset.\n - vaud_holidays(year) and easter_date(year)\n - Compute standard canton holidays used across imports/exports and holiday-aware checks.\n - check_overtime_horizon(ctx, assignments, hours_per_half_day=4.0) -> list[OvertimeAlert]\n - Warns when a physician's overtime_hours cannot be absorbed given remaining free half-days.\n - estimate_hs_default(previous_overtime, recovery_slots_used, hours_per_half_day=4.0) -> float\n - Helper to estimate carried overtime balance for the next semester.\n - detect_locked_conflicts(ctx, ruleset) -> list[LockedConflict]\n - Scans ctx.locked_assignments and reports conflicts that would block the solver (e.g., locked to activity while absent or role-ineligible).\n - export_planning_html(planning, ctx, title=\"Planning CM\", include_alerts=True, variant=\"published\") -> str\n - Generates an HTML table (A3-friendly) for PDF rendering; variant badge, locked styling, includes a brief alerts placeholder.\n - Part-time helpers:\n - PartTimeAllocation dataclass and compute_part_time_target(physician_rate, total_slots, allocation) -> int\n - DEFAULT_PART_TIME_ALLOCATIONS list provided for typical roles.\n- Implementation notes:\n - 
anonymize_planning uses pydantic .model_copy(update={...}) on Planning/Assignment objects.\n - physician_ics uses fixed times for AM/PM and embeds assignment state in DESCRIPTION.\n - check_overtime_horizon counts existing RECUP_HS assignments and iterates days to compute available free half-days between physician.start_date and semester_end (weekdays only).\n\n5) cm_plan.engine.filters (package)\n- Purpose: pluggable import filter framework. Individual filter modules implement parsers for specific external formats (XLSX/CSV/PDF). Filters register themselves with register(filter_instance).\n- Important elements:\n - ImportFilter protocol (name, description, can_handle(filename, bytes) -> bool, parse(filename, bytes) -> ImportResult).\n - ImportResult aggregates absences (AbsenceRecord), assignments (AssignmentRecord), guards (GuardRecord), physician_names, warnings and source info.\n - Registry functions:\n - register(filt: ImportFilter) -> ImportFilter\n - registered_filters() -> list[ImportFilter]\n - detect(filename, file_bytes) -> ImportFilter|None\n - parse_auto(filename, file_bytes) -> ImportResult (first matching filter; raises ValueError if none)\n - Shared helpers:\n - parse_french_month(text) -> Optional[int]\n - parse_french_date_header(text) -> Optional[(weekday_index, day_of_month)]\n - half_days_for_date(d: date) -> list[HalfDay]\n- Implementation notes:\n - When cm_plan.engine.imports imports individual filters (absences_chuv, absences_generic, guards_generic, guards_pdf_chuv, weekly_plan_chuv), those modules auto-register filter instances at import-time.\n - Filters often depend on third-party libraries: openpyxl and pypdf. 
They may parse cell fill colors (ARGB hex strings) and header heuristics.\n- Example filters included in this package:\n - AbsencesCHUVFilter: color-coded monthly absence grid (XLSX).\n - AbsencesCSVFilter / AbsencesXLSXFilter: generic CSV/XLSX with explicit columns.\n - GuardsCSVFilter / GuardsXLSXFilter: generic guard CSV/XLSX.\n - GuardsPDFCHUVFilter: CHUV monthly guard roster PDF via pypdf.\n - WeeklyPlanCHUVFilter: CHUV weekly grid XLSX with rich heuristics for color→activity mapping and name cleaning.\n\n6) cm_plan.engine.imports\n- Purpose: stable import entry points for absences and guards and auto-detection wrapper.\n- Public API:\n - import_file(filename: str, file_bytes: bytes) -> ImportResult\n - Uses filters.parse_auto to auto-detect filter and parse the file.\n - parse_guards_csv / parse_guards_xlsx -> list[Guard] (legacy import helpers)\n - parse_absences_csv / parse_absences_xlsx -> list[Absence] (legacy import helpers)\n- Implementation notes:\n - parse_absences_xlsx and parse_guards_xlsx use _find_col header heuristics and convert rows to domain Absence/Guard objects (cm_plan.domain.types).\n\n7) cm_plan.engine.repair\n- Purpose: local \"repair\" replanning when perturbations (new absences) occur. Implements minimal-diff approach for stability.\n- Public API:\n - repair(ctx: SemesterContext, ruleset: RuleSet, current_assignments: list[Assignment], new_absences: list[Absence]|None = None, time_limit_seconds: float = 5.0) -> SolverResult\n- Behavior:\n - Merges new_absences into a deep copy of ctx (merged_ctx).\n - Computes affected date window from new_absences (expand ±2 days, clamped to semester).\n - Carries locked assignments from current_assignments into merged_ctx.locked_assignments.\n - Calls the central solve(...) function from cm_plan.engine.solver with SolveMode.REPAIR and a reference_assignments argument for stability objectives.\n- Notes:\n - repair delegates the heavy-lifting to the solver. 
It prepares the context and constraints that produce a solution with minimal deviation from the current plan.\n\n8) cm_plan.engine.resilience\n- Purpose: measure planning robustness to shocks (SDS-700x). The analysis is heuristic, fast and does not run the full solver.\n- Public API:\n - compute_resilience(assignments: list[Assignment], ctx: SemesterContext, ruleset: RuleSet) -> ResilienceReport\n- Outputs:\n - ResilienceReport containing a list of ShockScore entries and a margins breakdown (MarginCategory rows).\n - ShockScore(category, indicator: TrafficLight, margin:int, detail:str)\n - Margins list produced by _compute_margins.\n- Key behaviors:\n - Builds a slot_index mapping HalfDay → list[(physician_id, activity)].\n - Extracts applicable min_staffing rules from ruleset.\n - Evaluates shocks like \"loss of one MA\", \"loss of one CDC\", \"loss of a CADRE\", OR replacement/extra-OR scenarios using helper functions (_evaluate_role_shock, _evaluate_cadre_shock, _evaluate_or_open_shock, _evaluate_extra_or_shock).\n - Computes margins and assigns TrafficLight indicators (VERT / ORANGE / ROUGE).\n- Implementation notes:\n - Uses _get_min_required to resolve exceptions in min_staffing rules for a given half-day.\n - The resilience module is intended to produce human-readable indicators and margin counts that complement solver-based checks.\n\nImportant data flows & interactions\n---------------------------------\n- imports.import_file -> filters.parse_auto -> filter.can_handle / filter.parse -> ImportResult\n - filters are imported and auto-registered by importing the filter modules in cm_plan.engine.imports\n- constraint.evaluate uses _build_index(ctx, assignments) to produce fast lookups for all checkers. This index construction is central to performance.\n- repair.repair calls solver.solve and passes a restricted week_start/week_end window plus reference_assignments and locked_assignments to preserve stability. 
See call graph: repair -> solve.\n- exports.build_impact_report depends on engine.diff.diff_plannings to compute added/removed/modified changes.\n- resilience.compute_resilience uses ruleset to extract minimum-staffing rules (ConstraintType.MIN_STAFFING) and then analyzes slot-level spare capacity.\n\nExtension & contribution guide\n------------------------------\n- Adding a new import filter:\n 1. Implement a class that satisfies the ImportFilter protocol: name, description, can_handle(filename, bytes) -> bool, parse(filename, bytes) -> ImportResult.\n 2. Create an instance and register it with register(instance) (filters should auto-register at module import).\n 3. Ensure dependencies (openpyxl/pypdf) are optional if possible; import them lazily in parse().\n 4. New filter will be picked up by imports.import_file via parse_auto().\n\n- Adding a new constraint type / checker:\n 1. Implement a checker function with signature checker(rule: Rule, assignments: list[Assignment], idx: _PlanningIndex, ctx: SemesterContext) -> list[Violation].\n 2. Update the _dispatch_rule mapping in cm_plan.engine.constraint to map the new ConstraintType to your checker.\n 3. Add tests that exercise the new rule type, ideally using evaluate() and checking EvaluationResult. Ensure _build_index provides the fields you need.\n\n- Adding a new export:\n - Implement a new function in exports.py (or a dedicated module under engine/exports) and keep it pure-data (return strings, dataclasses, or dicts). Reuse existing helpers for anonymization, holidays, or .ics generation.\n\nTesting & gotchas\n-----------------\n- Filters rely on visual heuristics (cell colors, sheet headers), so unit tests for filter parsing should include representative fixture files. 
Many filters skip weekends and attempt to parse month/year from sheet titles.\n- constraint._build_index must stay in sync with any new fields added to SemesterContext (e.g., if a new context property is added that checkers need).\n- constraint.evaluate intentionally separates evaluation from solving. It returns violations only; it does not change assignments or try to repair them.\n- exports.anonymize_planning uses deterministic but synthetic UUIDs derived from an integer counter (uuid.UUID(int=counter)). This is only for anonymized output and should not be mixed back into production data.\n- Some functions (physician_ics, export_planning_html) create human-readable output with small formatting and approximations (fixed times for AM/PM).\n- Many components depend on cm_plan.domain.types dataclasses/enums (Absence, Assignment, Planning, HalfDay, TimeSlot, Guard, Physician, Role, TrafficLight, etc.). When modifying domain models, ensure engine code uses the updated fields.\n\nPerformance considerations\n--------------------------\n- constraint._build_index groups assignments by half-day and physician to allow O(1) membership checks used by multiple checkers.\n- diff_plannings and equity computations are linear in the number of assignments and physicians; they are designed to be fast for interactive use.\n- repair.repair restricts the solver to a narrow time window and carries reference assignments to keep solver runtime bounded for perturbations.\n- Filters parsing spreadsheets/PDFs may be I/O and CPU heavy; they are executed once per upload and should be resilient to malformed worksheets.\n\nPractical examples\n------------------\n- Evaluate constraints for a planning:\n - result = constraint.evaluate(assignments, ruleset, ctx)\n - if not result.is_feasible: inspect result.hard_violations for blocking issues.\n\n- Import a file:\n - import_result = imports.import_file(\"Absences_Aout_2024.xlsx\", file_bytes)\n - inspect import_result.absences / assignments / 
warnings; map physician names to UUIDs using your application logic.\n\n- Compute equity and resilience:\n - equity_report = equity.compute_equity(assignments, ctx, ruleset)\n - resilience_report = resilience.compute_resilience(assignments, ctx, ruleset)\n\n- Run a repair after new absences:\n - solver_result = repair.repair(ctx, ruleset, current_assignments, new_absences=[...], time_limit_seconds=10.0)\n\nMaintainer notes\n----------------\n- When adding new filters, import them in cm_plan.engine.imports (or ensure imports.import_file imports the module so the filter auto-registers).\n- Keep the filters' can_handle heuristics conservative to avoid mis-detection; parse_auto returns the first matching filter.\n- When adding new constraint types, update any external documentation of ConstraintType values and ensure the front-end or rules editor can create rules with the correct parameters expected by the checker.\n\nReferences in code\n------------------\n- Constraint checkers expect rule.parameters to contain keys specific to each ConstraintType. See each _check_* function for expected parameter names (e.g., \"activity\", \"min_count\", \"exceptions\", \"role\", \"max_per_month\", \"cascade\", etc.).\n- Filters helper functions to parse French month/day labels are in engine.filters.__init__.py: parse_french_month, parse_french_date_header, half_days_for_date.\n- The solver integration point is solver.solve; repair.repair and exports.detect_locked_conflicts call into solver or prepare contexts for it.\n\nThis package is the glue between external inputs, domain models and the solver. When contributing, focus on clear boundaries: filters produce ImportResult; constraint.evaluate consumes domain Assignments + SemesterContext + RuleSet and returns EvaluationResult; repair prepares contexts and calls solver; exports produce human- or machine-readable artifacts. 
Keep logic in engine modules deterministic and side-effect free where possible to ease testing.","rust-engine":"# Rust Engine\n\nRust Engine — Module Documentation\n\nOverview\nThis crate implements the core scheduling engine for the CM planning product. It provides:\n- A compact, feature-aware local-search scheduler (local_search.rs) used in practice for semester-sized problems.\n- A pure-Rust CP-SAT style modeling layer and branch-and-bound solver (cpsat.rs) that implements linear/implication/abs-equality constraints and optimization.\n- A constraint evaluator (constraint.rs) that checks a planning against a RuleSet without solving.\n- Equity, diff, simulation and persistence helpers used by the UI and tests.\n- WASM bindings (bindings.rs) that expose the engine API as JSON-string functions to JavaScript (Svelte frontend).\n\nThe codebase is organized into modules exported from lib.rs:\n- types, rules — data model and ruleset representation.\n- constraint — evaluator producing violations (EvaluationResult).\n- cpsat — low-level CP model, propagation and B&B solver.\n- local_search — production local-search scheduler optimized for the problem.\n- solver — orchestration entrypoint (not shown fully here) that wires together local_search, cpsat builders and objectives.\n- equity, resilience, diff, simulation, persistence — analysis and helpers.\n- bindings — WASM entrypoints receiving/returning JSON.\n\nWASM bindings (bindings.rs)\nPurpose\nExpose engine capabilities to the frontend as JSON-in/JSON-out functions. 
These functions are annotated with #[wasm_bindgen] and accept stringified JSON inputs that map to the Rust types (SemesterContext, RuleSet, Assignment, etc.).\n\nKey functions\n- wasm_build_profile() -> String\n - Returns \"debug\" or \"release\" depending on compile profile.\n- get_schema_sql() -> String\n - Returns persistence::SCHEMA_SQL for DB initialization.\n- get_default_ruleset() -> String\n - Serializes rules::build_default_ruleset() to JSON.\n\nPrimary action APIs (all return serialized JSON):\n- wasm_solve(ctx_json, ruleset_json, mode, reference_json, time_limit, week_start, week_end) -> String\n - Parses SemesterContext and RuleSet, converts mode to SolveMode, optional reference assignments, optional week window, calls solver::solve, serializes result.\n- wasm_evaluate(assignments_json, ruleset_json, ctx_json) -> String\n - Parses assignments/ruleset/context and calls constraint::evaluate returning EvaluationResult.\n- wasm_compute_equity(assignments_json, ctx_json, ruleset_json) -> String\n - Calls equity::compute_equity and returns EquityReport.\n- wasm_compute_resilience(assignments_json, ctx_json, ruleset_json) -> String\n - Calls resilience::compute_resilience and returns a serialized result.\n- wasm_simulate(ctx_json, ruleset_json, current_assignments_json, hypothetical_absences_json, time_limit) -> String\n - Calls simulation::simulate for hypothetical-absence analyses.\n- wasm_diff_plannings(before_json, after_json) -> String\n - Calls diff::diff_plannings and serializes PlanningDiff.\n\nPersistence helpers:\n- wasm_serialize_planning(planning_json), wasm_serialize_semester(ctx_json)\n- wasm_save_planning_sql(planning_json) -> { sql, params }\n- wasm_save_semester_sql(semester_id, ctx_json) -> { sql, params }\n- wasm_save_ruleset_sql(ruleset_json) -> { sql, params }\n- wasm_validate_ruleset(ruleset_json) -> { valid, errors }\n\nError handling\nBindings return JSON with an \"error\" field via helper error_json(msg: &str) when parsing or 
serializing fails. The frontend expects this JSON contract (mirrors the Python API).\n\nConstraint evaluator (constraint.rs)\nPurpose\nEvaluate a Planning (list of Assignment) against a RuleSet and SemesterContext, returning violations. This is purely evaluative — it does not modify or solve schedules.\n\nPublic API\n- evaluate(assignments: &[Assignment], ruleset: &RuleSet, ctx: &SemesterContext) -> EvaluationResult\n\nCore types\n- Violation { rule_id, nature: ConstraintNature, message, half_day, physician_id }\n- EvaluationResult { violations: Vec<Violation> } with helpers:\n - hard_violations(), soft_violations(), is_feasible()\n\nImplementation notes\n- build_index builds a PlanningIndex for O(1) lookups: by_half_day, by_physician, absence_set, room capacities, holiday_set, closure_set, nominative_map, calendar_restrictions.\n- dispatch_rule maps rules::ConstraintType to checker functions (check_single_assignment, check_min_staffing, check_max_staffing, check_supervision, check_eligibility, check_quota, etc.).\n- Each checker inspects the index and produces Violation entries with informative messages (useful to present to users).\n\nExtending constraints\nTo add a new constraint:\n1. Add a ConstraintType variant in rules module.\n2. Implement a checker fn in constraint.rs with the signature used by other check_* functions.\n3. Add an entry in dispatch_rule to route the new ConstraintType.\n4. Update default_ruleset.json and tests.\n\nCPSAT-style model & solver (cpsat.rs)\nPurpose\nProvide a self-contained linear/implication CP model and a branch-and-bound solver with domain propagation. 
This is not a full substitute for OR-Tools but provides a deterministic, portable solver usable in tests and small models.\n\nCore data structures\n- VarId = usize\n- LinExpr { terms: Vec<(VarId, i64)>, constant: i64 }\n - evaluate(values: &[i64]) -> i64\n- LinearConstraint { expr: LinExpr, lb: i64, ub: i64 } (represents lb <= expr <= ub)\n- ImplicationConstraint { condition_var: VarId, condition_value: i64, inner: LinearConstraint }\n - Models \"only_enforce_if\" style implications.\n- Constraint enum: Linear | Implication | AbsEquality { result_var, source_var }\n- CpModel:\n - num_vars, lb, ub, is_boolean, constraints, objective: Option<LinExpr>, var_names\n - new_bool_var(name), new_int_var(lb, ub, name)\n - fix_var(var, value), add_linear_constraint, add_exactly, add_at_least, add_at_most\n - add_implication(condition_var, condition_value, expr, lb, ub)\n - add_abs_equality(result_var, source_var)\n - minimize(expr)\n\nSolver interfaces\n- SolveStatus: Optimal, Feasible, Infeasible, Unknown with name() -> &'static str\n- CpSolveResult { status, values, objective_value }\n- SolverConfig { time_limit_seconds, max_branches } with default 30s, 0 branches limit (0 = unlimited)\n- solve(model: &CpModel, config: &SolverConfig) -> CpSolveResult\n - Branch-and-bound with propagation\n - Uses propagate() to perform bound tightening and implication handling\n - select_branch_variable chooses the next variable (most-constrained boolean first)\n - Supports integer variables and abs-equality constraints.\n\nPropagation and constraint handling\n- propagate(model, state) repeatedly applies:\n - propagate_linear for LinearConstraint/Implication inner linear constraints\n - special-case propagation for sum-of-booleans constraints (fast pruning/fixing)\n - implication handling: if inner cannot be satisfied under current domains, force condition variable opposite\n - AbsEquality propagates |source| = result bounds\n\nBranching and bounds\n- objective_lower_bound computes a 
provable lower bound from current lb/ub\n- branch_and_bound implements an iterative, stack-based search (to avoid recursion)\n- max_branches_exceeded guards runaway searches (currently uses a hard cap of 10_000_000 in SolverState)\n\nHelpers\n- sum_vars(vars: &[VarId]) -> LinExpr\n- weighted_sum(terms: &[(VarId, i64)]) -> LinExpr\n- div_ceil/div_floor implement correct integer division rounding for bound computations\n\nSchedule-aware branching\n- ScheduleBrancher groups boolean decision vars into (physician_idx, halfday_idx) → Vec<VarId>\n- select_slot_with_key chooses the most-constrained slot (least remaining eligible activities)\n- solve_scheduling(model, config, brancher) implements schedule_branch_and_bound:\n - Greedy warm-start using brancher.hint_var (reference assignments) to get an initial feasible upper bound\n - Then performs slot-wise branching where a single chosen activity var is fixed to 1 and others in the slot fixed to 0\n - Supports integer choices as a fallback\n\nTests\ncpsat.rs contains unit tests for:\n- simple satisfaction/infeasibility/optimization\n- scheduling patterns\n- implication and abs-equality constraints\n- correctness of div_ceil/div_floor\n\nLocal search scheduler (local_search.rs)\nPurpose\nThe production-workhorse for realistic problem sizes. Uses a compact integer representation: one activity index per physician per half-day, not a one-hot boolean expansion. 
It starts from a reference schedule (REPAIR mode) or a greedy construction (GENERATE mode) and improves via simulated-annealing-style local moves while preserving feasibility as much as possible.\n\nKey types\n- ActIdx = u8\n- ScheduleConfig (immutable):\n - num_physicians, num_half_days, activities, act_to_idx, half_days, physicians\n - eligible[p][t] = Vec<ActIdx> sorted allowed activity indices (empty => fixed slot)\n - fixed[p][t] = bool for slots that must not be changed (locked/absent/recovery)\n - min_staffing, max_staffing, supervision, consult_cap, or_cap, quotas, hd_by_month, etc.\n - consult_acts, op_acts, stability_weight, equity_weight, num_months\n- Schedule (mutable):\n - assignment: Vec<ActIdx> flattened p * t\n - staffing: Vec<i32> flattened t * num_acts\n - quota_count: Vec<i32> flattened [p * num_months * num_acts + month*acts + a]\n - get(p, t), reassign(p, t, new_act, month_idx), swap(p1, p2, t, month_idx)\n\nImportant functions (flow)\n- build_config(ctx, ruleset, half_days) -> ScheduleConfig\n - Builds activity list (excluding CLOSED), act_to_idx, fixed/eligible matrices, closures, calendar restrictions, recup_we set via compute_recup_we (from solver.rs).\n- init_schedule / init from reference (not shown fully in snippet) creates a Schedule instance with staffing and quota caches primed for O(1) deltas.\n- is_reassign_feasible, is_swap_feasible, and simulated annealing move selection control which local moves are proposed and accepted.\n\nPRNG\n- Rng with xorshift64 is used to make search deterministic for tests. Methods: next_u64, next_usize(n), next_f64().\n\nTime helpers\n- now_seconds() variant for wasm32 and native platforms to avoid std::time in WASM.\n\nWhy local search?\n- cpsat boolean encoding explodes (~73k bools in a 26-week problem); local_search uses ~3.6k integer slots and starts from a near-feasible reference, enabling effective repairs and practical run-times. 
The solver orchestration (solver.rs) chooses between local_search and cpsat for different tasks (call graph shows solve → solve_local_search).\n\nDiff engine (diff.rs)\nPurpose\nCompute additive/removed/modified changes between two Planning versions.\n\nPublic API\n- diff_plannings(before: &Planning, after: &Planning) -> PlanningDiff\n - Produces three vectors of AssignmentChange and helper methods total_changes() and is_empty().\n\nEquity metrics (equity.rs)\nPurpose\nCompute per-role equity scores based on the ruleset's EQUITY constraint and the current SemesterContext.\n\nPublic API\n- compute_equity(assignments: &[Assignment], ctx: &SemesterContext, ruleset: &RuleSet) -> EquityReport\n\nOutput types\n- EquityScore { role: String, activities: Vec<ActivityCode>, weighted_stddev: f64, per_physician: HashMap<String, f64> }\n- EquityReport { scores: Vec<EquityScore> } with overall_deviation() helper.\n\nImplementation notes\n- Extracts \"categories\" and optional part_time_impact from the EQUITY rule parameters, computes per-physician scaled counts (count / activity_rate), computes stddev, and returns rounded metrics.\n\nInter-module connections (high-level)\nMermaid diagram: overall call direction (small, 7 nodes)\ngraph LR\n BIND[bindings.rs]\n SOLVER[solver.rs]\n LOCAL[local_search.rs]\n CPSAT[cpsat.rs]\n CONSTR[constraint.rs]\n EQUITY[equity.rs]\n PERSIST[persistence]\n\n BIND --> SOLVER\n BIND --> CONSTR\n BIND --> EQUITY\n BIND --> PERSIST\n SOLVER --> LOCAL\n SOLVER --> CPSAT\n SOLVER --> CONSTR\n\nNotes:\n- wasm_solve calls solver::solve which orchestrates solver strategies (local_search for large realistic scenarios, cpsat for small models / objective testing). The call graph indicates solver.rs calls solve_local_search and uses cpsat helper constructs (e.g. 
add_implication, weighted_sum).\n- constraint::evaluate and equity::compute_equity are used both by wasm bindings and internally by solver/local_search to score/validate solutions.\n\nDevelopment & Contribution Guide\n\n1. Running tests\n- From the engine crate root run:\n - cargo test\n- cpsat.rs includes unit tests; local_search and constraint have test coverage across the crate (see solver.rs tests referenced in call graph).\n\n2. Adding a constraint rule type\n- Add the new ConstraintType variant in rules module.\n- Implement a checker function in constraint.rs (pattern: fn check_name(rule: &Rule, assignments: &[Assignment], idx: &PlanningIndex, ctx: &SemesterContext) -> Vec<Violation>).\n- Add a match arm in dispatch_rule to call your checker.\n- Add/update default_ruleset.json and unit tests exercising the behavior.\n\n3. Extending the CP model (cpsat.rs)\n- Use CpModel::new_bool_var/new_int_var to add variables, set bounds with fix_var.\n- Add linear constraints with add_linear_constraint/add_exactly/add_at_least/add_at_most.\n- For conditional constraints, add_implication(condition_var, condition_value, expr, lb, ub).\n- For counts and absolute penalty variables, add_abs_equality.\n- Set the objective with CpModel::minimize(LinExpr).\n- Be aware: propagation has specialized logic for sum-of-booleans and general linear constraints; ensure your added constraints follow supported patterns or extend propagate / propagate_linear accordingly.\n\n4. Tuning solver behavior\n- SolverConfig.time_limit_seconds controls wall time; max_branches limits explored nodes.\n- Branching heuristics are in select_branch_variable; schedule-aware branching uses ScheduleBrancher and solve_scheduling. Adjust scoring to change variable priorities.\n\n5. WASM considerations\n- Bindings expect/return JSON strings. 
Keep serialization stable (serde) and return error payloads consistently via error_json.\n- Use wasm_build_profile() and get_schema_sql() for frontend/environment initialization tasks.\n\n6. Performance considerations\n- cpsat is valuable for correctness and small instances or unit tests. For practical semester schedules prefer local_search; solver.rs orchestrates which method is selected (see call graph).\n- Warm-starting the branch-and-bound solver with an initial feasible solution (ScheduleBrancher.hint_var) is implemented in schedule_branch_and_bound to get usable upper bounds quickly.\n\n7. Debugging tips\n- Violations from constraint::evaluate include rule_id and half_day/physician_id when applicable — these are the first stop when tracking infeasible plans.\n- CpModel.var_names and LinExpr evaluation helpers help map solver variable indices back to logical decisions when inspecting CpSolveResult values.\n- Local_search Schedule stores staffing and quota caches for O(1) deltas; watch reassign/swap functions when investigating correctness of moves.\n\nAppendix — Key functions & types (quick reference)\n- bindings.rs:\n - wasm_solve, wasm_evaluate, wasm_compute_equity, wasm_compute_resilience, wasm_simulate, wasm_diff_plannings, wasm_serialize_planning, wasm_save_*_sql, wasm_validate_ruleset\n- constraint.rs:\n - evaluate(assignments, ruleset, ctx) -> EvaluationResult\n - Violation, EvaluationResult, build_index, dispatch_rule, check_* functions\n- cpsat.rs:\n - CpModel, LinExpr, LinearConstraint, ImplicationConstraint, Constraint::AbsEquality\n - CpModel::new_bool_var/new_int_var/fix_var/add_linear_constraint/add_exactly/add_implication/add_abs_equality/minimize\n - sum_vars, weighted_sum, solve(model, config), solve_scheduling(model, config, brancher)\n - ScheduleBrancher\n- local_search.rs:\n - ScheduleConfig (build_config), Schedule (reassign, swap), Rng, build_config\n- diff.rs:\n - diff_plannings(before, after) -> PlanningDiff\n- equity.rs:\n - 
compute_equity(assignments, ctx, ruleset) -> EquityReport\n\nIf you are editing the solver orchestration (solver.rs), the call graph indicates solve(...) calls into both local_search::solve_local_search and cpsat builders (add_implication, add_abs_equality, weighted_sum). Keep interfaces and JSON contracts stable for bindings consumers.","wasm-js-engine-integration":"# WASM & JS Engine Integration\n\nWASM & JS Engine Integration\n===========================\n\nSummary\n-------\nThis module provides the browser-side integration between the UI and the Rust planning engine compiled to WebAssembly. It consists of two main parts:\n\n- A generated WASM \"glue\" JavaScript module (ui/src/lib/wasm/cm_plan_engine.js) that handles low-level memory, string encoding/decoding and the raw exported WASM calls.\n- A higher-level TypeScript facade (ui/src/lib/engine.ts and related files) that exposes a developer-friendly API used by the rest of the application and maps domain objects to/from JSON strings for the WASM functions.\n\nThe integration enforces a single explicit initialization step (initEngine) and validates that the embedded WASM build is a release build before enabling engine operations.\n\nWhen to read this document\n- You are adding or modifying calls from the UI into the WASM engine.\n- You need to add a new exported function to the WASM module (Rust side) and wire it into the UI.\n- You are debugging memory / string encoding issues between JS and WASM.\n- You want to understand where SQL/persistence helpers come from and what the engine facade exposes.\n\nArchitecture (high level)\n-------------------------\nMermaid overview of the components and their interactions:\n\n```mermaid\ngraph LR\n A[\"engine.ts (facade)\"] --> B[\"cm_plan_engine.js (WASM glue)\"]\n B --> C[\"cm_plan_engine_bg.wasm (Rust exports)\"]\n A --> D[\"engine/persistence.ts (SQL + serializers)\"]\n E[\"db.ts / api-browser.ts / stores\"] --> A\n```\n\nKey files\n---------\n- ui/src/lib/wasm/cm_plan_engine.js\n - 
Generated WASM glue. Exports low-level functions named like get_default_ruleset, wasm_solve, wasm_simulate, wasm_validate_ruleset, etc.\n - Handles memory allocation, UTF-8 encoding/decoding, and calling wasm.exports.\n - Provides initSync(module) and default async initializer (__wbg_init / exported default) that load/instantiate the .wasm file and call wasm.__wbindgen_start().\n\n- ui/src/lib/engine.ts\n - High-level TypeScript facade used by the app (public API).\n - Exposes initEngine() and functions like solve(), evaluate(), simulate(), computeEquity(), computeResilience(), diffPlannings(), validateRuleset(), and persistence SQL/serialization helpers.\n - Re-exports types and persistence helpers; enforces initialization and release-build check.\n\n- ui/src/lib/engine/persistence.ts\n - SQL schema (SCHEMA_SQL) and SQL/serialization helpers used by the browser (sql.js).\n - Functions like savePlanningSql, saveSemesterSql, serializePlanning/deserialize, purgeOldVersionsSql.\n\n- ui/src/lib/engine/rules.ts and ui/src/lib/engine/types.ts\n - Domain types (RuleSet, Planning, SemesterContext, etc.) and helpers (buildDefaultRuleset, defaultPlanning, defaultSemesterContext).\n - default_ruleset.json is the default RuleSet serialized on disk and loaded by buildDefaultRuleset().\n\nInitialization and lifecycle\n----------------------------\n1. Call initEngine() once before invoking any other engine functions.\n - initEngine loads ui/src/lib/wasm/cm_plan_engine.js dynamically (import('./wasm/cm_plan_engine.js')), then calls the module’s default export (the async WebAssembly initializer).\n - After loading, initEngine calls wasm_build_profile() and runs assertReleaseWasm(profile) to ensure the embedded WASM was built with the Rust \"release\" profile. If not, it throws an error with WASM_RELEASE_ERROR.\n - The function caches the mapped exports (e.g., wasmSolve = wasm.wasm_solve) so the rest of the facade calls the JS glue functions directly.\n\n2. 
The module maintains a single initPromise so multiple concurrent initEngine() calls return the same Promise and initialization runs only once.\n\n3. All facade functions call ensureInit()—this throws if initEngine() hasn't completed. That prevents accidental use before initialization.\n\nImportant exported initialization utilities in the glue:\n- initSync(module): synchronous initialization for environments where you already have a WebAssembly.Module (used rarely, e.g., server-side tests).\n- default export (__wbg_init): asynchronous loader that accepts a URL/Request/Response/object and instantiates the module.\n\nString & memory handling (what to know)\n--------------------------------------\nThe glue file exposes many functions which accept JS strings and return JS strings but under the hood they operate on WASM linear memory. The pattern the glue uses:\n\n- passStringToWasm0(arg, malloc, realloc)\n - Encodes a JS string to UTF-8 into wasm.memory via malloc/realloc.\n - Updates a module-wide WASM_VECTOR_LEN with the encoded byte length.\n - Uses a cached TextEncoder and, when encodeInto is missing, provides a fallback implementation.\n - For non-ASCII characters it may perform a realloc to accommodate multi-byte characters.\n\n- getStringFromWasm0(ptr, len)\n - Decodes UTF-8 bytes from wasm.memory starting at ptr for len bytes into a JS string.\n - Uses cached TextDecoder and special handling for Safari's decode limits (see MAX_SAFARI_DECODE_BYTES and reset logic).\n\n- Memory free pattern\n - The glue functions typically call a WASM function which returns [ptr, len] for a newly-allocated JS-returned string. 
Glue then:\n - Calls getStringFromWasm0(ret[0], ret[1]) to convert to JS string.\n - Always calls wasm.__wbindgen_free(ptr, len, 1) in a finally block to free the WASM allocation.\n - Example pattern (simplified; ptr/len are declared before the try so the finally block can see them):\n let ptr = 0, len = 0;\n try {\n const ret = wasm.wasm_solve(...);\n ptr = ret[0]; len = ret[1];\n return getStringFromWasm0(ptr, len);\n } finally {\n wasm.__wbindgen_free(ptr, len, 1);\n }\n\n- Consequence for contributors:\n - When adding new exported functions on the Rust side that return strings, the glue generator will follow the same ptr/len pattern. Ensure the JS side frees allocations appropriately (the generated code does).\n\nFacade API (engine.ts)\n----------------------\nInitialization:\n- initEngine(): Promise<void>\n - Loads the WASM JS glue and .wasm, sets up the mapped function references, and validates the build profile.\n - Must be called before any other engine.* function.\n\nUtility checks:\n- ensureInit(): internal function used by all public methods to verify initialization.\n\nSchema & defaults:\n- getSchemaSql(): string\n - Returns the SQLite DDL as produced by the embedded WASM.\n - Called during DB initialization/migration (e.g., migrateIfNeeded → getSchemaSql).\n\n- getDefaultRuleset(): unknown\n - Returns the default ruleset JSON parsed into an object. Delegates to wasm.get_default_ruleset() and JSON.parse().\n\nEngine computational functions:\n- solve(ctx: unknown, ruleset: unknown, mode: string, reference: unknown, timeLimitSec: number, weekStart?: string, weekEnd?: string): unknown\n - Calls wasm_solve with JSON-serialized arguments. 
Returns a parsed result object (throws if result contains an error key).\n\n- evaluate(assignments: unknown, ruleset: unknown, ctx: unknown): unknown\n - Evaluates rules/violations for a given assignment set via wasm_evaluate and returns parsed JSON.\n\n- computeEquity(assignments: unknown, ctx: unknown, ruleset: unknown): unknown\n - Calls wasm_compute_equity; returns parsed JSON.\n\n- computeResilience(assignments: unknown, ctx: unknown, ruleset: unknown): unknown\n - Calls wasm_compute_resilience; returns parsed JSON.\n\n- simulate(ctx: unknown, ruleset: unknown, currentAssignments: unknown, hypotheticalAbsences: unknown, timeLimitSec: number): unknown\n - Calls wasm_simulate and returns parsed JSON.\n\n- diffPlannings(before: unknown, after: unknown): unknown\n - Calls wasm_diff_plannings; useful for generating PlanningDiff between two plannings.\n\n- validateRuleset(ruleset: unknown): { valid: boolean; errors: string[] }\n - Calls wasm_validate_ruleset, returns parsed validation result.\n\nPersistence / SQL helpers:\n- savePlanningSql(planning: unknown): string\n- saveRulesetSql(ruleset: unknown): string\n- saveSemesterSql(semesterId: string, ctx: unknown): string\n- serializePlanning(planning: unknown): string\n- serializeSemester(ctx: unknown): string\n\nNotes:\n- These persistence functions call into the WASM glue to generate SQL. 
The browser fallback also has similar helpers in engine/persistence.ts for environments that do not call WASM for SQL generation.\n- engine.ts also re-exports purgeOldVersionsSql from engine/persistence.ts.\n\nHow the rest of the app uses the facade\n--------------------------------------\n- DB initialization and migration call getSchemaSql() to get the DDL (e.g., src/lib/db.ts → migrateIfNeeded → getSchemaSql()).\n- API/browser persistence paths call savePlanningSql/saveSemesterSql/saveRulesetSql to get SQL statements before executing them in sql.js or another engine.\n- UI stores and default creation routines use buildDefaultRuleset() (in rules.ts) and getDefaultRuleset() (engine.ts) interchangeably — buildDefaultRuleset is a TypeScript copy of the embedded default ruleset, while getDefaultRuleset comes from WASM. createDefaultRuleset in api-browser.ts uses getDefaultRuleset().\n\nCommon pitfalls and troubleshooting\n---------------------------------\n- Initialization order\n - Always call await initEngine() before other engine.* functions. ensureInit() will throw if you forget.\n\n- Build profile check\n - initEngine calls wasm_build_profile() and requires the profile string to be 'release'. If you run a non-release WASM build in the browser, initEngine will throw with WASM_RELEASE_ERROR. Rebuild with the release target (the repository's README / build scripts include npm run wasm:build).\n\n- String encoding / memory errors\n - The glue does explicit malloc/realloc and __wbindgen_free calls. If you see corrupted strings, memory access errors, or leaks, confirm:\n - You are using the generated glue as-is (don't hand-edit generated glue unless you understand JS/WASM memory).\n - The Rust export ABI matches the generated glue (regenerate glue after changing Rust exports).\n - For long-running decode usage, the glue resets TextDecoder occasionally (Safari workaround).\n\n- Async vs sync initialization\n - Use the async initEngine in the browser. 
initSync(module) exists for environments where a compiled WebAssembly.Module is available and you prefer synchronous initialization, e.g., some node tests.\n\nContributing: adding new engine exports\n---------------------------------------\nWhen adding a new Rust function you want accessible from the UI:\n\n1. Add the function to the Rust crate and export it via wasm-bindgen (matching existing patterns that return strings or JSON).\n2. Rebuild the WASM artifact and regenerate the JS glue (project has a build script; use the repository's wasm build command).\n3. ui/src/lib/wasm/cm_plan_engine.js will be (re)generated by the build; then:\n - Add the appropriate typed import entry in ui/src/lib/engine.ts via the named imports from './wasm/cm_plan_engine'.\n - Create a new local variable (e.g., let wasmNewFn: typeof WasmNewFn) in engine.ts and populate it in initEngine() the same way other functions are mapped:\n wasmNewFn = wasm.wasm_new_fn;\n - Add a typed wrapper that serializes inputs with JSON.stringify and parses the returned JSON (mirroring the other wrapper functions).\n4. Ensure any new return values follow the expected ptr/len pattern, so glue frees memory in finally blocks.\n\nTesting & debugging tips\n------------------------\n- Browser console:\n - If initialization fails due to MIME type issues with instantiateStreaming, the glue falls back to instantiate and logs a warning. 
See __wbg_load error handling.\n - Check for the WASM release profile string via console if initEngine throws.\n- Unit tests:\n - For pure TS helpers (engine/persistence.ts, rules.ts, types.ts) write unit tests without initializing WASM.\n - For integration tests that require WASM behavior, use initSync with a precompiled WebAssembly.Module or run initEngine in a test harness that serves the .wasm file with the correct MIME type.\n\nDomain types & where to find them\n--------------------------------\n- All shared domain types are in ui/src/lib/engine/types.ts (Planning, SemesterContext, Assignment, SimulationResult, SolverResult, etc.). Use these types to type-check calls to the engine facade or to assert the shape of returned objects.\n- RuleSet and related rule helpers live in ui/src/lib/engine/rules.ts; default ruleset JSON is in ui/src/lib/engine/default_ruleset.json and buildDefaultRuleset() returns a typed clone.\n\nMemory & performance considerations\n----------------------------------\n- The engine uses JSON serialization as its interoperability format: every facade call JSON.stringify()s JS objects and the WASM returns JSON strings. This is convenient but can be costly for very large objects. 
If you need to optimize:\n - Profile the size and frequency of calls; consider batching or reducing payload size.\n - For internal tooling, you might add binary/data-structured APIs on the Rust side, but that requires careful design and changes to the glue.\n\nReference mapping (summary of typical export names)\n--------------------------------------------------\n- get_default_ruleset ↔ engine.getDefaultRuleset\n- get_schema_sql ↔ engine.getSchemaSql\n- wasm_build_profile ↔ used during initEngine to assert release build\n- wasm_solve ↔ engine.solve\n- wasm_evaluate ↔ engine.evaluate\n- wasm_compute_equity ↔ engine.computeEquity\n- wasm_compute_resilience ↔ engine.computeResilience\n- wasm_simulate ↔ engine.simulate\n- wasm_diff_plannings ↔ engine.diffPlannings\n- wasm_validate_ruleset ↔ engine.validateRuleset\n- wasm_save_planning_sql / wasm_save_ruleset_sql / wasm_save_semester_sql ↔ engine.savePlanningSql / engine.saveRulesetSql / engine.saveSemesterSql\n- wasm_serialize_planning / wasm_serialize_semester ↔ engine.serializePlanning / engine.serializeSemester\n\nContact points in the codebase\n------------------------------\n- Callers that rely on engine API:\n - src/lib/db.ts: openDatabaseBytes/openDatabaseFile → migrateIfNeeded → getSchemaSql()\n - src/lib/api-browser.ts: savePlanning/saveSemester/saveRuleset → savePlanningSql/saveSemesterSql/saveRulesetSql\n - lib/stores/planning.svelte.ts: loadActiveRuleset → buildDefaultRuleset (TS-side), createNewSemester → defaultActivities\n - Various UI screens and CLI tests call solve/evaluate/simulate/computeEquity.\n\nFinal notes\n-----------\n- Keep the generated glue (cm_plan_engine.js) in sync with the Rust build. 
Do not hand-edit it unless you regenerate the corresponding Rust/WASM build.\n- Prefer using the typed facade in engine.ts for app code; it sanitizes JSON inputs/outputs, enforces initialization, and performs the release-profile safety check.\n- For any change affecting the WASM exports or the JSON schema between JS and Rust, update both sides and add tests to ensure compatibility.","web-ui":"# Web UI\n\nWeb UI — module documentation\n=============================\n\nPurpose\n-------\nThe Web UI module implements the client-side user interface and the client API layer for the CM planning application. It contains:\n\n- A lightweight UI theme (app.css) and page template (app.html).\n- Two alternative API implementations:\n - api-server.ts — a thin HTTP client that talks to the real backend at /api.\n - api-browser.ts — a browser-only implementation backed by the WASM engine and an in-memory sql.js database. This is used in the local/offline variant.\n- A small set of Svelte components for core editing flows (calendar of absences, weekly grid, modals, audit view, etc.).\n- Utilities for import/export (CSV/XLSX parsing, ICS/HTML exports) and local persistence/audit.\n\nAt build time the module resolution for $lib/api is replaced by a Vite plugin to point to either api-server.ts or api-browser.ts depending on the CM_VARIANT build flag. The consumer code always imports $lib/api.\n\nArchitecture overview\n--------------------\n- UI components interact primarily with the central planning store (lib/stores/planning.svelte).\n- Components call the API layer via the $lib/api barrel. 
That barrel resolves to either the server client (api-server.ts) or the browser-backed client (api-browser.ts).\n- The browser API implementation calls into:\n - the engine (./engine) — which exposes solve/evaluate/saveXxxSql helpers and default rulesets,\n - the local DB layer (./db) — a thin sql.js wrapper,\n - XLSX (SheetJS) for import parsing.\n\nSmall architecture diagram (simplified)\n--------------------------------------\n```mermaid\nflowchart LR\n UI[\"UI components (Svelte)\"]\n Store[\"planning store\"]\n ApiBarrel[\"$lib/api (barrel)\"]\n Server[\"api-server.ts\\n(HTTP /api)\"]\n Browser[\"api-browser.ts\\n(WASM + sql.js)\"]\n Engine[\"./engine (WASM)\"]\n DB[\"./db (sql.js)\"]\n\n UI --> Store\n Store --> ApiBarrel\n ApiBarrel --> Server\n ApiBarrel --> Browser\n Browser --> Engine\n Browser --> DB\n Browser --> XLSX[SheetJS]\n```\n\nKey responsibilities\n--------------------\n\n1. API layer (server vs browser)\n - api-server.ts\n - Request helper: request<T>(method, path, body?, query?)\n - Uses fetch() to call /api endpoints, attaches Authorization header when set.\n - Exposes typed functions used across the UI: login, me, listPlannings, getPlanning, savePlanning, listSemesters, getSemester, listAbsences, replaceAbsences, addAbsence, listGuards, import* endpoints, export* endpoints, overtimeAlerts, purgeOldVersions, queryAudit, etc.\n - setToken / getToken: manages client-side token used for Authorization header.\n - api-browser.ts\n - Implements the same public API surface but performs operations locally:\n - Persistence via db.execute / db.queryAll / db.queryOne (sql.js).\n - Calls to the engine via engine.solve(), engine.evaluate(), engine.savePlanningSql(), engine.saveSemesterSql(), engine.saveRulesetSql(), engine.getDefaultRuleset(), engine.diffPlannings(), etc.\n - import parsing with SheetJS (XLSX) and helper parsing functions (parseCsv, parseSheet, parseSwissDate).\n - Exports helpers: exportHtml, exportIcs, exportJson.\n - Audit logging via 
logAudit which inserts into audit_log table.\n - Authentication is simplified to a LOCAL_USER and local token. Functions: login(), me(), listUsers(), createUser(), setCurrentAuditUser() exist but are local/no-op for server semantics.\n - Special helper execEngineStmt(json) parses JSON produced by engine.save*Sql helpers and executes SQL via db.execute.\n\n Note: $lib/api just re-exports from the active variant. See api.ts: `export * from './api-server';` (overridden by Vite plugin).\n\n2. Engine & persistence integration (browser variant)\n - The browser client relies on two internal modules:\n - engine: exposes solver/evaluator and helpers that produce SQL statements (savePlanningSql, saveSemesterSql, saveRulesetSql) and other helpers like getDefaultRuleset, validateRuleset, diffPlannings, computeEquity, computeResilience, simulate.\n - db: SQL wrapper over sql.js; functions used include queryAll, queryOne, execute.\n - execEngineStmt receives a JSON-serialized SqlStatement from engine (as produced by save*Sql) and calls db.execute(stmt.sql, stmt.params).\n\n3. Import / export\n - Parsing entry points: importFile(filename, fileBase64) and applyImportToSemester(semesterId, filename, fileBase64).\n - Helpers:\n - parseCsv: parses CSV/TSV content to string[][].\n - base64ToArrayBuffer: for XLSX binary decoding.\n - parseSheet(rows): auto-detects format via detectFormat(headers) and returns arrays of absences, assignments, guards plus warnings.\n - parseSwissDate(raw): handles ISO, DD.MM.YYYY, DD/MM/YYYY and Excel serial numbers in the typical Swiss date patterns.\n - applyImportToSemester uses addAbsence and addGuard to insert parsed records, tracks created physician names and logs an audit entry via logAudit().\n\n4. 
Exports and utilities\n - computeVaudHolidays(year): provided in both api-browser.ts and api-server.ts; both implement the Anonymous Gregorian algorithm for Easter and construct canton-specific holidays (SDS-9004).\n - exportIcs(planningId, physicianId, semesterId?): builds a minimal ICS calendar by iterating assignments in the chosen planning; uses uuid() for UIDs.\n - exportHtml: builds a simple printable HTML snapshot from the latest planning stored in the DB (browser variant).\n\n5. Audit\n - queryAudit(filters?) to read audit_log; logAudit(entry) to record audit entries. In browser variant logAudit inserts into audit_log table (with uuid(), timestamp, user lookup via _currentUsername). In server variant logAudit is a noop returning { ok: true } (server handles audit).\n\nCore files & important functions\n--------------------------------\n- ui/src/lib/api.ts\n - Barrel module; resolves to either api-server.ts or api-browser.ts in build-time configuration. Keep all UI imports as `import * as api from '$lib/api'`.\n\n- ui/src/lib/api-server.ts\n - request<T>(method, path, body?, query?): central HTTP helper.\n - setToken / getToken / setCurrentAuditUser\n - Exports: login, me, listPlannings, getPlanning, savePlanning, listPlanningVersions, listPlanningVersionDetails, setPlanningStatus, diffPlannings, solve, evaluate, evaluateDetailed, computeEquity, computeResilience, simulate, listSemesters, latestSemesterId, getSemester, saveSemester, deleteSemester, listAbsences, replaceAbsences, addAbsence, mergeAbsenceCell (placeholder), listGuards, replaceGuards, addGuard, listRulesets, getRuleset, saveRuleset, createDefaultRuleset, validateRuleset, cloneRuleset, previewRuleset, getHolidays, exportJson, exportHtml, exportIcs, overtimeAlerts, impactReport, closedOrJournal, estimateHs, lockedConflicts, purgeOldVersions, evaluateRuleset, import endpoints, queryAudit, logAudit (noop).\n\n- ui/src/lib/api-browser.ts\n - execEngineStmt(json): parse engine JSON to SQL and run via 
db.execute.\n - Authentication helpers: setCurrentAuditUser, setToken, getToken, login, me, oidcStatus, seedAdmin, listUsers, createUser, updateUserRoles, deleteUser.\n - Planning persistence: listPlannings, getPlanning, savePlanning (calls engine.savePlanningSql + execEngineStmt), listPlanningVersions, listPlanningVersionDetails, setPlanningStatus, diffPlannings (uses engine.diffPlannings).\n - Engine wrappers: solve, evaluate, computeEquity, computeResilience, simulate (call engine.* directly).\n - Semester handling: listSemesters, latestSemesterId, getSemester (also merges absences/guards into returned ctx), saveSemester (calls engine.saveSemesterSql), deleteSemester.\n - Absences & Guards: listAbsences, replaceAbsences, addAbsence, mergeAbsenceCell, listGuards, replaceGuards, addGuard.\n - Rulesets: listRulesets, getRuleset, saveRuleset (uses engine.saveRulesetSql), createDefaultRuleset, validateRuleset, cloneRuleset (calls getRuleset + uuid), previewRuleset (uses engine.evaluate).\n - Imports/Parsing: parseCsv, base64ToArrayBuffer, parseSheet, importGuardsCsv, importGuardsXlsx, importAbsencesCsv, importAbsencesXlsx, importFile, applyImportToSemester.\n - Exports: getHolidays (computeVaudHolidays), exportJson, exportHtml, exportIcs.\n - Audit: queryAudit, logAudit (inserts to audit_log).\n\nSvelte components (high-level)\n------------------------------\nBelow are the major UI components in ui/src/lib/components with their core behaviors and the names of functions they call or expect from stores/api:\n\n1. 
AbsenceCalendar.svelte\n - Purpose: native absence calendar editor (SDS-8014–8019).\n - Key state: semesterId, physicians list, absences (AbsenceRecord[]), holidays, selectedCategory.\n - Drag-select to create multiple-day absences; multi-cell selection state (dragging, dragPhysician, dragDays).\n - Uses api.listSemesters(), api.getSemester(), api.replaceAbsences(), api.importFile(), api.logAudit().\n - On save: calls api.replaceAbsences(semesterId, absences) then planning.syncAbsencesToGrid(flatRecords).\n - Parsing and import category mapping via mapImportCategory.\n - Uses savedSnapshot / dirty logic to track unsaved changes.\n - Helpers present: parse/format semester label, canCreateAbsence (disallow some categories on weekends/holidays).\n\n2. AbsenceManager.svelte\n - Simple wrapper that renders AbsenceCalendar.\n\n3. ActivityCatalog.svelte\n - Shows custom activity catalog; calls planning.addActivity and planning.removeActivity.\n - UI prevents removal of builtin activity codes (uses defaultActivities()).\n\n4. AlertPanel.svelte\n - Shows planning.alerts length and list.\n - Each alert has actions which call planning.showToast for demo interactions.\n\n5. AuditLog.svelte\n - Uses api.queryAudit() to load recent audit entries.\n - Purge buttons call an inline purgeOlderThan(days) which directly executes deletion via db.execute (local-only helper) then logs via api.logAudit() if available.\n - Authorization gating via auth.hasPermission('MANAGE_SYSTEM').\n\n6. CellModal.svelte\n - Modal overlay for editing a specific grid cell; interacts heavily with planning store:\n - planning.openModal / closeModal / assignPhysician / removePhysician / addClosure / removeClosure / togglePersonLock / setPersonNote / getPersonNote / personMark.\n - Implements OP-operator absent detection and replacement (absentInOp, opReplacements) and CONSULT_MULTI shortcut.\n - toggleClosure(prefillJustification?) prompts user and calls planning.addClosure.\n\n7. 
GardeStrip.svelte\n - A small strip to manage weekend garde names; calls planning.addGarde and planning.removeGarde.\n\n8. Grid.svelte\n - Weekly grid, keyboard navigation and drag-and-drop reassignment.\n - Interacts with GridCell.svelte (not fully shown here) and uses many planning.* helpers: filteredRows, staleWeeks, deriveWeeklyFromTemplate, dismissStaleWeek, openModal, selectCell, moveCell, canDropOnRow, personMark, getPersonNote, etc.\n - Implements drag native events to support moving assignment pills.\n\nImportant interactions and flows\n-------------------------------\n- Switching API variant:\n - $lib/api is resolved at build time by the Vite plugin; to change between server and browser modes set CM_VARIANT in the build pipeline (see vite.config.ts referenced in source). Do not import api-server or api-browser directly from UI components — always import from $lib/api.\n\n- Save planning (browser variant)\n - savePlanning(planning) -> engine.savePlanningSql(planning) -> execEngineStmt(JSON) -> db.execute(SQL).\n - Referenced in call graph: savePlanning (api-browser.ts) → savePlanningSql (engine) → execEngineStmt -> db.\n\n- Import flows\n - importAbsencesCsv → latestSemesterId → listSemesters → db.queryAll → getDb (see call graph: ImportAbsencesCsv → GetDb).\n - importFile uses parseSheet(rows). parseSheet detects format (guards/absences/planning) using header heuristics and parses dates via parseSwissDate().\n\n- Purge old versions (browser)\n - purgeOldVersions(planningId, keepCount) queries existing versions, calls engine.purgeOldVersionsSql(planningId, keepCount) to get an SQL stmt and executes it via db.execute.\n\nExtension points and where to modify behavior\n---------------------------------------------\n- Adding new API endpoints (server)\n - Update api-server.ts to add a typed wrapper and update server backend implementation. 
UI should call the new function via $lib/api so both variants can share names.\n- Extending browser behavior\n - api-browser.ts is the place to implement new local behaviors:\n - To persist new structures in the browser DB, either call engine.save*Sql helpers (if they exist) and execEngineStmt, or run db.execute with explicit SQL.\n - For DB schema changes see src/lib/engine.ts (getSchemaSql) and src/lib/db.ts migrateIfNeeded logic.\n- Import formats\n - parseSheet in api-browser.ts is the canonical place to add new heuristics for imported spreadsheets. Use detectFormat(headers) and extend header regexes (GUARD_HEADERS, ABSENCE_HEADERS) or add column mappings.\n - parseSwissDate implements common Swiss date formats and Excel serial numbers; extend here if other date formats are required.\n- Holidays\n - computeVaudHolidays(year) is implemented both server and browser side; modify here to change holiday rules.\n- Audit\n - Browser variant writes audit entries into the local audit_log table via logAudit. Server variant expects the backend to provide audit. If you extend log fields in the DB, update api-browser.logAudit and server-side API accordingly.\n\nDebugging notes\n---------------\n- Inspecting DB (browser variant)\n - The browser implementation uses sql.js. To inspect the database, you can:\n - add temporary exports in src/lib/db.ts to expose the underlying sql.js Database object, or\n - run console.log on rows returned by db.queryAll or add debugging functions that dump table schema.\n- Engine integration\n - api-browser.execEngineStmt accepts JSON generated by engine.save*Sql helpers and runs the SQL. If persistence isn’t behaving as expected, log the JSON returned by engine.savePlanningSql/planning to inspect the SQL and params.\n- Import/parse problems\n - parseSheet returns warnings array; the UI surfaces warnings via planning.showToast or import result. 
Use those warnings to debug mismatches in header detection.\n- Authentication\n - api-browser uses a local token and LOCAL_USER; setCurrentAuditUser(username) is called by auth store to set audit user. api-server.setToken should be called after successful login to ensure Authorization header is attached.\n\nCall graph & important execution flows (high level)\n---------------------------------------------------\n- savePlanning (api-browser.ts) → engine.savePlanningSql(planning) → execEngineStmt → db.execute\n- importAbsencesCsv/importGuardsXlsx/importFile → parseSheet → addAbsence/addGuard → db.execute\n- previewRuleset (api-browser.ts) → getRuleset -> engine.evaluate(...) — this evaluates a planning against a ruleset locally for preview\n- purgeOldVersions(planningId, keepCount) → engine.purgeOldVersionsSql -> db.execute\n- Multiple store routines call into api.* functions: planning store uses listSemesters/getSemester/saveSemester/listPlannings/etc.\n\nDeveloper checklist for common tasks\n-----------------------------------\n- Add new client API function:\n 1. Add function signature and implementation to both api-server.ts (HTTP wrapper) and api-browser.ts (local behavior) keeping identical exported names and types.\n 2. Update any tests or UI components to call via $lib/api.\n 3. If behavior requires DB schema changes in browser mode, add corresponding engine SQL generation (or direct db.execute) and migration logic in src/lib/db.ts (migrateIfNeeded).\n\n- Add a new import pattern:\n 1. Update detectFormat and parseSheet in api-browser.ts.\n 2. Ensure parseSwissDate supports any new date representations.\n 3. Update applyImportToSemester to map parsed objects to addAbsence/addGuard appropriately.\n\n- Change holidays or canton rules:\n 1. Update computeVaudHolidays in api-browser.ts and api-server.ts for parity.\n 2. If UI depends on holiday metadata (AbsenceCalendar), no additional changes needed.\n\n- Debugging planner operations:\n 1. 
Use browser devtools console to inspect data returned by api functions.\n 2. Add console.log in execEngineStmt to see SQL statements produced by engine.*Sql helpers.\n 3. Inspect planning store state (planning.svelte) for currentWeek, closures, alerts, and dirty flags.\n\nNotes and gotchas\n-----------------\n- Always import API through $lib/api. The Vite plugin will replace this single import at build time; importing api-server or api-browser directly bypasses the variant selection.\n- The browser implementation keeps simplified authentication — tests that rely on auth/roles should be aware LOCAL_USER is used with ADMIN/MANAGER roles.\n- parseSheet is tolerant but heuristic — production imports often require manual mapping of names to internal physician IDs; api-browser currently stores physician identifiers as provided (physician_name used as physician_id in many places), so mapping is required for robust imports.\n- execEngineStmt is a critical junction: engine.*Sql helpers return JSON-serialized SQL statements that are executed by the DB. Any change in engine.save*Sql output must be validated against db.execute semantics.\n\nWhere things live in source tree\n-------------------------------\n- API layer: ui/src/lib/api.ts, api-server.ts, api-browser.ts\n- Engine integration: ui/src/lib/engine (WASM JS bindings)\n- DB wrapper: ui/src/lib/db.ts (sql.js wrapper)\n- UI components: ui/src/lib/components/*.svelte (AbsenceCalendar.svelte, Grid.svelte, CellModal.svelte, AuditLog.svelte, etc.)\n- Styles & HTML: ui/src/app.css, ui/src/app.html\n- Utilities: parse & import logic in api-browser.ts; uuid helper referenced via uuid() in operations.\n\nIf you are adding features or fixing bugs, follow the \"Extension points\" and \"Developer checklist\" above and ensure parity between the server and browser implementations for API function names and behavior where relevant."};
var TREE = [{"name":"Rust Engine","slug":"rust-engine","files":["engine/src/bindings.rs","engine/src/constraint.rs","engine/src/cpsat.rs","engine/src/default_ruleset.json","engine/src/diff.rs","engine/src/equity.rs","engine/src/lib.rs","engine/src/local_search.rs","engine/src/persistence.rs","engine/src/repair.rs","engine/src/resilience.rs","engine/src/rules.rs","engine/src/simulation.rs","engine/src/solver.rs","engine/src/types.rs"]},{"name":"Python Backend API","slug":"python-backend-api","files":["src/cm_plan/__init__.py","src/cm_plan/api/app.py","src/cm_plan/api/auth_service.py","src/cm_plan/api/deps.py","src/cm_plan/api/engine_app.py","src/cm_plan/api/oidc_service.py","src/cm_plan/api/routes.py","src/cm_plan/api/routes_auth.py","src/cm_plan/api/routes_persistence.py","src/cm_plan/api/schemas.py"]},{"name":"Backend Domain & Ports","slug":"backend-domain-ports","files":["src/cm_plan/domain/__init__.py","src/cm_plan/domain/auth.py","src/cm_plan/domain/ports.py","src/cm_plan/domain/rules.py","src/cm_plan/domain/types.py"]},{"name":"Backend Adapters & Persistence","slug":"backend-adapters-persistence","files":["src/cm_plan/adapters/__init__.py","src/cm_plan/adapters/pg_adapter.py","src/cm_plan/adapters/sqlite_adapter.py"]},{"name":"Python Engine (Business Logic)","slug":"python-engine-business-logic","files":["src/cm_plan/engine/__init__.py","src/cm_plan/engine/constraint.py","src/cm_plan/engine/diff.py","src/cm_plan/engine/equity.py","src/cm_plan/engine/exports.py","src/cm_plan/engine/filters/__init__.py","src/cm_plan/engine/filters/absences_chuv.py","src/cm_plan/engine/filters/absences_generic.py","src/cm_plan/engine/filters/guards_generic.py","src/cm_plan/engine/filters/guards_pdf_chuv.py","src/cm_plan/engine/filters/weekly_plan_chuv.py","src/cm_plan/engine/imports.py","src/cm_plan/engine/repair.py","src/cm_plan/engine/resilience.py","src/cm_plan/engine/rotation.py","src/cm_plan/engine/simulation.py","src/cm_plan/engine/solver.py"]},{"name":"WASM & JS Engine 
Integration","slug":"wasm-js-engine-integration","files":["ui/src/lib/wasm/cm_plan_engine.js","ui/src/lib/engine.ts","ui/src/lib/engine/persistence.ts","ui/src/lib/engine/rules.ts","ui/src/lib/engine/types.ts","ui/src/lib/engine/default_ruleset.json"]},{"name":"Web UI","slug":"web-ui","files":["ui/src/app.css","ui/src/app.html","ui/src/lib/api-browser.ts","ui/src/lib/api-server.ts","ui/src/lib/api.ts","ui/src/lib/components/AbsenceCalendar.svelte","ui/src/lib/components/AbsenceManager.svelte","ui/src/lib/components/ActivityCatalog.svelte","ui/src/lib/components/AlertPanel.svelte","ui/src/lib/components/AuditLog.svelte","ui/src/lib/components/CellModal.svelte","ui/src/lib/components/GardeStrip.svelte","ui/src/lib/components/Grid.svelte","ui/src/lib/components/GridCell.svelte","ui/src/lib/components/GuardRoster.svelte","ui/src/lib/components/HistoryView.svelte","ui/src/lib/components/HolidayEditor.svelte","ui/src/lib/components/HsGauge.svelte","ui/src/lib/components/ImpactReport.svelte","ui/src/lib/components/ImportPanel.svelte","ui/src/lib/components/MetricsStrip.svelte","ui/src/lib/components/NewSemesterModal.svelte","ui/src/lib/components/OptimizeBanner.svelte","ui/src/lib/components/PhysicianManager.svelte","ui/src/lib/components/RoomManager.svelte","ui/src/lib/components/RuleEditor.svelte","ui/src/lib/components/Sidebar.svelte","ui/src/lib/components/SolverOverlay.svelte","ui/src/lib/components/StandardTemplateView.svelte","ui/src/lib/components/Toast.svelte","ui/src/lib/components/Topbar.svelte","ui/src/lib/components/UserManager.svelte","ui/src/lib/components/VersionDiffViewer.svelte","ui/src/lib/data.ts","ui/src/lib/db.ts","ui/src/lib/planningBridge.ts","ui/src/lib/planningRecovery.ts","ui/src/lib/solver-id-map.ts","ui/src/lib/solver.worker.ts","ui/src/lib/stores/auth.svelte.ts","ui/src/lib/stores/planning.svelte.ts","ui/src/lib/types.ts","ui/src/lib/uuid.ts","ui/src/routes/+layout.svelte","ui/src/routes/+page.svelte","ui/src/routes/+page.ts"]},{"name":"Other"
,"slug":"other","files":[],"children":[{"name":"Other — AGENTS.md","slug":"other-agents-md","files":["AGENTS.md"]},{"name":"Other — README.md","slug":"other-readme-md","files":["README.md"]},{"name":"Other — docker-compose.postgres.yml","slug":"other-docker-compose-postgres-yml","files":["docker-compose.postgres.yml"]},{"name":"Other — docker-compose.self-contained.yml","slug":"other-docker-compose-self-contained-yml","files":["docker-compose.self-contained.yml"]},{"name":"Other — docker-compose.yml","slug":"other-docker-compose-yml","files":["docker-compose.yml"]},{"name":"Other — docker-backend","slug":"other-docker-backend","files":["docker/backend/Dockerfile","docker/backend/Dockerfile.dev"]},{"name":"Other — docker-engine","slug":"other-docker-engine","files":["docker/engine/Dockerfile"]},{"name":"Other — docker-ui","slug":"other-docker-ui","files":["docker/ui/Dockerfile","docker/ui/Dockerfile.dev","docker/ui/Dockerfile.self-contained","docker/ui/nginx.conf","docker/ui/nginx.self-contained.conf"]},{"name":"Other — docs","slug":"other-docs","files":["docs/SDS-traceability-matrix.md","docs/SDS_Planification_CM.md"]},{"name":"Other — engine","slug":"other-engine","files":["engine/Cargo.toml"]},{"name":"Other — pyproject.toml","slug":"other-pyproject-toml","files":["pyproject.toml"]},{"name":"Other — src-cm_plan","slug":"other-src-cm-plan","files":["src/cm_plan/api/__init__.py"]},{"name":"Other — tests","slug":"other-tests","files":["tests/__init__.py","tests/conftest.py","tests/test_api.py","tests/test_auth.py","tests/test_constraints.py","tests/test_diff.py","tests/test_equity.py","tests/test_exports.py","tests/test_filters.py","tests/test_imports.py","tests/test_persistence.py","tests/test_resilience.py","tests/test_rotation.py","tests/test_rules.py","tests/test_simulation.py","tests/test_solver.py","tests/test_types.py"]},{"name":"Other — 
ui","slug":"other-ui","files":["ui/package.json","ui/svelte.config.js","ui/tsconfig.json","ui/vite.config.ts","ui/vitest.config.ts"]},{"name":"Other — ui-scripts","slug":"other-ui-scripts","files":["ui/scripts/build-engine-wasm.mjs","ui/scripts/bundle-single.js","ui/scripts/verify-engine-wasm-release.mjs"]},{"name":"Other — ui-src","slug":"other-ui-src","files":["ui/src/lib/wasm/package.json"]}]}];
var META = {"fromCommit":"c0422c0d1a92a94242d08a27799e3490f2ff3f2f","generatedAt":"2026-04-16T21:27:28.555Z","model":"gpt-5-mini","moduleFiles":{"Rust Engine":["engine/src/bindings.rs","engine/src/constraint.rs","engine/src/cpsat.rs","engine/src/default_ruleset.json","engine/src/diff.rs","engine/src/equity.rs","engine/src/lib.rs","engine/src/local_search.rs","engine/src/persistence.rs","engine/src/repair.rs","engine/src/resilience.rs","engine/src/rules.rs","engine/src/simulation.rs","engine/src/solver.rs","engine/src/types.rs"],"Python Backend API":["src/cm_plan/__init__.py","src/cm_plan/api/app.py","src/cm_plan/api/auth_service.py","src/cm_plan/api/deps.py","src/cm_plan/api/engine_app.py","src/cm_plan/api/oidc_service.py","src/cm_plan/api/routes.py","src/cm_plan/api/routes_auth.py","src/cm_plan/api/routes_persistence.py","src/cm_plan/api/schemas.py"],"Backend Domain & Ports":["src/cm_plan/domain/__init__.py","src/cm_plan/domain/auth.py","src/cm_plan/domain/ports.py","src/cm_plan/domain/rules.py","src/cm_plan/domain/types.py"],"Backend Adapters & Persistence":["src/cm_plan/adapters/__init__.py","src/cm_plan/adapters/pg_adapter.py","src/cm_plan/adapters/sqlite_adapter.py"],"Python Engine (Business Logic)":["src/cm_plan/engine/__init__.py","src/cm_plan/engine/constraint.py","src/cm_plan/engine/diff.py","src/cm_plan/engine/equity.py","src/cm_plan/engine/exports.py","src/cm_plan/engine/filters/__init__.py","src/cm_plan/engine/filters/absences_chuv.py","src/cm_plan/engine/filters/absences_generic.py","src/cm_plan/engine/filters/guards_generic.py","src/cm_plan/engine/filters/guards_pdf_chuv.py","src/cm_plan/engine/filters/weekly_plan_chuv.py","src/cm_plan/engine/imports.py","src/cm_plan/engine/repair.py","src/cm_plan/engine/resilience.py","src/cm_plan/engine/rotation.py","src/cm_plan/engine/simulation.py","src/cm_plan/engine/solver.py"],"WASM & JS Engine 
Integration":["ui/src/lib/wasm/cm_plan_engine.js","ui/src/lib/engine.ts","ui/src/lib/engine/persistence.ts","ui/src/lib/engine/rules.ts","ui/src/lib/engine/types.ts","ui/src/lib/engine/default_ruleset.json"],"Web UI":["ui/src/app.css","ui/src/app.html","ui/src/lib/api-browser.ts","ui/src/lib/api-server.ts","ui/src/lib/api.ts","ui/src/lib/components/AbsenceCalendar.svelte","ui/src/lib/components/AbsenceManager.svelte","ui/src/lib/components/ActivityCatalog.svelte","ui/src/lib/components/AlertPanel.svelte","ui/src/lib/components/AuditLog.svelte","ui/src/lib/components/CellModal.svelte","ui/src/lib/components/GardeStrip.svelte","ui/src/lib/components/Grid.svelte","ui/src/lib/components/GridCell.svelte","ui/src/lib/components/GuardRoster.svelte","ui/src/lib/components/HistoryView.svelte","ui/src/lib/components/HolidayEditor.svelte","ui/src/lib/components/HsGauge.svelte","ui/src/lib/components/ImpactReport.svelte","ui/src/lib/components/ImportPanel.svelte","ui/src/lib/components/MetricsStrip.svelte","ui/src/lib/components/NewSemesterModal.svelte","ui/src/lib/components/OptimizeBanner.svelte","ui/src/lib/components/PhysicianManager.svelte","ui/src/lib/components/RoomManager.svelte","ui/src/lib/components/RuleEditor.svelte","ui/src/lib/components/Sidebar.svelte","ui/src/lib/components/SolverOverlay.svelte","ui/src/lib/components/StandardTemplateView.svelte","ui/src/lib/components/Toast.svelte","ui/src/lib/components/Topbar.svelte","ui/src/lib/components/UserManager.svelte","ui/src/lib/components/VersionDiffViewer.svelte","ui/src/lib/data.ts","ui/src/lib/db.ts","ui/src/lib/planningBridge.ts","ui/src/lib/planningRecovery.ts","ui/src/lib/solver-id-map.ts","ui/src/lib/solver.worker.ts","ui/src/lib/stores/auth.svelte.ts","ui/src/lib/stores/planning.svelte.ts","ui/src/lib/types.ts","ui/src/lib/uuid.ts","ui/src/routes/+layout.svelte","ui/src/routes/+page.svelte","ui/src/routes/+page.ts"],"Other":["AGENTS.md","README.md","docker-compose.postgres.yml","docker-compose.self-contained
.yml","docker-compose.yml","docker/backend/Dockerfile","docker/backend/Dockerfile.dev","docker/engine/Dockerfile","docker/ui/Dockerfile","docker/ui/Dockerfile.dev","docker/ui/Dockerfile.self-contained","docker/ui/nginx.conf","docker/ui/nginx.self-contained.conf","docs/SDS-traceability-matrix.md","docs/SDS_Planification_CM.md","engine/Cargo.toml","pyproject.toml","src/cm_plan/api/__init__.py","tests/__init__.py","tests/conftest.py","tests/test_api.py","tests/test_auth.py","tests/test_constraints.py","tests/test_diff.py","tests/test_equity.py","tests/test_exports.py","tests/test_filters.py","tests/test_imports.py","tests/test_persistence.py","tests/test_resilience.py","tests/test_rotation.py","tests/test_rules.py","tests/test_simulation.py","tests/test_solver.py","tests/test_types.py","ui/package.json","ui/svelte.config.js","ui/tsconfig.json","ui/vite.config.ts","ui/vitest.config.ts","ui/scripts/build-engine-wasm.mjs","ui/scripts/bundle-single.js","ui/scripts/verify-engine-wasm-release.mjs","ui/src/lib/wasm/package.json"],"Other — AGENTS.md":["AGENTS.md"],"Other — README.md":["README.md"],"Other — docker-compose.postgres.yml":["docker-compose.postgres.yml"],"Other — docker-compose.self-contained.yml":["docker-compose.self-contained.yml"],"Other — docker-compose.yml":["docker-compose.yml"],"Other — docker-backend":["docker/backend/Dockerfile","docker/backend/Dockerfile.dev"],"Other — docker-engine":["docker/engine/Dockerfile"],"Other — docker-ui":["docker/ui/Dockerfile","docker/ui/Dockerfile.dev","docker/ui/Dockerfile.self-contained","docker/ui/nginx.conf","docker/ui/nginx.self-contained.conf"],"Other — docs":["docs/SDS-traceability-matrix.md","docs/SDS_Planification_CM.md"],"Other — engine":["engine/Cargo.toml"],"Other — pyproject.toml":["pyproject.toml"],"Other — src-cm_plan":["src/cm_plan/api/__init__.py"],"Other — 
tests":["tests/__init__.py","tests/conftest.py","tests/test_api.py","tests/test_auth.py","tests/test_constraints.py","tests/test_diff.py","tests/test_equity.py","tests/test_exports.py","tests/test_filters.py","tests/test_imports.py","tests/test_persistence.py","tests/test_resilience.py","tests/test_rotation.py","tests/test_rules.py","tests/test_simulation.py","tests/test_solver.py","tests/test_types.py"],"Other — ui":["ui/package.json","ui/svelte.config.js","ui/tsconfig.json","ui/vite.config.ts","ui/vitest.config.ts"],"Other — ui-scripts":["ui/scripts/build-engine-wasm.mjs","ui/scripts/bundle-single.js","ui/scripts/verify-engine-wasm-release.mjs"],"Other — ui-src":["ui/src/lib/wasm/package.json"]},"moduleTree":[{"name":"Rust Engine","slug":"rust-engine","files":["engine/src/bindings.rs","engine/src/constraint.rs","engine/src/cpsat.rs","engine/src/default_ruleset.json","engine/src/diff.rs","engine/src/equity.rs","engine/src/lib.rs","engine/src/local_search.rs","engine/src/persistence.rs","engine/src/repair.rs","engine/src/resilience.rs","engine/src/rules.rs","engine/src/simulation.rs","engine/src/solver.rs","engine/src/types.rs"]},{"name":"Python Backend API","slug":"python-backend-api","files":["src/cm_plan/__init__.py","src/cm_plan/api/app.py","src/cm_plan/api/auth_service.py","src/cm_plan/api/deps.py","src/cm_plan/api/engine_app.py","src/cm_plan/api/oidc_service.py","src/cm_plan/api/routes.py","src/cm_plan/api/routes_auth.py","src/cm_plan/api/routes_persistence.py","src/cm_plan/api/schemas.py"]},{"name":"Backend Domain & Ports","slug":"backend-domain-ports","files":["src/cm_plan/domain/__init__.py","src/cm_plan/domain/auth.py","src/cm_plan/domain/ports.py","src/cm_plan/domain/rules.py","src/cm_plan/domain/types.py"]},{"name":"Backend Adapters & Persistence","slug":"backend-adapters-persistence","files":["src/cm_plan/adapters/__init__.py","src/cm_plan/adapters/pg_adapter.py","src/cm_plan/adapters/sqlite_adapter.py"]},{"name":"Python Engine (Business 
Logic)","slug":"python-engine-business-logic","files":["src/cm_plan/engine/__init__.py","src/cm_plan/engine/constraint.py","src/cm_plan/engine/diff.py","src/cm_plan/engine/equity.py","src/cm_plan/engine/exports.py","src/cm_plan/engine/filters/__init__.py","src/cm_plan/engine/filters/absences_chuv.py","src/cm_plan/engine/filters/absences_generic.py","src/cm_plan/engine/filters/guards_generic.py","src/cm_plan/engine/filters/guards_pdf_chuv.py","src/cm_plan/engine/filters/weekly_plan_chuv.py","src/cm_plan/engine/imports.py","src/cm_plan/engine/repair.py","src/cm_plan/engine/resilience.py","src/cm_plan/engine/rotation.py","src/cm_plan/engine/simulation.py","src/cm_plan/engine/solver.py"]},{"name":"WASM & JS Engine Integration","slug":"wasm-js-engine-integration","files":["ui/src/lib/wasm/cm_plan_engine.js","ui/src/lib/engine.ts","ui/src/lib/engine/persistence.ts","ui/src/lib/engine/rules.ts","ui/src/lib/engine/types.ts","ui/src/lib/engine/default_ruleset.json"]},{"name":"Web UI","slug":"web-ui","files":["ui/src/app.css","ui/src/app.html","ui/src/lib/api-browser.ts","ui/src/lib/api-server.ts","ui/src/lib/api.ts","ui/src/lib/components/AbsenceCalendar.svelte","ui/src/lib/components/AbsenceManager.svelte","ui/src/lib/components/ActivityCatalog.svelte","ui/src/lib/components/AlertPanel.svelte","ui/src/lib/components/AuditLog.svelte","ui/src/lib/components/CellModal.svelte","ui/src/lib/components/GardeStrip.svelte","ui/src/lib/components/Grid.svelte","ui/src/lib/components/GridCell.svelte","ui/src/lib/components/GuardRoster.svelte","ui/src/lib/components/HistoryView.svelte","ui/src/lib/components/HolidayEditor.svelte","ui/src/lib/components/HsGauge.svelte","ui/src/lib/components/ImpactReport.svelte","ui/src/lib/components/ImportPanel.svelte","ui/src/lib/components/MetricsStrip.svelte","ui/src/lib/components/NewSemesterModal.svelte","ui/src/lib/components/OptimizeBanner.svelte","ui/src/lib/components/PhysicianManager.svelte","ui/src/lib/components/RoomManager.svelte","ui/src/
lib/components/RuleEditor.svelte","ui/src/lib/components/Sidebar.svelte","ui/src/lib/components/SolverOverlay.svelte","ui/src/lib/components/StandardTemplateView.svelte","ui/src/lib/components/Toast.svelte","ui/src/lib/components/Topbar.svelte","ui/src/lib/components/UserManager.svelte","ui/src/lib/components/VersionDiffViewer.svelte","ui/src/lib/data.ts","ui/src/lib/db.ts","ui/src/lib/planningBridge.ts","ui/src/lib/planningRecovery.ts","ui/src/lib/solver-id-map.ts","ui/src/lib/solver.worker.ts","ui/src/lib/stores/auth.svelte.ts","ui/src/lib/stores/planning.svelte.ts","ui/src/lib/types.ts","ui/src/lib/uuid.ts","ui/src/routes/+layout.svelte","ui/src/routes/+page.svelte","ui/src/routes/+page.ts"]},{"name":"Other","slug":"other","files":[],"children":[{"name":"Other — AGENTS.md","slug":"other-agents-md","files":["AGENTS.md"]},{"name":"Other — README.md","slug":"other-readme-md","files":["README.md"]},{"name":"Other — docker-compose.postgres.yml","slug":"other-docker-compose-postgres-yml","files":["docker-compose.postgres.yml"]},{"name":"Other — docker-compose.self-contained.yml","slug":"other-docker-compose-self-contained-yml","files":["docker-compose.self-contained.yml"]},{"name":"Other — docker-compose.yml","slug":"other-docker-compose-yml","files":["docker-compose.yml"]},{"name":"Other — docker-backend","slug":"other-docker-backend","files":["docker/backend/Dockerfile","docker/backend/Dockerfile.dev"]},{"name":"Other — docker-engine","slug":"other-docker-engine","files":["docker/engine/Dockerfile"]},{"name":"Other — docker-ui","slug":"other-docker-ui","files":["docker/ui/Dockerfile","docker/ui/Dockerfile.dev","docker/ui/Dockerfile.self-contained","docker/ui/nginx.conf","docker/ui/nginx.self-contained.conf"]},{"name":"Other — docs","slug":"other-docs","files":["docs/SDS-traceability-matrix.md","docs/SDS_Planification_CM.md"]},{"name":"Other — engine","slug":"other-engine","files":["engine/Cargo.toml"]},{"name":"Other — 
pyproject.toml","slug":"other-pyproject-toml","files":["pyproject.toml"]},{"name":"Other — src-cm_plan","slug":"other-src-cm-plan","files":["src/cm_plan/api/__init__.py"]},{"name":"Other — tests","slug":"other-tests","files":["tests/__init__.py","tests/conftest.py","tests/test_api.py","tests/test_auth.py","tests/test_constraints.py","tests/test_diff.py","tests/test_equity.py","tests/test_exports.py","tests/test_filters.py","tests/test_imports.py","tests/test_persistence.py","tests/test_resilience.py","tests/test_rotation.py","tests/test_rules.py","tests/test_simulation.py","tests/test_solver.py","tests/test_types.py"]},{"name":"Other — ui","slug":"other-ui","files":["ui/package.json","ui/svelte.config.js","ui/tsconfig.json","ui/vite.config.ts","ui/vitest.config.ts"]},{"name":"Other — ui-scripts","slug":"other-ui-scripts","files":["ui/scripts/build-engine-wasm.mjs","ui/scripts/bundle-single.js","ui/scripts/verify-engine-wasm-release.mjs"]},{"name":"Other — ui-src","slug":"other-ui-src","files":["ui/src/lib/wasm/package.json"]}]}]};
(function() {
var activePage = 'overview';
document.addEventListener('DOMContentLoaded', function() {
  // One-time page initialization: configure mermaid, render the sidebar
  // meta line and navigation, wire the mobile menu toggle, then display
  // the page named in the URL hash (or the default overview page).
  try {
    // Guarded: if the mermaid CDN script failed to load, `mermaid` is
    // undefined and this line would throw, aborting ALL initialization
    // (no nav, no content). The file already wraps mermaid.run in
    // try/catch during rendering; be consistent here.
    mermaid.initialize({ startOnLoad: false, theme: 'neutral', securityLevel: 'loose' });
  } catch (e) {}
  renderMeta();
  renderNav();
  var toggle = document.getElementById('menu-toggle');
  if (toggle) {
    // Mobile-only hamburger button; may be absent from some layouts.
    toggle.addEventListener('click', function() {
      document.getElementById('sidebar').classList.toggle('open');
    });
  }
  if (location.hash && location.hash.length > 1) {
    // Deep link support: "#slug" selects the initial page.
    activePage = decodeURIComponent(location.hash.slice(1));
  }
  navigateTo(activePage);
});
function renderMeta() {
  // Populate the sidebar meta line ("date · model · short commit hash")
  // from the build-time META object embedded in this file.
  if (!META) return;
  var el = document.getElementById('meta-info');
  if (!el) return; // meta slot missing from the DOM — nothing to render into
  var parts = [];
  if (META.generatedAt) {
    // Rendered in the viewer's locale.
    parts.push(new Date(META.generatedAt).toLocaleDateString());
  }
  if (META.model) parts.push(META.model);
  if (META.fromCommit) parts.push(META.fromCommit.slice(0, 8));
  el.textContent = parts.join(' \u00b7 '); // \u00b7 = "·" middle-dot separator
}
function renderNav() {
  // Build the sidebar navigation (Overview link plus the module tree)
  // and attach a single delegated click handler for page switching.
  var container = document.getElementById('nav-tree');
  var pieces = [
    '<div class="nav-section">',
    '<a class="nav-item overview" data-page="overview" href="#overview">Overview</a>',
    '</div>'
  ];
  if (TREE.length > 0) {
    pieces.push('<div class="nav-group-label">Modules</div>');
    pieces.push(buildNavTree(TREE));
  }
  container.innerHTML = pieces.join('');
  container.addEventListener('click', function(e) {
    // Walk up from the click target to the nearest ancestor carrying a
    // data-page attribute (the nav links themselves).
    var node = e.target;
    while (node && !node.dataset.page) {
      node = node.parentElement;
    }
    if (node && node.dataset.page) {
      e.preventDefault();
      navigateTo(node.dataset.page);
    }
  });
}
function buildNavTree(nodes) {
  // Recursively render the module tree as nested ".nav-section" divs.
  // Names and slugs are HTML-escaped via escH; hrefs are URI-encoded
  // hash links matching the data-page slug used by the click handler.
  var out = [];
  for (var i = 0; i < nodes.length; i++) {
    var node = nodes[i];
    out.push('<div class="nav-section">');
    out.push(
      '<a class="nav-item" data-page="' + escH(node.slug) +
      '" href="#' + encodeURIComponent(node.slug) + '">' +
      escH(node.name) + '</a>'
    );
    if (node.children && node.children.length > 0) {
      out.push('<div class="nav-children">' + buildNavTree(node.children) + '</div>');
    }
    out.push('</div>');
  }
  return out.join('');
}
function escH(s) {
  // Escape a string for safe interpolation into HTML, including inside
  // double- or single-quoted attribute values.
  //
  // Fix: the previous div.textContent -> div.innerHTML round-trip only
  // escaped & < > and left quotes intact, yet escH output is placed
  // inside data-page="..." (see buildNavTree), so a slug containing a
  // double quote could break out of the attribute.
  return String(s)
    .replace(/&/g, '&amp;')   // must run first so later entities survive
    .replace(/</g, '&lt;')
    .replace(/>/g, '&gt;')
    .replace(/"/g, '&quot;')
    .replace(/'/g, '&#39;');
}
function navigateTo(page) {
activePage = page;
location.hash = encodeURIComponent(page);
var items = document.querySelectorAll('.nav-item');
for (var i = 0; i < items.length; i++) {
if (items[i].dataset.page === page) {
items[i].classList.add('active');
} else {
items[i].classList.remove('active');
}
}
var contentEl = document.getElementById('content');
var md = PAGES[page];
if (!md) {
contentEl.innerHTML = '<div class="empty-state"><h2>Page not found</h2><p>' + escH(page) + '.md does not exist.</p></div>';
return;
}
contentEl.innerHTML = marked.parse(md);
// Rewrite .md links to hash navigation
var links = contentEl.querySelectorAll('a[href]');
for (var i = 0; i < links.length; i++) {
var href = links[i].getAttribute('href');
if (href && href.endsWith('.md') && href.indexOf('://') === -1) {
var slug = href.replace(/\.md$/, '');
links[i].setAttribute('href', '#' + encodeURIComponent(slug));
(function(s) {
links[i].addEventListener('click', function(e) {
e.preventDefault();
navigateTo(s);
});
})(slug);
}
}
// Convert mermaid code blocks into mermaid divs
var mermaidBlocks = contentEl.querySelectorAll('pre code.language-mermaid');
for (var i = 0; i < mermaidBlocks.length; i++) {
var pre = mermaidBlocks[i].parentElement;
var div = document.createElement('div');
div.className = 'mermaid';
div.textContent = mermaidBlocks[i].textContent;
pre.parentNode.replaceChild(div, pre);
}
try { mermaid.run({ querySelector: '.mermaid' }); } catch(e) {}
window.scrollTo(0, 0);
document.getElementById('sidebar').classList.remove('open');
}
})();
</script>
</body>
</html>
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment