Last active
May 4, 2026 22:38
-
-
Save limcheekin/c1c55bb5dcdc3fcf428250cea0f404ca to your computer and use it in GitHub Desktop.
Control-Center Repository Wiki — generated by GitNexus
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| <!DOCTYPE html> | |
| <html lang="en"> | |
| <head> | |
| <meta charset="UTF-8"> | |
| <meta name="viewport" content="width=device-width, initial-scale=1.0"> | |
| <title>control-center — Wiki</title> | |
| <!-- SECURITY FIX: scripts were loaded from "cdn.jsdelivr.net", a look-alike proxy of jsDelivr (supply-chain risk: an untrusted host serving executable JS). Point at the official cdn.jsdelivr.net instead; versions kept as before. --> | |
| <script src="https://cdn.jsdelivr.net/npm/marked@11.0.0/marked.min.js"></script> | |
| <script src="https://cdn.jsdelivr.net/npm/mermaid@11/dist/mermaid.min.js"></script> | |
| <style> | |
| /* Global reset + shared design tokens (light palette, radius, shadow) consumed below via var() */ | |
| *{margin:0;padding:0;box-sizing:border-box} | |
| :root{ | |
| --bg:#ffffff;--sidebar-bg:#f8f9fb;--border:#e5e7eb; | |
| --text:#1e293b;--text-muted:#64748b;--primary:#2563eb; | |
| --primary-soft:#eff6ff;--hover:#f1f5f9;--code-bg:#f1f5f9; | |
| --radius:8px;--shadow:0 1px 3px rgba(0,0,0,.08); | |
| } | |
| /* Base typography + two-pane layout: fixed 280px sidebar, scrolling content pane */ | |
| body{font-family:-apple-system,BlinkMacSystemFont,'Segoe UI',Roboto,sans-serif; | |
| line-height:1.65;color:var(--text);background:var(--bg)} | |
| .layout{display:flex;min-height:100vh} | |
| .sidebar{width:280px;background:var(--sidebar-bg);border-right:1px solid var(--border); | |
| position:fixed;top:0;left:0;bottom:0;overflow-y:auto;padding:24px 16px; | |
| display:flex;flex-direction:column;z-index:10} | |
| /* Content is offset by the fixed sidebar's width */ | |
| .content{margin-left:280px;flex:1;padding:48px 64px;max-width:960px} | |
| /* Sidebar header: repo title row + small metadata line */ | |
| .sidebar-header{margin-bottom:20px;padding-bottom:16px;border-bottom:1px solid var(--border)} | |
| .sidebar-title{font-size:16px;font-weight:700;color:var(--text);display:flex;align-items:center;gap:8px} | |
| .sidebar-title svg{flex-shrink:0} | |
| .sidebar-meta{font-size:11px;color:var(--text-muted);margin-top:6px} | |
| /* Navigation tree: items truncate with ellipsis; .active highlights the current page */ | |
| .nav-section{margin-bottom:2px} | |
| .nav-item{display:block;padding:7px 12px;border-radius:var(--radius);cursor:pointer; | |
| font-size:13px;color:var(--text);text-decoration:none;transition:all .15s; | |
| white-space:nowrap;overflow:hidden;text-overflow:ellipsis} | |
| .nav-item:hover{background:var(--hover)} | |
| .nav-item.active{background:var(--primary-soft);color:var(--primary);font-weight:600} | |
| .nav-item.overview{font-weight:600;margin-bottom:4px} | |
| .nav-children{padding-left:14px;border-left:1px solid var(--border);margin-left:12px} | |
| .nav-group-label{font-size:11px;font-weight:600;color:var(--text-muted); | |
| text-transform:uppercase;letter-spacing:.5px;padding:12px 12px 4px;user-select:none} | |
| /* Footer pinned to the sidebar's bottom edge via margin-top:auto (sidebar is a flex column) */ | |
| .sidebar-footer{margin-top:auto;padding-top:16px;border-top:1px solid var(--border); | |
| font-size:11px;color:var(--text-muted);text-align:center} | |
| /* Typography for rendered markdown inside the content pane */ | |
| .content h1{font-size:28px;font-weight:700;margin-bottom:8px;line-height:1.3} | |
| .content h2{font-size:22px;font-weight:600;margin:32px 0 12px;padding-bottom:6px;border-bottom:1px solid var(--border)} | |
| .content h3{font-size:17px;font-weight:600;margin:24px 0 8px} | |
| .content h4{font-size:15px;font-weight:600;margin:20px 0 6px} | |
| .content p{margin:12px 0} | |
| .content ul,.content ol{margin:12px 0 12px 24px} | |
| .content li{margin:4px 0} | |
| .content a{color:var(--primary);text-decoration:none} | |
| .content a:hover{text-decoration:underline} | |
| .content blockquote{border-left:3px solid var(--primary);padding:8px 16px;margin:16px 0; | |
| background:var(--primary-soft);border-radius:0 var(--radius) var(--radius) 0; | |
| color:var(--text-muted);font-size:14px} | |
| /* Code: inline spans get a light chip; fenced blocks a dark panel (pre code resets the chip styles) */ | |
| .content code{font-family:'SF Mono',Consolas,'Courier New',monospace;font-size:13px; | |
| background:var(--code-bg);padding:2px 6px;border-radius:4px} | |
| .content pre{background:#1e293b;color:#e2e8f0;border-radius:var(--radius);padding:16px; | |
| overflow-x:auto;margin:16px 0} | |
| .content pre code{background:none;padding:0;font-size:13px;line-height:1.6;color:inherit} | |
| .content table{border-collapse:collapse;width:100%;margin:16px 0} | |
| .content th,.content td{border:1px solid var(--border);padding:8px 12px;text-align:left;font-size:14px} | |
| .content th{background:var(--sidebar-bg);font-weight:600} | |
| .content img{max-width:100%;border-radius:var(--radius)} | |
| .content hr{border:none;border-top:1px solid var(--border);margin:32px 0} | |
| /* Mermaid diagram containers are centered block elements */ | |
| .content .mermaid{margin:20px 0;text-align:center} | |
| /* Hamburger button: hidden on desktop, shown by the mobile media query below */ | |
| .menu-toggle{display:none;position:fixed;top:12px;left:12px;z-index:20; | |
| background:var(--bg);border:1px solid var(--border);border-radius:var(--radius); | |
| padding:8px 12px;cursor:pointer;font-size:18px;box-shadow:var(--shadow)} | |
| /* Mobile (<=768px): sidebar slides off-canvas; .open slides it back in with a shadow */ | |
| @media(max-width:768px){ | |
| .sidebar{transform:translateX(-100%);transition:transform .2s} | |
| .sidebar.open{transform:translateX(0);box-shadow:2px 0 12px rgba(0,0,0,.1)} | |
| .content{margin-left:0;padding:24px 20px;padding-top:56px} | |
| .menu-toggle{display:block} | |
| } | |
| /* Placeholder shown in the content pane before a page is rendered */ | |
| .empty-state{text-align:center;padding:80px 20px;color:var(--text-muted)} | |
| .empty-state h2{font-size:20px;margin-bottom:8px;border:none} | |
| </style> | |
| </head> | |
| <body> | |
| <!-- Mobile-only hamburger; styled hidden above 768px by the .menu-toggle rules --> | |
| <button class="menu-toggle" id="menu-toggle" aria-label="Toggle menu">☰</button> | |
| <div class="layout"> | |
| <!-- Fixed left sidebar. NOTE(review): #meta-info and #nav-tree are empty placeholders — presumably populated at runtime by the page script, which is truncated in this capture; verify against the full file. --> | |
| <nav class="sidebar" id="sidebar"> | |
| <div class="sidebar-header"> | |
| <div class="sidebar-title"> | |
| <svg width="18" height="18" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2"><path d="M2 3h6a4 4 0 014 4v14a3 3 0 00-3-3H2z"/><path d="M22 3h-6a4 4 0 00-4 4v14a3 3 0 013-3h7z"/></svg> | |
| control-center | |
| </div> | |
| <div class="sidebar-meta" id="meta-info"></div> | |
| </div> | |
| <div id="nav-tree"></div> | |
| <div class="sidebar-footer">Generated by GitNexus</div> | |
| </nav> | |
| <!-- Main pane: renders a loading placeholder until content is swapped in --> | |
| <main class="content" id="content"> | |
| <div class="empty-state"><h2>Loading…</h2></div> | |
| </main> | |
| </div> | |
| <script> | |
| var PAGES = {"application-core":"# Application Core\n\n# Application Core Module\n\n## Overview\n\nThe Application Core module (`AppModule`) serves as the root module of the NestJS application, orchestrating all feature modules, infrastructure services, and configuration. It bootstraps the application, establishes database connections, configures global settings, and provides a unified entry point for all functionality.\n\n## Architecture\n\n```mermaid\ngraph TB\n subgraph \"Entry Point\"\n M[main.ts]\n end\n \n subgraph \"AppModule\"\n Config[ConfigModule]\n DB[Database Layer]\n I18n[I18nModule]\n App[Feature Modules]\n end\n \n subgraph \"Feature Modules\"\n UM[UsersModule]\n FM[FilesModule]\n AuthM[AuthModule]\n AuthF[AuthFacebookModule]\n AuthG[AuthGoogleModule]\n AuthA[AuthAppleModule]\n SM[SessionModule]\n MailM[MailModule]\n MailerM[MailerModule]\n HM[HomeModule]\n end\n \n M --> Config\n Config --> DB\n Config --> I18n\n DB --> App\n I18n --> App\n \n App --> UM\n App --> FM\n App --> AuthM\n App --> AuthF\n App --> AuthG\n App --> AuthA\n App --> SM\n App --> MailM\n App --> MailerM\n App --> HM\n```\n\n## Key Components\n\n### AppModule (`src/app.module.ts`)\n\nThe root module that imports and configures all application components. It performs the following setup:\n\n1. **Configuration Loading** - Loads environment-based configuration from multiple config files:\n - `database.config` - Database connection settings\n - `auth.config` - Authentication configuration\n - `app.config` - General application settings\n - `mail.config` - Email configuration\n - `file.config` - File storage settings\n - `facebook.config`, `google.config`, `apple.config` - OAuth provider configs\n\n2. **Database Initialization** - Supports both SQL (TypeORM) and document (Mongoose) databases based on configuration:\n ```typescript\n const infrastructureDatabaseModule = (databaseConfig() as DatabaseConfig)\n .isDocumentDatabase\n ? 
MongooseModule.forRootAsync({...}\n : TypeOrmModule.forRootAsync{...}\n ```\n\n3. **Internationalization** - Configures i18n with fallback language support and header-based language resolution\n\n4. **Feature Module Imports** - Imports all domain and infrastructure modules\n\n### Bootstrap (`src/main.ts`)\n\nThe application entry point that initializes the NestJS runtime:\n\n| Setup | Purpose |\n|-------|---------|\n| CORS | Enables cross-origin requests |\n| Global Prefix | Adds API prefix to all routes (configured via `app.apiPrefix`) |\n| Versioning | Enables URI-based API versioning |\n| Validation Pipe | Applies class-validator rules globally |\n| Interceptors | Handles promise resolution and serialization |\n| Swagger | Generates API documentation at `/docs` |\n\n### Home Module\n\nA minimal feature module providing the root endpoint:\n\n- **`HomeController`** - Handles `GET /` requests, returning application info\n- **`HomeService`** - Retrieves application name from configuration\n- **`HomeModule`** - Registers controller and provider\n\n## Configuration\n\nThe application uses a hierarchical configuration system:\n\n```typescript\nConfigModule.forRoot({\n isGlobal: true,\n load: [\n databaseConfig,\n authConfig,\n appConfig,\n mailConfig,\n fileConfig,\n facebookConfig,\n googleConfig,\n appleConfig,\n ],\n envFilePath: ['.env'],\n})\n```\n\nEach config file exports a configuration factory that returns typed configuration objects. 
The `ConfigService` provides type-safe access throughout the application via `AllConfigType`.\n\n## Request Pipeline\n\n```\nHTTP Request\n │\n ▼\nCORS Middleware\n │\n ▼\nGlobal Prefix (e.g., /api)\n │\n ▼\nVersioning (e.g., /api/v1)\n │\n ▼\nValidationPipe (DTO validation)\n │\n ▼\nResolvePromisesInterceptor (resolves async properties)\n │\n ▼\nClassSerializerInterceptor (transforms responses)\n │\n ▼\nController / Service\n```\n\n## Integration Points\n\n### Database Layer\n\nThe module dynamically selects the database driver:\n- **TypeORM** - For SQL databases (PostgreSQL, MySQL, etc.)\n- **Mongoose** - For MongoDB\n\nThe selection is determined by `databaseConfig().isDocumentDatabase`.\n\n### Authentication\n\nMultiple authentication strategies are registered:\n- Local authentication via `AuthModule`\n- OAuth via Facebook, Google, and Apple modules\n\n### Internationalization\n\nThe `I18nModule` is configured with:\n- Fallback language from config\n- Language files in `./src/i18n/`\n- Language detection via custom HTTP header (configurable via `app.headerLanguage`)\n\n## Environment Variables\n\nKey environment variables used by the core:\n\n| Variable | Purpose |\n|----------|---------|\n| `APP_API_PREFIX` | Global route prefix |\n| `APP_PORT` | Server port |\n| `APP_HEADER_LANGUAGE` | Header name for language detection |\n| `APP_FALLBACK_LANGUAGE` | Default language when none specified |\n\n## Swagger Documentation\n\nAPI documentation is automatically generated and available at `/docs`. The setup includes:\n- Bearer authentication token support\n- Global language parameter\n- Auto-generated schema from DTOs","authentication":"# Authentication\n\n# Authentication Module\n\n## Overview\n\nThe Authentication module provides comprehensive user authentication and authorization capabilities for the application. 
It supports multiple authentication methods including email/password login, social authentication (Google, Facebook, Apple), session management with JWT tokens, email verification, and password reset functionality.\n\nThe module is built on NestJS and uses Passport.js for JWT strategy implementation, with support for both relational (TypeORM) and document (Mongoose) database backends.\n\n## Architecture\n\nThe module consists of several interconnected components:\n\n```mermaid\ngraph TB\n subgraph \"API Layer\"\n AC[AuthController]\n AFC[AuthFacebookController]\n AGC[AuthGoogleController]\n AAC[AuthAppleController]\n end\n\n subgraph \"Service Layer\"\n AS[AuthService]\n AFS[AuthFacebookService]\n AGS[AuthGoogleService]\n AAS[AuthAppleService]\n end\n\n subgraph \"Strategies\"\n JWT[JwtStrategy]\n RJWT[JwtRefreshStrategy]\n ANON[AnonymousStrategy]\n end\n\n subgraph \"Session Management\"\n SS[SessionService]\n SR[SessionRepository]\n end\n\n subgraph \"External Services\"\n MAIL[MailService]\n US[UsersService]\n end\n\n AC --> AS\n AFC --> AFS\n AGC --> AGS\n AAC --> AAS\n\n AFS --> AS\n AGS --> AS\n AAS --> AS\n\n AS --> JWT\n AS --> RJWT\n AS --> SS\n AS --> US\n AS --> MAIL\n\n SS --> SR\n```\n\n## Core Components\n\n### AuthController\n\nThe `AuthController` (`src/auth/auth.controller.ts`) exposes REST endpoints for all authentication operations. 
It handles the HTTP layer, validation via DTOs, and delegates business logic to `AuthService`.\n\n**Endpoints:**\n\n| Method | Path | Description | Auth Required |\n|--------|------|-------------|---------------|\n| POST | `/auth/email/login` | Email/password login | No |\n| POST | `/auth/email/register` | User registration | No |\n| POST | `/auth/email/confirm` | Confirm email address | No |\n| POST | `/auth/email/confirm/new` | Confirm new email change | No |\n| POST | `/auth/forgot/password` | Initiate password reset | No |\n| POST | `/auth/reset/password` | Reset password with token | No |\n| GET | `/auth/me` | Get current user | Yes |\n| POST | `/auth/refresh` | Refresh access token | Yes |\n| POST | `/auth/logout` | Logout and invalidate session | Yes |\n| PATCH | `/auth/me` | Update current user profile | Yes |\n| DELETE | `/auth/me` | Soft delete current user | Yes |\n\n### AuthService\n\nThe `AuthService` (`src/auth/auth.service.ts`) contains all authentication business logic. It orchestrates interactions between users, sessions, tokens, and email services.\n\n**Key Methods:**\n\n- `validateLogin(loginDto)` — Validates email/password credentials, creates a session, and returns JWT tokens\n- `validateSocialLogin(authProvider, socialData)` — Handles social authentication, creating or linking user accounts\n- `register(dto)` — Creates a new user with inactive status and sends confirmation email\n- `confirmEmail(hash)` — Activates user account after email confirmation\n- `confirmNewEmail(hash)` — Updates user email after confirmation\n- `forgotPassword(email)` — Initiates password reset flow\n- `resetPassword(hash, password)` — Completes password reset\n- `me(userJwtPayload)` — Retrieves current user profile\n- `update(userJwtPayload, userDto)` — Updates user profile, handles password changes and email changes\n- `refreshToken(data)` — Issues new tokens from refresh token\n- `logout(data)` — Invalidates session\n- `softDelete(user)` — Soft deletes user 
account\n\n**Token Generation:**\n\nThe private `getTokensData` method generates both access and refresh tokens:\n\n```typescript\nprivate async getTokensData(data: {\n id: User['id'];\n role: User['role'];\n sessionId: Session['id'];\n hash: Session['hash'];\n})\n```\n\n- **Access Token**: Signed with `auth.secret`, contains user ID, role, and session ID\n- **Refresh Token**: Signed with `auth.refreshSecret`, contains session ID and hash for session validation\n\n### JWT Strategies\n\nThe module implements three Passport strategies in `src/auth/strategies/`:\n\n**JwtStrategy** (`jwt`)\n- Validates access tokens for protected endpoints\n- Extracts payload: `{ id, role, sessionId, iat, exp }`\n- Throws `UnauthorizedException` if payload lacks `id`\n\n**JwtRefreshStrategy** (`jwt-refresh`)\n- Validates refresh tokens\n- Extracts payload: `{ sessionId, hash, iat, exp }`\n- Throws `UnauthorizedException` if sessionId is missing\n\n**AnonymousStrategy**\n- Allows unauthenticated access\n- Returns the request object unchanged\n\n## Authentication Flows\n\n### Email/Password Login\n\n```mermaid\nsequenceDiagram\n Client->>AuthController: POST /auth/email/login\n AuthController->>AuthService: validateLogin(dto)\n AuthService->>UsersService: findByEmail(email)\n \n alt User not found\n AuthService-->>Client: 422 - email not found\n else User found\n AuthService->>AuthService: Validate provider is email\n AuthService->>AuthService: Validate password with bcrypt.compare\n alt Invalid password\n AuthService-->>Client: 422 - incorrect password\n else Valid\n AuthService->>SessionService: create({ user, hash })\n AuthService->>AuthService: getTokensData()\n AuthService-->>Client: { token, refreshToken, tokenExpires, user }\n end\n end\n```\n\n### Registration with Email Confirmation\n\n1. User submits registration data via `POST /auth/email/register`\n2. `AuthService.register()` creates user with `StatusEnum.inactive`\n3. 
JWT confirmation token is generated with `confirmEmailSecret`\n4. Confirmation email sent via `MailService.userSignUp()`\n5. User clicks confirmation link\n6. `AuthService.confirmEmail()` verifies JWT, updates status to `StatusEnum.active`\n\n### Social Authentication\n\nEach social provider follows this pattern:\n\n1. Client sends provider's ID token/access token to the app\n2. Provider service (`AuthGoogleService`, `AuthFacebookService`, `AuthAppleService`) validates the token with the provider\n3. Returns normalized `SocialInterface` data\n4. `AuthService.validateSocialLogin()` handles account creation/linking:\n - If social ID exists → update email if needed\n - If email exists → link social account\n - If neither exists → create new user with social data\n\n### Token Refresh\n\n1. Client sends expired access token + valid refresh token\n2. `JwtRefreshStrategy` validates refresh token, extracts `sessionId` and `hash`\n3. `AuthService.refreshToken()` verifies session exists with matching hash\n4. New session hash generated, new tokens issued\n5. Returns `{ token, refreshToken, tokenExpires }`\n\n### Password Reset\n\n1. User requests reset via `POST /auth/forgot/password`\n2. If email exists, JWT reset token sent via email\n3. User submits new password + token via `POST /auth/reset/password`\n4. `AuthService.resetPassword()` verifies token, updates password\n5. 
All existing sessions for user are invalidated\n\n## Session Management\n\nThe `SessionModule` (`src/session/`) manages user sessions across the application.\n\n### Session Entity\n\n```typescript\nclass Session {\n id: number | string;\n user: User;\n hash: string; // Rotated on each token refresh\n createdAt: Date;\n updatedAt: Date;\n deletedAt: Date;\n}\n```\n\n### Session Repository\n\nAbstracted for database portability with implementations for:\n- **Relational**: `SessionRelationalRepository` (TypeORM)\n- **Document**: `SessionDocumentRepository` (Mongoose)\n\nKey operations:\n- `findById(id)` — Retrieve session by ID\n- `create(data)` — Create new session\n- `updateByHash(conditions, payload)` — Update session with hash verification\n- `deleteById(id)` — Delete specific session\n- `deleteByUserId(userId)` — Delete all user sessions\n- `deleteByUserIdWithExclude(userId, excludeSessionId)` — Delete all except current\n\n### Session Hash Rotation\n\nEach token refresh generates a new session hash. 
This invalidates any previously issued refresh tokens, providing session rotation security.\n\n## Data Transfer Objects\n\n### Request DTOs\n\n| DTO | Purpose |\n|-----|---------|\n| `AuthEmailLoginDto` | Email + password for login |\n| `AuthRegisterLoginDto` | Email, password, firstName, lastName for registration |\n| `AuthConfirmEmailDto` | Hash for email confirmation |\n| `AuthForgotPasswordDto` | Email for password reset request |\n| `AuthResetPasswordDto` | New password + hash for reset |\n| `AuthUpdateDto` | Profile updates (photo, name, email, password) |\n| `AuthGoogleLoginDto` | Google ID token |\n| `AuthFacebookLoginDto` | Facebook access token |\n| `AuthAppleLoginDto` | Apple ID token + name |\n\n### Response DTOs\n\n| DTO | Purpose |\n|-----|---------|\n| `LoginResponseDto` | `{ token, refreshToken, tokenExpires, user }` |\n| `RefreshResponseDto` | `{ token, refreshToken, tokenExpires }` |\n\n## Configuration\n\n### Auth Config (`src/auth/config/auth.config.ts`)\n\nEnvironment variables required:\n\n```typescript\nAUTH_JWT_SECRET // Secret for access token signing\nAUTH_JWT_TOKEN_EXPIRES_IN // Access token expiration (e.g., \"1d\")\nAUTH_REFRESH_SECRET // Secret for refresh token signing\nAUTH_REFRESH_TOKEN_EXPIRES_IN // Refresh token expiration (e.g., \"7d\")\nAUTH_FORGOT_SECRET // Secret for password reset tokens\nAUTH_FORGOT_TOKEN_EXPIRES_IN // Password reset token expiration\nAUTH_CONFIRM_EMAIL_SECRET // Secret for email confirmation tokens\nAUTH_CONFIRM_EMAIL_TOKEN_EXPIRES_IN // Confirmation token expiration\n```\n\n### Social Provider Config\n\nEach social provider has its own config:\n\n- **Google** (`src/auth-google/config/google.config.ts`): `GOOGLE_CLIENT_ID`, `GOOGLE_CLIENT_SECRET`\n- **Facebook** (`src/auth-facebook/config/facebook.config.ts`): `FACEBOOK_APP_ID`, `FACEBOOK_APP_SECRET`\n- **Apple** (`src/auth-apple/config/apple.config.ts`): `APPLE_APP_AUDIENCE`\n\n## Module Dependencies\n\n```mermaid\ngraph LR\n AuthModule --> UsersModule\n 
AuthModule --> SessionModule\n AuthModule --> MailModule\n AuthModule --> PassportModule\n AuthModule --> JwtModule\n \n AuthFacebookModule --> AuthModule\n AuthGoogleModule --> AuthModule\n AuthAppleModule --> AuthModule\n```\n\n- **UsersModule**: User data and operations\n- **SessionModule**: Session persistence\n- **MailModule**: Email sending\n- **PassportModule**: JWT strategy infrastructure\n- **JwtModule**: JWT signing/verification\n\n## Security Considerations\n\n1. **Password Storage**: Passwords are hashed with bcrypt before storage\n2. **Token Secrets**: Separate secrets for access and refresh tokens\n3. **Session Binding**: Tokens are bound to sessions, enabling revocation\n4. **Hash Rotation**: Session hash rotates on each refresh to detect token reuse\n5. **Email Validation**: Email changes require confirmation before activation\n6. **Password Changes**: Require old password verification, invalidate other sessions\n7. **Provider Validation**: Social tokens validated against provider APIs before use\n\n## Extending the Module\n\nTo add a new social provider:\n\n1. Create provider config in `src/auth-{provider}/config/`\n2. Create service to validate provider tokens in `src/auth-{provider}/auth-{provider}.service.ts`\n3. Create controller endpoint in `src/auth-{provider}/auth-{provider}.controller.ts`\n4. Create module in `src/auth-{provider}/auth-{provider}.module.ts`\n5. Import `AuthModule` to access `validateSocialLogin()`","database-layer":"# Database Layer\n\n# Database Layer Module\n\nThe Database Layer module provides unified database configuration and persistence abstractions for both relational (PostgreSQL via TypeORM) and document (MongoDB via Mongoose) databases. 
It supports environment-driven configuration, SSL connections, connection pooling, and seed data management.\n\n## Architecture Overview\n\nThe module implements a dual-database strategy allowing the application to run against either a relational database (PostgreSQL) or a document database (MongoDB) based on configuration.\n\n```mermaid\ngraph TB\n subgraph Configuration\n DC[database.config.ts]\n AC[app.config.ts]\n end\n \n subgraph \"Data Sources\"\n TCS[TypeOrmConfigService]\n MCS[MongooseConfigService]\n end\n \n subgraph Persistence\n subgraph Relational\n DE[UserEntity<br/>RoleEntity<br/>StatusEntity]\n end\n subgraph Document\n DS[UserSchema<br/>RoleSchema<br/>StatusSchema]\n end\n end\n \n subgraph Seeds\n RS[relational seeds<br/>TypeORM]\n DS2[document seeds<br/>Mongoose]\n end\n \n DC --> TCS\n DC --> MCS\n TCS --> DE\n MCS --> DS\n RS --> DE\n DS2 --> DS\n```\n\n## Configuration System\n\n### DatabaseConfig Type\n\nThe `DatabaseConfig` type defines all supported configuration options:\n\n```typescript\n// src/database/config/database-config.type.ts\nexport type DatabaseConfig = {\n isDocumentDatabase: boolean; // true for MongoDB\n url?: string; // Connection URL (alternative to host/port)\n type?: string; // Database type (postgres, mongodb, etc.)\n host?: string;\n port?: number;\n password?: string;\n name?: string; // Database name\n username?: string;\n synchronize?: boolean; // TypeORM schema sync (dev only)\n maxConnections: number; // Connection pool size\n sslEnabled?: boolean;\n rejectUnauthorized?: boolean;\n ca?: string; // SSL certificate authority\n key?: string; // SSL private key\n cert?: string; // SSL certificate\n};\n```\n\n### Configuration Factory\n\nThe `database.config.ts` registers database configuration with NestJS ConfigModule and validates environment variables at startup:\n\n```typescript\n// src/database/config/database.config.ts\nexport default registerAs<DatabaseConfig>('database', () => {\n validateConfig(process.env, 
EnvironmentVariablesValidator);\n\n return {\n isDocumentDatabase: ['mongodb'].includes(process.env.DATABASE_TYPE ?? ''),\n url: process.env.DATABASE_URL,\n // ... other fields with sensible defaults\n maxConnections: process.env.DATABASE_MAX_CONNECTIONS \n ? parseInt(process.env.DATABASE_MAX_CONNECTIONS, 10) \n : 100,\n };\n});\n```\n\nKey validation rules:\n- **URL-based connection**: If `DATABASE_URL` is provided, individual host/port credentials are optional\n- **SSL**: Only applied when `DATABASE_SSL_ENABLED=true`, with optional CA, key, and cert\n- **Port range**: Validated as integer between 0-65535\n- **Defaults**: Port 5432 (PostgreSQL default), maxConnections 100\n\n## Data Source Services\n\n### TypeOrmConfigService\n\nProvides TypeORM configuration for relational databases:\n\n```typescript\n// src/database/typeorm-config.service.ts\n@Injectable()\nexport class TypeOrmConfigService implements TypeOrmOptionsFactory {\n createTypeOrmOptions(): TypeOrmModuleOptions {\n return {\n type: this.configService.get('database.type', { infer: true }),\n url: this.configService.get('database.url', { infer: true }),\n host: this.configService.get('database.host', { infer: true }),\n // ...\n extra: {\n max: this.configService.get('database.maxConnections', { infer: true }),\n ssl: this.configService.get('database.sslEnabled', { infer: true })\n ? 
{ rejectUnauthorized, ca, key, cert }\n : undefined,\n },\n };\n }\n}\n```\n\nConfiguration is read from the NestJS ConfigService, enabling runtime configuration without environment variable re-reading.\n\n### MongooseConfigService\n\nProvides Mongoose configuration for MongoDB:\n\n```typescript\n// src/database/mongoose-config.service.ts\n@Injectable()\nexport class MongooseConfigService implements MongooseOptionsFactory {\n createMongooseOptions(): MongooseModuleOptions {\n return {\n uri: this.configService.get('database.url', { infer: true }),\n dbName: this.configService.get('database.name', { infer: true }),\n user: this.configService.get('database.username', { infer: true }),\n pass: this.configService.get('database.password', { infer: true }),\n connectionFactory(connection) {\n connection.plugin(mongooseAutoPopulate);\n return connection;\n },\n };\n }\n}\n```\n\nThe service applies `mongoose-autopopulate` plugin automatically to enable automatic population of referenced documents.\n\n### Raw DataSource (Legacy)\n\nThe `data-source.ts` file provides a raw TypeORM DataSource instance that reads directly from environment variables. This is used by the TypeORM CLI for migrations and seeding:\n\n```typescript\n// src/database/data-source.ts\nexport const AppDataSource = new DataSource({\n type: process.env.DATABASE_TYPE,\n url: process.env.DATABASE_URL,\n // ...\n entities: [__dirname + '/../**/*.entity{.ts,.js}'],\n migrations: [__dirname + '/migrations/**/*{.ts,.js}'],\n} as DataSourceOptions);\n```\n\n## Database Type Detection\n\nThe `isDocumentDatabase` flag is automatically determined from the database type:\n\n```typescript\nisDocumentDatabase: ['mongodb'].includes(process.env.DATABASE_TYPE ?? 
''),\n```\n\nThis flag is used throughout the application to conditionally load either:\n- TypeORM modules for relational databases\n- Mongoose modules for MongoDB\n\n## Migrations\n\nMigrations are stored in `src/database/migrations/` and follow TypeORM's migration format. The example migration `CreateUser.ts` creates the core tables:\n\n- **role**: User roles (admin, user, etc.)\n- **status**: User status (active, inactive, etc.)\n- **file**: File/attachment storage\n- **user**: Main user entity with email, password, social auth fields\n- **session**: User sessions for authentication\n\nEach table includes appropriate indexes and foreign key constraints.\n\n## Seed Data\n\nThe module provides seed services for both database types to populate initial data.\n\n### Relational Seeds (TypeORM)\n\nLocated in `src/database/seeds/relational/`:\n\n| Service | Purpose |\n|---------|---------|\n| `RoleSeedService` | Creates admin and user roles |\n| `StatusSeedService` | Creates active and inactive statuses |\n| `UserSeedService` | Creates admin and regular user accounts |\n\nThe seed creates:\n- **Admin user**: `admin@example.com` / `secret`\n- **Regular user**: `john.doe@example.com` / `secret`\n\n### Document Seeds (Mongoose)\n\nLocated in `src/database/seeds/document/`:\n\n| Service | Purpose |\n|---------|---------|\n| `UserSeedService` | Creates admin and regular user documents |\n\n### Running Seeds\n\nExecute seeds via the run-seed scripts:\n\n```bash\n# For relational database\nnpm run seed:relational\n\n# For document database\nnpm run seed:document\n```\n\nThe seed modules configure their own database connections independently of the main application.\n\n## Environment Variables\n\n### Required Variables\n\n| Variable | Description | Example |\n|----------|-------------|---------|\n| `DATABASE_TYPE` | Database driver | `postgres`, `mongodb` |\n| `DATABASE_HOST` | Database host | `localhost` |\n| `DATABASE_PORT` | Database port | `5432` |\n| `DATABASE_NAME` | 
Database name | `app_db` |\n| `DATABASE_USERNAME` | Database user | `admin` |\n| `DATABASE_PASSWORD` | Database password | `secret` |\n\n### Optional Variables\n\n| Variable | Default | Description |\n|----------|---------|-------------|\n| `DATABASE_URL` | - | Full connection URL (alternative to host/port) |\n| `DATABASE_SYNCHRONIZE` | `false` | Auto-sync schema (dev only!) |\n| `DATABASE_MAX_CONNECTIONS` | `100` | Connection pool size |\n| `DATABASE_SSL_ENABLED` | `false` | Enable SSL |\n| `DATABASE_REJECT_UNAUTHORIZED` | - | Reject invalid SSL certs |\n| `DATABASE_CA` | - | SSL CA certificate |\n| `DATABASE_KEY` | - | SSL private key |\n| `DATABASE_CERT` | - | SSL certificate |\n\n### SSL Configuration Example\n\n```bash\nDATABASE_SSL_ENABLED=true\nDATABASE_REJECT_UNAUTHORIZED=true\nDATABASE_CA=-----BEGIN CERTIFICATE-----\n...\n-----END CERTIFICATE-----\n```\n\n## Integration with the Application\n\nThe database layer integrates with other modules through:\n\n1. **ConfigModule**: Loads `database.config.ts` and exposes `database.*` config keys\n2. **TypeOrmModule.forRootAsync()**: Uses `TypeOrmConfigService` for relational databases\n3. **MongooseModule.forRootAsync()**: Uses `MongooseConfigService` for MongoDB\n4. **Entity/Schema modules**: Each domain entity (users, roles, statuses) provides both TypeORM entities and Mongoose schemas\n\nThe application determines which persistence layer to use based on `DATABASE_TYPE`:\n- Set to `postgres` (or other SQL) → uses TypeORM\n- Set to `mongodb` → uses Mongoose","email-service":"# Email Service\n\n# Email Service Module\n\nThe Email Service module provides transactional email functionality for the application. 
It handles account activation, password reset, and email change confirmation emails using SMTP with Handlebars templates for rendering.\n\n## Architecture Overview\n\nThe module is split into two related parts:\n\n- **Mail Module** (`src/mail/`) — Business logic for specific email types (signup, password reset, etc.)\n- **Mailer Module** (`src/mailer/`) — Low-level SMTP transport and template rendering\n\n```mermaid\ngraph TD\n MailModule --> MailerModule\n MailService --> MailerService\n MailService --> ConfigService\n MailerService --> ConfigService\n \n subgraph \"Mail Module\"\n MailService\n MailData\n TemplateFiles[\"activation.hbs<br/>reset-password.hbs<br/>confirm-new-email.hbs\"]\n end\n \n subgraph \"Mailer Module\"\n MailerService\n end\n \n subgraph \"Config\"\n MailConfig\n end\n```\n\n## Configuration\n\n### Environment Variables\n\nThe mail service requires the following environment variables:\n\n| Variable | Required | Default | Description |\n|----------|----------|---------|-------------|\n| `MAIL_PORT` | No | 587 | SMTP port |\n| `MAIL_HOST` | Yes | — | SMTP server hostname |\n| `MAIL_USER` | No | — | SMTP authentication username |\n| `MAIL_PASSWORD` | No | — | SMTP authentication password |\n| `MAIL_DEFAULT_EMAIL` | Yes | — | Sender email address |\n| `MAIL_DEFAULT_NAME` | Yes | — | Sender display name |\n| `MAIL_IGNORE_TLS` | Yes | — | Skip TLS verification |\n| `MAIL_SECURE` | Yes | — | Use TLS/SSL (true for port 465) |\n| `MAIL_REQUIRE_TLS` | Yes | — | Require TLS connection |\n\n### Configuration Type\n\n```typescript\ntype MailConfig = {\n port: number;\n host?: string;\n user?: string;\n password?: string;\n defaultEmail?: string;\n defaultName?: string;\n ignoreTLS: boolean;\n secure: boolean;\n requireTLS: boolean;\n};\n```\n\nConfiguration is registered via NestJS ConfigModule and validated at startup using class-validator decorators.\n\n## MailerService\n\nLocated at `src/mailer/mailer.service.ts`, this service wraps nodemailer and 
handles:\n\n1. **SMTP Transport** — Creates a nodemailer transporter on initialization using config values\n2. **Template Compilation** — Reads Handlebars templates from disk and compiles them with context data\n3. **Sending** — Delivers emails via SMTP with proper sender attribution\n\n### sendMail Method\n\n```typescript\nasync sendMail({\n templatePath,\n context,\n ...mailOptions\n}: nodemailer.SendMailOptions & {\n templatePath: string;\n context: Record<string, unknown>;\n}): Promise<void>\n```\n\n**Parameters:**\n- `templatePath` — Absolute path to the `.hbs` template file\n- `context` — Data to interpolate into the template\n- `mailOptions` — Standard nodemailer options (to, subject, from, etc.)\n\n**Behavior:**\n- If `templatePath` is provided, reads and compiles the Handlebars template\n- If `html` is also provided in `mailOptions`, the explicit HTML takes precedence\n- Sets the `from` address to the configured default sender if not specified\n- Throws on send failure\n\n## MailService\n\nLocated at `src/mail/mail.service.ts`, this service provides application-specific email operations. It depends on `MailerService` and `ConfigService`.\n\n### userSignUp\n\nSends an account activation email.\n\n```typescript\nasync userSignUp(mailData: MailData<{ hash: string }>): Promise<void>\n```\n\n**Flow:**\n1. Retrieves localized strings via `nestjs-i18n` (confirmEmail, confirm-email.text1-3)\n2. Constructs a confirmation URL with the hash parameter\n3. Renders `activation.hbs` template with context data\n4. Calls `MailerService.sendMail()` to deliver\n\n**Mail Data:**\n```typescript\n{\n to: string; // Recipient email\n data: { hash: string } // Activation hash for URL\n}\n```\n\n### forgotPassword\n\nSends a password reset email.\n\n```typescript\nasync forgotPassword(\n mailData: MailData<{ hash: string; tokenExpires: number }>\n): Promise<void>\n```\n\n**Flow:**\n1. Retrieves localized strings (resetPassword, reset-password.text1-4)\n2. 
Constructs a password change URL with hash and expiration timestamp\n3. Renders `reset-password.hbs` template\n4. Sends via `MailerService`\n\n**Mail Data:**\n```typescript\n{\n to: string;\n data: {\n hash: string; // Reset token\n tokenExpires: number; // Expiration timestamp\n }\n}\n```\n\n### confirmNewEmail\n\nSends an email change confirmation when users request to update their email address.\n\n```typescript\nasync confirmNewEmail(mailData: MailData<{ hash: string }>): Promise<void>\n```\n\n**Flow:**\n1. Retrieves localized strings (confirmEmail, confirm-new-email.text1-3)\n2. Constructs a confirmation URL with the hash parameter\n3. Renders `confirm-new-email.hbs` template\n4. Sends via `MailerService`\n\n## Email Templates\n\nTemplates are stored in `src/mail/mail-templates/` as Handlebars (`.hbs`) files.\n\n### Template Variables\n\nAll templates receive these context variables:\n\n| Variable | Description |\n|----------|-------------|\n| `title` | Email subject/heading |\n| `url` | Action URL for the recipient |\n| `actionTitle` | Text displayed on the CTA button |\n| `app_name` | Application name from config |\n| `text1`, `text2`, etc. | Localized body text |\n\n### Template Structure\n\nTemplates use inline HTML with a table-based layout for email client compatibility:\n\n```html\n<table>\n <tr style=\"background:#eeeeee\">\n <td><!-- App header with app_name --><\/td>\n <\/tr>\n <tr>\n <td><!-- Body text (text1, text2, etc.) --><\/td>\n <\/tr>\n <tr>\n <td><!-- CTA button linking to url --><\/td>\n <\/tr>\n<\/table>\n```\n\n## Integration\n\n### Module Registration\n\n```typescript\n// src/mail/mail.module.ts\n@Module({\n imports: [ConfigModule, MailerModule],\n providers: [MailService],\n exports: [MailService],\n})\nexport class MailModule {}\n```\n\nThe `MailModule` is global in the sense that it exports `MailService` for use throughout the application. 
Import it in any module that needs to send emails.\n\n### Using the Service\n\nInject `MailService` into consumers:\n\n```typescript\nconstructor(private readonly mailService: MailService) {}\n\nasync onRegister(user: User) {\n await this.mailService.userSignUp({\n to: user.email,\n data: { hash: user.emailConfirmHash },\n });\n}\n```\n\n### Internationalization\n\nThe service uses `nestjs-i18n` to retrieve localized strings. Ensure i18n is configured and translation files contain the required keys:\n\n```\ncommon.confirmEmail\ncommon.resetPassword\nconfirm-email.text1\nconfirm-email.text2\nconfirm-email.text3\nreset-password.text1\nreset-password.text2\nreset-password.text3\nreset-password.text4\nconfirm-new-email.text1\nconfirm-new-email.text2\nconfirm-new-email.text3\n```\n\nIf `I18nContext.current()` returns null (i18n not available), the localized variables remain undefined and templates handle missing values gracefully.\n\n## Error Handling\n\n- **Configuration validation** — Fails at application startup if required env vars are missing or invalid\n- **Template not found** — `fs.readFile` throws if template path is invalid\n- **SMTP failures** — Nodemailer errors propagate and should be caught by callers or a global exception filter","file-management":"# File Management\n\n# File Management Module\n\nThe File Management module provides a unified interface for file uploads and storage across multiple backends. 
It supports three storage drivers (local filesystem, AWS S3, and AWS S3 with presigned URLs) and works with both relational (TypeORM) and document (MongoDB) databases.\n\n## Architecture Overview\n\nThe module uses a plugin architecture that dynamically loads the appropriate storage and persistence implementations based on configuration:\n\n```mermaid\ngraph TB\n subgraph \"FilesModule\"\n FS[FilesService]\n end\n \n subgraph \"Configuration\"\n FC[file.config.ts]\n FCT[FileConfig]\n FD[FileDriver enum]\n end\n \n subgraph \"Persistence Layer\"\n FR[FileRepository<br/>abstract]\n FRR[Relational Repository<br/>TypeORM]\n FDR[Document Repository<br/>Mongoose]\n end\n \n subgraph \"Uploaders\"\n FL[Local Module]\n FS3[S3 Module]\n FP[S3 Presigned Module]\n end\n \n FC --> FD\n FC --> FCT\n FS --> FR\n \n FR --> FRR\n FR --> FDR\n \n FCT --> FL\n FCT --> FS3\n FCT --> FP\n```\n\n## Configuration\n\n### FileDriver Enum\n\nLocated in `src/files/config/file-config.type.ts`, defines the three supported storage backends:\n\n```typescript\nexport enum FileDriver {\n LOCAL = 'local', // Local filesystem storage\n S3 = 's3', // Direct S3 upload\n S3_PRESIGNED = 's3-presigned' // S3 with presigned URLs for client-side upload\n}\n```\n\n### FileConfig Type\n\n```typescript\nexport type FileConfig = {\n driver: FileDriver;\n accessKeyId?: string; // AWS access key (S3 drivers only)\n secretAccessKey?: string; // AWS secret key (S3 drivers only)\n awsDefaultS3Bucket?: string; // S3 bucket name (S3 drivers only)\n awsS3Region?: string; // AWS region (S3 drivers only)\n maxFileSize: number; // Maximum upload size in bytes (default: 5MB)\n};\n```\n\n### Environment Variables\n\n| Variable | Required For | Description |\n|----------|--------------|-------------|\n| `FILE_DRIVER` | All | Storage driver: `local`, `s3`, or `s3-presigned` |\n| `ACCESS_KEY_ID` | S3, S3_PRESIGNED | AWS access key ID |\n| `SECRET_ACCESS_KEY` | S3, S3_PRESIGNED | AWS secret access key |\n| 
`AWS_DEFAULT_S3_BUCKET` | S3, S3_PRESIGNED | S3 bucket name |\n| `AWS_S3_REGION` | S3, S3_PRESIGNED | AWS region (e.g., `us-east-1`) |\n\n## Domain Model\n\n### FileType\n\nThe `FileType` class in `src/files/domain/file.ts` represents a file entity with automatic URL generation based on the configured driver:\n\n```typescript\nexport class FileType {\n id: string; // UUID\n path: string; // Storage path or S3 key\n}\n```\n\n**Path Transformation**: The `path` property uses a class-transformer `@Transform` decorator to convert storage paths to accessible URLs:\n\n- **Local driver**: Prepends the backend domain (`appConfig().backendDomain`)\n- **S3/S3_PRESIGNED drivers**: Generates a pre-signed URL valid for 1 hour using AWS SDK's `getSignedUrl()`\n\n## Persistence Layer\n\nThe module supports both relational and document databases through a repository pattern.\n\n### Abstract Repository\n\n`FileRepository` defines the contract:\n\n```typescript\nabstract class FileRepository {\n abstract create(data: Omit<FileType, 'id'>): Promise<FileType>;\n abstract findById(id: FileType['id']): Promise<NullableType<FileType>>;\n abstract findByIds(ids: FileType['id']): Promise<FileType[]>;\n}\n```\n\n### Relational Implementation (TypeORM)\n\n- Entity: `FileEntity` in `src/files/infrastructure/persistence/relational/entities/file.entity.ts`\n- Repository: `FileRelationalRepository` in `src/files/infrastructure/persistence/relational/repositories/file.repository.ts`\n- Database table: `file` with columns `id` (UUID primary key) and `path` (string)\n\n### Document Implementation (MongoDB)\n\n- Schema: `FileSchemaClass` in `src/files/infrastructure/persistence/document/entities/file.schema.ts`\n- Repository: `FileDocumentRepository` in `src/files/infrastructure/persistence/document/repositories/file.repository.ts`\n- Collection: `files` with `_id` and `path` fields\n\n### Database Selection\n\nThe module automatically selects the appropriate persistence implementation based on 
`databaseConfig().isDocumentDatabase`:\n\n```typescript\nconst infrastructurePersistenceModule = databaseConfig().isDocumentDatabase\n ? DocumentFilePersistenceModule\n : RelationalFilePersistenceModule;\n```\n\n## Upload Implementations\n\n### Local Storage (`FilesLocalModule`)\n\nStores files on the local filesystem using Multer with disk storage.\n\n**Endpoint**: `POST /api/v1/files/upload`\n\n- Requires JWT authentication\n- Accepts multipart form data with `file` field\n- Stores files in `./files` directory\n- Generates random filename with original extension\n- Validates file type (jpg, jpeg, png, gif only)\n- Enforces `maxFileSize` limit\n\n**Download**: `GET /api/v1/files/:path` serves files from the local storage.\n\n### Direct S3 Upload (`FilesS3Module`)\n\nUploads files directly to AWS S3 via Multer's S3 storage engine.\n\n**Endpoint**: `POST /api/v1/files/upload`\n\n- Requires JWT authentication\n- Accepts multipart form data with `file` field\n- Uses `multer-s3` for streaming upload to S3\n- Sets `public-read` ACL on uploaded objects\n- Validates file type and size\n\n### Presigned URL (`FilesS3PresignedModule`)\n\nGenerates pre-signed URLs that clients use to upload directly to S3, bypassing the server.\n\n**Endpoint**: `POST /api/v1/files/upload`\n\n- Requires JWT authentication\n- Accepts JSON body with `fileName` and `fileSize`\n- Returns both the file record and a pre-signed `uploadSignedUrl`\n- Validates file type and size server-side before generating URL\n- URL expires in 3600 seconds (1 hour)\n\n**Request DTO**:\n```typescript\nclass FileUploadDto {\n @IsString() fileName: string;\n @IsNumber() fileSize: number;\n}\n```\n\n**Response**:\n```typescript\n{\n file: FileType;\n uploadSignedUrl: string; // Pre-signed PUT URL\n}\n```\n\n## Module Registration\n\nThe `FilesModule` dynamically imports the appropriate uploader based on configuration:\n\n```typescript\nconst infrastructureUploaderModule =\n fileConfig().driver === FileDriver.LOCAL\n ? 
FilesLocalModule\n : fileConfig().driver === FileDriver.S3\n ? FilesS3Module\n : FilesS3PresignedModule;\n```\n\n## Usage in Other Modules\n\nThe `FilesService` provides read operations for other parts of the application:\n\n```typescript\n@Injectable()\nexport class FilesService {\n constructor(private readonly fileRepository: FileRepository) {}\n\n findById(id: FileType['id']): Promise<NullableType<FileType>>;\n findByIds(ids: FileType['id'][]): Promise<FileType[]>;\n}\n```\n\nThe module exports both `FilesService` and the persistence module, allowing other modules to inject `FileRepository` directly for file creation or use `FilesService` for retrieval.\n\n## Cross-Module Integration\n\nThe File module integrates with the User module through mappers. When users have associated files (e.g., avatars), the user mappers transform file data:\n\n```\nUserRepository.findById() \n → UserMapper.toDomain() \n → FileMapper.toDomain() \n → FileType\n```\n\nThis allows file references in user entities to be automatically resolved to accessible URLs.","internationalization":"# Internationalization\n\n# Internationalization (i18n) Module\n\n## Overview\n\nThis module provides localized strings for the application's email templates and common UI elements. 
It uses a JSON-based translation system organized by language and namespace.\n\n## Supported Languages\n\n| Code | Language | Direction |\n|------|----------|-----------|\n| `ar` | Arabic | RTL |\n| `en` | English | LTR |\n| `es` | Spanish | LTR |\n| `fr` | French | LTR |\n| `hi` | Hindi | LTR |\n| `uk` | Ukrainian | LTR |\n| `zh` | Chinese (Simplified) | LTR |\n\n## Directory Structure\n\n```\nsrc/i18n/\n├── ar/\n│ ├── common.json\n│ ├── confirm-email.json\n│ ├── confirm-new-email.json\n│ └── reset-password.json\n├── en/\n│ └── ...\n├── es/\n│ └── ...\n├── fr/\n│ └── ...\n├── hi/\n│ └── ...\n├── uk/\n│ └── ...\n└── zh/\n └── ...\n```\n\n## Namespaces\n\nEach language directory contains the same set of translation files (namespaces):\n\n### `common.json`\nShared strings used across multiple templates:\n- `confirmEmail` — Label for email confirmation action\n- `resetPassword` — Label for password reset action\n\n### `confirm-email.json`\nContent for the initial email verification flow:\n- `text1` — Greeting\n- `text2` — Welcome message\n- `text3` — Call-to-action instruction\n\n### `confirm-new-email.json`\nContent for confirming a changed email address:\n- `text1` — Greeting\n- `text2` — Request to confirm new email\n- `text3` — Call-to-action instruction\n\n### `reset-password.json`\nContent for the password recovery flow:\n- `text1` — Problem statement (trouble signing in)\n- `text2` — Reassurance message\n- `text3` — Instructions\n- `text4` — Security disclaimer (ignore if not requested)\n\n## Usage\n\nConsume these translations using your application's i18n library. Typical usage patterns:\n\n```javascript\n// Example: loading translations\nimport en from './i18n/en/common.json';\nimport ar from './i18n/ar/common.json';\n\n// Example: accessing a translation key\nconst greeting = en.confirmEmail; // \"Confirm email\"\n```\n\n## Adding a New Language\n\nTo add support for a new language:\n\n1. 
Create a new directory under `src/i18n/` with the appropriate ISO 639-1 code\n2. Copy all four JSON files (`common.json`, `confirm-email.json`, `confirm-new-email.json`, `reset-password.json`) into the new directory\n3. Translate all string values\n4. Update any language detection logic in your application to recognize the new locale code\n\n## Adding a New Translation Key\n\nWhen adding new translatable strings:\n\n1. Add the key to **all** language files in the appropriate namespace\n2. Keep the key name consistent across languages (use English as the canonical key name)\n3. Maintain the same JSON structure across all language directories\n\nExample adding a new common string:\n\n```json\n// src/i18n/en/common.json\n{\n \"confirmEmail\": \"Confirm email\",\n \"resetPassword\": \"Reset password\",\n \"newKey\": \"New translated string\"\n}\n\n// src/i18n/ar/common.json\n{\n \"confirmEmail\": \"تأكيد البريد الإلكتروني\",\n \"resetPassword\": \"إعادة تعيين كلمة المرور\",\n \"newKey\": \"نص مترجم جديد\"\n}\n```\n\n## RTL Considerations\n\nThe Arabic (`ar`) translations use RTL (right-to-left) text direction. Ensure your UI handles RTL layout properly when rendering Arabic content. The translation content itself is already localized for Arabic-speaking users.","other-admin":"# Other — admin\n\n# Admin E2E Test Module\n\n## Overview\n\nThis module contains end-to-end (E2E) tests for the admin functionality of the application. The tests verify that the REST API endpoints related to authentication and user management work correctly when accessed with admin privileges.\n\nThe test suite uses **Supertest** to make HTTP requests against the running application, simulating real client interactions.\n\n## Test Files\n\n### `auth.e2e-spec.ts`\n\nTests admin authentication flows:\n\n- **Admin Login** (`POST /api/v1/auth/email/login`): Verifies that an admin user can successfully authenticate using email and password. 
The test validates that the response includes a JWT token and user object with email and role properties.\n\n### `users.e2e-spec.ts`\n\nTests user management operations that require admin privileges:\n\n- **User Creation** (`POST /api/v1/users`): Tests creating new users with valid data (returns 201) and validates that invalid email addresses are rejected (returns 422).\n- **User Update** (`PATCH /api/v1/users/:id`): Tests modifying existing user properties including email and password.\n- **Get Users** (`GET /api/v1/users`): Tests retrieving a paginated list of users, verifying that sensitive fields (hash, password) are excluded from responses.\n\n## Test Dependencies\n\nThe tests depend on several constants and enums defined elsewhere in the codebase:\n\n| Import | Source | Purpose |\n|--------|--------|---------|\n| `APP_URL` | `test/utils/constants` | Base URL for the test application |\n| `ADMIN_EMAIL` | `test/utils/constants` | Pre-existing admin credentials |\n| `ADMIN_PASSWORD` | `test/utils/constants` | Pre-existing admin credentials |\n| `RoleEnum` | `src/roles/roles.enum` | User role constants (e.g., admin, user) |\n| `StatusEnum` | `src/statuses/statuses.enum` | User status constants (e.g., active) |\n\n## Test Flow\n\n### Authentication Flow\n\n```\n┌─────────────────────────────────────────────────────────────┐\n│ Test Authentication Flow │\n├─────────────────────────────────────────────────────────────┤\n│ 1. POST /api/v1/auth/email/login │\n│ └─> Returns { token, user } │\n│ 2. 
Use token in Authorization header for subsequent │\n│ └─> Authorization: Bearer <token> │\n└─────────────────────────────────────────────────────────────┘\n```\n\n### User Management Flow\n\n```\n┌─────────────────────────────────────────────────────────────┐\n│ Admin User Management Flow │\n├─────────────────────────────────────────────────────────────┤\n│ │\n│ Login as Admin ──> Get Bearer Token │\n│ │ │\n│ ├──> POST /api/v1/users (Create user) │\n│ ├──> PATCH /api/v1/users/:id (Update user) │\n│ └──> GET /api/v1/users (List users) │\n│ │\n└─────────────────────────────────────────────────────────────┘\n```\n\n## Test Patterns\n\n### Bearer Token Authentication\n\nAfter logging in, the admin token is stored and used for subsequent authenticated requests:\n\n```typescript\n// Store token after login\napiToken = body.token;\n\n// Use in subsequent requests\n.request(app)\n .patch(`/api/v1/users/${userId}`)\n .auth(apiToken, { type: 'bearer' })\n```\n\n### Response Validation\n\nTests verify both HTTP status codes and response body structure:\n\n```typescript\n.expect(200)\n.expect(({ body }) => {\n expect(body.token).toBeDefined();\n expect(body.user.email).toBeDefined();\n});\n```\n\n### Data Cleanup via Login\n\nWhen creating test users, the tests verify credentials work by attempting login after creation:\n\n```typescript\n// Create user\nawait request(app).post('/api/v1/users').send({ ... 
});\n\n// Verify user can login\nawait request(app)\n .post('/api/v1/auth/email/login')\n .send({ email: newUserEmail, password: newUserPassword })\n .expect(200);\n```\n\n## API Endpoints Covered\n\n| Method | Endpoint | Description |\n|--------|----------|-------------|\n| POST | `/api/v1/auth/email/login` | Authenticate with email/password |\n| POST | `/api/v1/auth/email/register` | Register new user (self-service) |\n| GET | `/api/v1/users` | List all users (admin only) |\n| POST | `/api/v1/users` | Create new user (admin only) |\n| PATCH | `/api/v1/users/:id` | Update existing user (admin only) |\n\n## Running the Tests\n\nThese tests require the application to be running. Execute with:\n\n```bash\n# Start the application first, then run tests\nnpm run test:admin\n```\n\nOr run all E2E tests:\n\n```bash\nnpm run test:e2e\n```\n\n## Integration with Main Codebase\n\nThe tests validate the actual API implementation by making real HTTP requests. They depend on:\n\n- **Auth Module**: The `/api/v1/auth/*` endpoints for login and registration\n- **Users Module**:The `/api/v1/users` endpoints for CRUD operations\n- **Roles & Statuses**:Enum values used when creating users\n\nThe tests do not mock these services—any changes to the underlying API will cause test failures, making them effective regression tests for admin functionality.","other-agents":"# Other — agents\n\n# Agents Module\n\nThis module contains documentation and configuration for autonomous agents that operate on this repository. The agents handle issue tracking, triage, and workflow automation.\n\n## Overview\n\nThe agents module defines how autonomous agents interact with the repository's issue tracker and follow domain conventions. It consists of three documentation files that collectively define:\n\n1. **Domain conventions** — How agents should use consistent terminology\n2. **Issue tracking workflow** — How agents create and manipulate GitHub issues\n3. 
**Triage labels** — The state machine labels agents use to progress issues\n\n## Components\n\n### Domain Documentation (`domain.md`)\n\nEstablishes rules for agents to consume domain language correctly:\n\n- **Context file**: Agents reference `CONTEXT.md` at the repo root for terminology\n- **ADRs**: Agents check `docs/adr/*.md` before proposing changes that might conflict with past architectural decisions\n- **Updates**: When agents encounter new domain concepts, they should update `CONTEXT.md`; significant architectural choices get recorded in new ADRs\n\nThis ensures agents maintain consistent language and don't inadvertently contradict established architectural decisions.\n\n### Issue Tracker Integration (`issue-tracker.md`)\n\nDocuments the GitHub-based workflow agents use:\n\n| Operation | Command/Method |\n|-----------|----------------|\n| Read issues | `gh issue list`, `gh issue view <number>` |\n| Create issues | `gh issue create --title \"<title>\" --body \"<body>\"` |\n| Transition states | `gh issue edit <number> --add-label \"<label>\" --remove-label \"<label>\"` |\n\n**Automation rules:**\n- `to-issues` and `to-prd` agents create issues on GitHub\n- `triage` agent evaluates issues and applies labels\n\n### Triage Labels (`triage-labels.md`)\n\nDefines the label-based state machine the `triage` skill uses to move issues through their lifecycle:\n\n```mermaid\nstateDiagram-v2\n [*] --> needs-triage: Issue created\n needs-triage --> needs-info: More details required\n needs-triage --> ready-for-agent: Fully specified\n needs-triage --> ready-for-human: Needs implementation\n needs-triage --> wontfix: Rejected\n needs-info --> needs-triage: Info received\n ready-for-agent --> [*]: Work completed\n ready-for-human --> [*]: Implemented\n wontfix --> [*]: Closed\n```\n\n| Agent Role | Label | Purpose |\n|------------|-------|---------|\n| `needs-triage` | `needs-triage` | Maintainer evaluation pending |\n| `needs-info` | `question` | Waiting on reporter 
response |\n| `ready-for-agent` | `ready-for-agent` | Ready for autonomous work |\n| `ready-for-human` | `help wanted` | Requires human implementation |\n| `wontfix` | `wontfix` | Will not be actioned |\n\nIf a required label doesn't exist, the agent creates it automatically (works with both GitHub and GitLab).\n\n## Agent Interactions\n\nThe documented agents form a simple pipeline:\n\n```\nUser/External → GitHub Issue → triage (labels issue) → ready-for-agent/ready-for-human\n ↓\n to-issues / to-prd (creates follow-up issues)\n```\n\n## Relationship to Broader Codebase\n\nThis module is documentation-first — it defines conventions that other code modules should follow:\n\n- **Code contributors**: Check `CONTEXT.md` for domain terminology before introducing new concepts\n- **ADR authors**: Create new ADRs in `docs/adr/` for significant architectural decisions\n- **Agent developers**: Ensure any new agents respect the label conventions in `triage-labels.md`","other-claude-md":"# Other — CLAUDE.md\n\n# CLAUDE.md — Developer Guidance Configuration\n\nThis file is the **project-specific configuration for Claude Code** (the AI assistant). It defines how Claude should behave when working in this repository, including required workflows, available tools, and safety protocols.\n\n> **Note:** Unlike typical code modules, CLAUDE.md is not executable code. It is a configuration file that provides instructions and context to an AI assistant. The \"call graph\" section shows no execution flows because this file defines behavior rather than implementing it.\n\n---\n\n## Overview\n\nCLAUDE.md serves as the contract between the development team and Claude Code. It ensures that:\n\n1. **Safety checks are mandatory** — Impact analysis must run before any code modification\n2. **Tools are used correctly** — GitNexus tools for code intelligence, `gh` CLI for issue tracking\n3. **Best practices are enforced** — No direct symbol renaming, no commits without change verification\n4. 
**Context is available** — References to domain docs, ADR records, and execution flow documentation\n\n---\n\n## File Structure\n\n```\nCLAUDE.md\n├── Agent Skills (references to external skill docs)\n│ ├── Issue tracker (gh CLI)\n│ ├── Triage labels\n│ └── Domain docs (CONTEXT.md, docs/adr/)\n└── GitNexus Code Intelligence\n ├── Mandatory workflows (Always Do)\n ├── Prohibited actions (Never Do)\n ├── Resource URLs\n └── CLI reference table\n```\n\n---\n\n## Agent Skills Section\n\nThis section references external skill files that define domain-specific behavior for Claude.\n\n### Issue Tracker\n\n```markdown\n### Issue tracker\nIssues are tracked on GitHub using the `gh` CLI. See `docs/agents/issue-tracker.md`.\n```\n\nClaude uses the GitHub CLI (`gh`) for all issue-related operations rather than the web interface. The referenced document likely contains:\n- Command patterns for creating, updating, and closing issues\n- Label application workflows\n- Issue search and filter patterns\n\n### Triage Labels\n\n```markdown\n### Triage labels\nUsing standard triage roles mapped to GitHub labels. See `docs/agents/triage-labels.md`.\n```\n\nDefines the label schema used for issue triage. This ensures consistent labeling across the project and enables automated triage workflows.\n\n### Domain Docs\n\n```markdown\n### Domain docs\nSingle-context layout with `CONTEXT.md` and `docs/adr/`. See `docs/agents/domain.md`.\n```\n\nEstablishes the documentation layout:\n- **`CONTEXT.md`** — Project-wide context for the current working directory\n- **`docs/adr/`** — Architecture Decision Records\n\nThe referenced skill file likely explains how Claude should read and maintain these documents.\n\n---\n\n## GitNexus Code Intelligence Section\n\nThis is the core of the configuration. 
GitNexus is a code intelligence platform that indexes the codebase and provides MCP (Model Context Protocol) tools for:\n\n- Impact analysis (what breaks if I change X?)\n- Change detection (what did my changes affect?)\n- Code exploration (find execution flows by concept)\n- Symbol context (callers, callees, participation in execution flows)\n\n### Index Statistics\n\n```\nThis project is indexed by GitNexus as **control-center** (2163 symbols, 3856 relationships, 88 execution flows).\n```\n\n| Metric | Value | Implication |\n|--------|-------|-------------|\n| Symbols | 2,163 | Functions, classes, methods tracked |\n| Relationships | 3,856 | Call graph edges, dependencies |\n| Execution Flows | 88 | Distinct code paths through the system |\n\nThese statistics indicate a medium-sized codebase with moderate complexity. The relationship count (~1.8x symbol count) suggests a well-connected architecture.\n\n---\n\n## Mandatory Workflows (\"Always Do\")\n\nThese rules **must** be followed on every relevant action:\n\n### Impact Analysis Requirement\n\n```markdown\n- **MUST run impact analysis before editing any symbol.** Before modifying a function, class, or method, run `gitnexus_impact({target: \"symbolName\", direction: \"upstream\"})` and report the blast radius (direct callers, affected processes, risk level) to the user.\n```\n\n**What it does:**\n- `gitnexus_impact` analyzes a symbol and returns:\n - Direct callers (functions/methods that call this symbol)\n - Indirect callers (callers of callers, recursively)\n - Affected execution flows\n - Risk level assessment (LOW / MEDIUM / HIGH / CRITICAL)\n\n**Workflow:**\n1. User requests a code change\n2. Claude identifies the target symbol\n3. Runs `gitnexus_impact` with `direction: \"upstream\"` (finds what calls this symbol)\n4. Reports blast radius to user\n5. 
Only proceeds after user acknowledgment\n\n### Change Detection Requirement\n\n```markdown\n- **MUST run `gitnexus_detect_changes()` before committing** to verify your changes only affect expected symbols and execution flows.\n```\n\n**What it does:**\n- Compares staged changes against the index\n- Reports all symbols and execution flows affected by the changes\n- Warns if changes affect symbols outside the intended scope\n\n**Purpose:** Prevents accidental modifications to unrelated code. This is especially important in large refactoring operations.\n\n### Risk Warning Requirement\n\n```markdown\n- **MUST warn the user** if impact analysis returns HIGH or CRITICAL risk before proceeding with edits.\n```\n\nClaude must not proceed with HIGH or CRITICAL changes without explicit user confirmation after seeing the risk assessment.\n\n### Code Exploration\n\n```markdown\n- When exploring unfamiliar code, use `gitnexus_query({query: \"concept\"})` to find execution flows instead of grepping. 
It returns process-grouped results ranked by relevance.\n```\n\n**Why this matters:**\n- `grep` finds text matches but not semantic relationships\n- `gitnexus_query` finds execution flows containing relevant concepts\n- Results are grouped by process and ranked by relevance\n\n### Symbol Context\n\n```markdown\n- When you need full context on a specific symbol — callers, callees, which execution flows it participates in — use `gitnexus_context({name: \"symbolName\"})`.\n```\n\nProvides a complete picture of a symbol's role in the codebase.\n\n---\n\n## Prohibited Actions (\"Never Do\")\n\nThese actions are **never allowed** regardless of context:\n\n| Rule | Rationale |\n|------|-----------|\n| Never edit a symbol without running impact analysis first | Prevents unintended breakage |\n| Never ignore HIGH or CRITICAL risk warnings | Ensures user awareness of consequences |\n| Never rename symbols with find-and-replace | Use `gitnexus_rename` which understands call graph relationships and updates all references correctly |\n| Never commit without running `gitnexus_detect_changes()` | Verifies scope of changes matches intent |\n\n---\n\n## Resource URLs\n\nThe configuration provides shortcuts to GitNexus views:\n\n```markdown\n| Resource | Use for |\n|----------|---------|\n| `gitnexus://repo/control-center/context` | Codebase overview, check index freshness |\n| `gitnexus://repo/control-center/clusters` | All functional areas |\n| `gitnexus://repo/control-center/processes` | All execution flows |\n| `gitnexus://repo/control-center/process/{name}` | Step-by-step execution trace |\n```\n\nThese URLs can be opened in a GitNexus-aware client to visualize the codebase.\n\n---\n\n## CLI Reference Table\n\nMaps tasks to the appropriate skill files:\n\n| Task | Read this skill file |\n|------|---------------------|\n| Understand architecture / \"How does X work?\" | `.claude/skills/gitnexus/gitnexus-exploring/SKILL.md` |\n| Blast radius / \"What breaks if I change X?\" | 
`.claude/skills/gitnexus/gitnexus-impact-analysis/SKILL.md` |\n| Trace bugs / \"Why is X failing?\" | `.claude/skills/gitnexus/gitnexus-debugging/SKILL.md` |\n| Rename / extract / split / refactor | `.claude/skills/gitnexus/gitnexus-refactoring/SKILL.md` |\n| Tools, resources, schema reference | `.claude/skills/gitnexus/gitnexus-guide/SKILL.md` |\n| Index, status, clean, wiki CLI commands | `.claude/skills/gitnexus/gitnexus-cli/SKILL.md` |\n\n---\n\n## Integration with the Rest of the Codebase\n\nCLAUDE.md connects to the broader development infrastructure:\n\n```\n┌─────────────────────────────────────────────────────────────┐\n│ CLAUDE.md │\n│ (configuration for Claude Code) │\n└─────────────────────┬───────────────────────────────────────┘\n │\n ┌─────────────┼─────────────┐\n ▼ ▼ ▼\n ┌─────────┐ ┌──────────┐ ┌─────────────┐\n │ GitNexus│ │ gh CLI │ │ Domain Docs│\n │ Index │ │ (GitHub) │ │ (CONTEXT.md,│\n │ │ │ │ │ docs/adr/) │\n └─────────┘ └──────────┘ └─────────────┘\n │ │ │\n ▼ ▼ ▼\n ┌─────────────────────────────────────────┐\n │ Project Codebase │\n │ (2163 symbols, 3856 relationships) │\n └─────────────────────────────────────────┘\n```\n\n### GitNexus Integration\n\nThe GitNexus MCP tools are the primary mechanism for safe code modification:\n\n1. **Before editing** → `gitnexus_impact` to understand blast radius\n2. **During exploration** → `gitnexus_query` to find relevant flows\n3. **When renaming** → `gitnexus_rename` to update all references\n4. 
**Before committing** → `gitnexus_detect_changes` to verify scope\n\n### GitHub Integration\n\nThe `gh` CLI is used for issue tracking, ensuring all project management happens in the same terminal environment as code operations.\n\n### Documentation Integration\n\nClaude maintains awareness of project context through:\n- `CONTEXT.md` — Current working directory context\n- `docs/adr/` — Architecture decisions\n- `docs/agents/*` — Agent-specific skills\n\n---\n\n## Modifying This File\n\nIf you need to update CLAUDE.md:\n\n1. **Understand the impact** — This file shapes all future Claude interactions\n2. **Check index freshness** — Run `npx gitnexus analyze` if the project statistics change significantly\n3. **Maintain the structure** — The \"Always Do\" / \"Never Do\" pattern is essential for safety\n4. **Update references** — If skill files move, update the paths in this configuration\n\n---\n\n## Index Refresh Procedure\n\nIf Claude warns that the GitNexus index is stale:\n\n```bash\nnpx gitnexus analyze\n```\n\nThis command:\n- Re-scans the codebase\n- Updates symbol definitions\n- Rebuilds relationship graph\n- Recomputes execution flows\n\nRun this after significant refactoring or adding large new modules.","other-commitlint-config-js":"# Other — commitlint.config.js\n\n# commitlint.config.js\n\n## Overview\n\nThis module configures **commitlint**, a linting tool for Git commit messages. 
It ensures all commit messages in the repository follow a consistent format.\n\n## Configuration\n\n```javascript\nmodule.exports = {\n extends: ['@commitlint/config-conventional'],\n};\n```\n\nThe configuration extends `@commitlint/config-conventional`, which enforces the [Conventional Commits](https://www.conventionalcommits.org/) specification.\n\n## What This Configures\n\n### Conventional Commits Format\n\nWhen enabled, commit messages must follow this structure:\n\n```\n<type>(<scope>): <description>\n\n[optional body]\n\n[optional footer(s)]\n```\n\n**Allowed `<type>` values:**\n- `feat` — a new feature\n- `fix` — a bug fix\n- `docs` — documentation only changes\n- `style` — changes that don't affect code meaning (formatting, semicolons, etc.)\n- `refactor` — code change that neither fixes a bug nor adds a feature\n- `perf` — code change that improves performance\n- `test` — adding or correcting tests\n- `build` — changes to build system or dependencies\n- `ci` — changes to CI configuration\n- `chore` — other changes that don't modify src or test files\n- `revert` — reverts a previous commit\n\n**Examples:**\n\n```\nfeat(auth): add OAuth2 login support\nfix(api): handle null response from user endpoint\ndocs(readme): update installation instructions\n```\n\n## Integration\n\nThis configuration is typically used in combination with:\n\n- **Husky** — runs commitlint as a Git pre-commit hook\n- **CI pipelines** — validates commits on pull requests\n\n## Customization\n\nTo add custom rules, extend the configuration object:\n\n```javascript\nmodule.exports = {\n extends: ['@commitlint/config-conventional'],\n rules: {\n // Add custom rules here\n 'body-max-line-length': [2, 'always', 100],\n 'scope-case': [2, 'always', 'lower-case'],\n },\n};\n```","other-context-md":"# Other — CONTEXT.md\n\n# CONTEXT.md — Project Context Documentation\n\n## Overview\n\n`CONTEXT.md` is a documentation file within the project that serves as a centralized location for storing 
project-level context, background information, and architectural decisions. Unlike executable code modules, this file contains static documentation that helps developers understand the broader context of the codebase.\n\n## Purpose\n\nThis module provides:\n\n- **Project background** — High-level explanation of what the project does and why it exists\n- **Architectural context** — Decisions and trade-offs made during design\n- **Terminology** — Domain-specific terms and definitions used throughout the codebase\n- **Links to resources** — External documentation, specifications, or references\n\n## Relationship to Codebase\n\n```\n┌─────────────────────────────────────────────┐\n│ CONTEXT.md │\n│ (Documentation / No executable code) │\n└─────────────────────────────────────────────┘\n```\n\nThis file has no dependencies on other modules and is not referenced by any executable code. It exists purely as a human-readable reference for developers.\n\n## Usage\n\nDevelopers should consult this file when:\n\n- Onboarding to the project\n- Understanding architectural decisions\n- Clarifying domain terminology\n- Finding links to external resources or specifications\n\n## Notes\n\n- This is a documentation-only module with no execution flow\n- It does not contain functions, classes, or any executable logic\n- Content is maintained manually by project contributors","other-coolify-docker-compose-yaml":"# Other — coolify-docker-compose.yaml\n\n# coolify-docker-compose.yaml\n\nDocker Compose configuration for the Coolify application stack. 
This file defines the containerized services that make up the development and deployment environment.\n\n## Overview\n\nThe compose file defines four services:\n\n- **maildev** — Email development server with SMTP and web UI\n- **adminer** — Database administration interface\n- **redis** — (commented out) Redis cache server\n- **api** — Main application API service\n\n## Services\n\n### maildev\n\nA development-focused SMTP server with a web interface for testing email functionality.\n\n```yaml\nmaildev:\n build:\n context: .\n dockerfile: maildev.Dockerfile\n ports:\n - ${MAIL_CLIENT_PORT}:1080\n - ${MAIL_PORT}:1025\n```\n\n| Port | Purpose |\n|------|---------|\n| `${MAIL_CLIENT_PORT}` → `1080` | Web UI for viewing captured emails |\n| `${MAIL_PORT}` → `1025` | SMTP server for sending test emails |\n\nThe service is built from a custom `maildev.Dockerfile` in the project root. Environment variables control the exposed ports, allowing flexibility across different environments.\n\n### adminer\n\nA lightweight database management tool supporting MySQL, PostgreSQL, SQLite, and other databases.\n\n```yaml\nadminer:\n image: adminer\n restart: always\n ports:\n - 8080:8080\n```\n\n- Uses the official `adminer` image from Docker Hub\n- Always restarts if the container stops\n- Accessible at `http://localhost:8080`\n\n### redis (commented out)\n\nA placeholder Redis service for caching and session storage.\n\n```yaml\n# redis:\n# image: redis:7-alpine\n# ports:\n# - 6379:6379\n```\n\nUncomment to enable Redis support. 
Uses the Alpine-based image for minimal footprint.\n\n### api\n\nThe core application service.\n\n```yaml\napi:\n build:\n context: .\n dockerfile: Dockerfile\n ports:\n - ${APP_PORT}:${APP_PORT}\n```\n\n- Built from the project's main `Dockerfile`\n- Exposed on `${APP_PORT}` (both host and container)\n- This is the primary service — all other services support its functionality\n\n## Environment Variables\n\nThe compose file references these environment variables:\n\n| Variable | Service | Description |\n|----------|---------|-------------|\n| `APP_PORT` | api | Main application port |\n| `MAIL_CLIENT_PORT` | maildev | Web UI port for maildev |\n| `MAIL_PORT` | maildev | SMTP port for maildev |\n\nThese should be defined in a `.env` file or provided by the Coolify platform.\n\n## Architecture\n\n```mermaid\ngraph TD\n subgraph \"Coolify Stack\"\n API[api service]\n Maildev[maildev]\n Adminer[adminer]\n Redis[redis (optional)]\n end\n \n User --> API\n User --> Adminer\n User --> Maildev\n \n API -.-> Redis\n API -.-> Maildev\n```\n\n## Usage\n\n1. Ensure environment variables are set in `.env`:\n ```\n APP_PORT=8000\n MAIL_CLIENT_PORT=1080\n MAIL_PORT=1025\n ```\n\n2. Start all services:\n ```bash\n docker-compose up -d\n ```\n\n3. Access services:\n - API: `http://localhost:${APP_PORT}`\n - Adminer: `http://localhost:8080`\n - Maildev UI: `http://localhost:${MAIL_CLIENT_PORT}`\n\n## Notes\n\n- The `api` service is the only required service for the application to function\n- `maildev` and `adminer` are development tools — consider removing them in production\n- Redis is disabled by default; uncomment to enable caching support","other-docker-compose-document-ci-yaml":"# Other — docker-compose.document.ci.yaml\n\n# docker-compose.document.ci.yaml\n\n## Overview\n\nThis Docker Compose configuration defines the service infrastructure for running **end-to-end (E2E) tests** on the document service in a CI environment. 
It provides isolated, reproducible dependencies—MongoDB, Maildev, and the API service—required for executing automated tests.\n\nThe configuration uses environment variables from `.env` files (via `${DATABASE_USERNAME}` and `${DATABASE_PASSWORD}`) and an `env-example-document` file, making it portable across different CI environments.\n\n## Service Architecture\n\n```mermaid\ngraph TB\n subgraph CI Environment\n API[api<br/>document.e2e.Dockerfile] -->|connects| MONGO[mongo<br/>27017]\n API -->|sends test emails| MAIL[maildev<br/>1080/1025]\n end\n```\n\n## Services\n\n### mongo\n\n| Property | Value |\n|-----------|-------|\n| Image | `mongo:8.2.6` |\n| Port | `27017` (internal) |\n| Restart policy | `always` |\n\n**Purpose:** Provides a MongoDB database instance for the document service during test execution. The API service connects to this database to store and retrieve documents.\n\n**Configuration:**\n- Initialized with root credentials from environment variables (`DATABASE_USERNAME`, `DATABASE_PASSWORD`)\n- Port 27017 is exposed internally to other services but not published to the host\n\n---\n\n### maildev\n\n| Property | Value |\n|-----------|-------|\n| Build context | Current directory |\n| Dockerfile | `maildev.Dockerfile` |\n| Ports | `1080` (web UI), `1025` (SMTP) |\n\n**Purpose:** Provides a local SMTP server for capturing emails sent by the application during tests. The web UI on port 1080 allows developers to inspect captured emails, while port 1025 accepts SMTP connections from the application.\n\n**Use case:** Tests that verify email notifications (e.g., document sharing, password reset) can send emails to Maildev and assert on the captured messages.\n\n---\n\n### redis (commented)\n\n```yaml\n# redis:\n# image: redis:7-alpine\n# expose:\n# - 6379\n```\n\n**Purpose:** Optional Redis service for applications that require caching or session storage. 
Currently disabled—uncomment to enable if the document service uses Redis.\n\n---\n\n### api\n\n| Property | Value |\n|-----------|-------|\n| Build context | Current directory |\n| Dockerfile | `document.e2e.Dockerfile` |\n| Environment | `env-example-document` |\n\n**Purpose:** The main test target—the document service application built specifically for E2E testing. This Dockerfile likely:\n\n- Installs test dependencies\n- Configures the application to connect to `mongo:27017` and `maildev:1025`\n- Runs the test suite (e.g., via `npm test`, `pytest`, or a custom test runner)\n\n## Environment Configuration\n\nThe services reference two sources for environment variables:\n\n1. **Host environment** — `${DATABASE_USERNAME}` and `${DATABASE_PASSWORD}` are interpolated from the host's environment or a `.env` file\n2. **`env-example-document`** — Environment file mounted to the `api` service containing application-specific configuration (database connection strings, feature flags, etc.)\n\n## Usage in CI\n\nTypical CI pipeline integration:\n\n```bash\n# Start services\ndocker-compose -f docker-compose.document.ci.yaml up -d\n\n# Run tests (embedded in the api service)\ndocker-compose -f docker-compose.document.ci.yaml up --abort-on-container-exit\n\n# View test results\ndocker-compose -f docker-compose.document.ci.yaml logs api\n```\n\nThe `--abort-on-container-exit` flag ensures the pipeline fails if any container exits with a non-zero status (test failure).\n\n## Connecting Services\n\nServices communicate over the internal Docker network using service names as hostnames:\n\n| From | To | Hostname |\n|------|-----|----------|\n| api | mongo | `mongo:27017` |\n| api | maildev | `maildev:1025` (SMTP) |\n\n## Extending This Configuration\n\nTo add Redis support for a service that requires it:\n\n```yaml\nredis:\n image: redis:7-alpine\n expose:\n - 6379\n```\n\nThen update `env-example-document` to include the Redis connection string (e.g., 
`redis://redis:6379`).","other-docker-compose-document-test-yaml":"# Other — docker-compose.document.test.yaml\n\n# docker-compose.document.test.yaml\n\n## Overview\n\nThis Docker Compose file defines the test environment for the document service. It spins up a complete stack required to run integration and end-to-end tests, including a MongoDB database, Maildev for email testing, and the API service itself.\n\n## Services\n\n### mongo\n\n```yaml\nmongo:\n image: mongo:8.2.6\n restart: always\n environment:\n MONGO_INITDB_ROOT_USERNAME: ${DATABASE_USERNAME}\n MONGO_INITDB_ROOT_PASSWORD: ${DATABASE_PASSWORD}\n expose:\n - 27017\n```\n\nMongoDB 8.2.6 instance used as the test database. The container:\n\n- Runs with authentication enabled (credentials loaded from environment variables)\n- Exposes port 27017 internally to other services\n- Uses `restart: always` to ensure it stays running during test sessions\n\n### maildev\n\n```yaml\nmaildev:\n build:\n context: .\n dockerfile: maildev.Dockerfile\n expose:\n - 1080\n - 1025\n```\n\nA custom-built Maildev container for capturing outgoing emails during tests:\n\n- **Port 1080**: Web UI for viewing captured emails\n- **Port 1025**: SMTP server for the application to send mail to\n\nThe service is built from `maildev.Dockerfile` in the project root.\n\n### redis (commented)\n\n```yaml\n# redis:\n# image: redis:7-alpine\n# expose:\n# - 6379\n```\n\nRedis 7 is available but disabled by default. 
Uncomment to enable if your tests require session storage or caching functionality.\n\n### api\n\n```yaml\napi:\n build:\n context: .\n dockerfile: document.test.Dockerfile\n env_file:\n - env-example-document\n volumes:\n - ./src:/usr/src/app/src\n - ./test:/usr/src/app/test\n```\n\nThe main application service for testing:\n\n- Built from `document.test.Dockerfile` (likely includes test dependencies)\n- Loads environment configuration from `env-example-document`\n- Mounts `src` and `test` directories for live reloading during development\n\n## Architecture\n\n```mermaid\ngraph TB\n subgraph Test Network\n API[api service<br/>:3000]\n Mongo[mongo<br/>:27017]\n Maildev[maildev<br/>:1080, :1025]\n Redis[redis<br/>:6379<br/>--disabled]\n end\n \n API -->|reads/writes| Mongo\n API -->|sends mail| Maildev\n API -->|optional| Redis\n```\n\n## Usage\n\n### Running Tests\n\n```bash\ndocker-compose -f docker-compose.document.test.yaml up -d\ndocker-compose -f docker-compose.document.test.yaml exec api npm test\n```\n\n### Viewing Captured Emails\n\nOpen `http://localhost:1080` in your browser while tests are running to inspect email output.\n\n### Environment Variables\n\nEnsure the following variables are set in your environment or `env-example-document`:\n\n| Variable | Purpose |\n|----------|---------|\n| `DATABASE_USERNAME` | MongoDB root user |\n| `DATABASE_PASSWORD` | MongoDB root password |\n\n## Connection Details\n\nServices communicate over the internal Docker network using these hostnames:\n\n- `mongo` — MongoDB connection (port 27017)\n- `maildev` — SMTP at port 1025, Web UI at port 1080\n- `redis` — Redis at port 6379 (when enabled)\n- `api` — Application port (defined in `document.test.Dockerfile`)","other-docker-compose-document-yaml":"# Other — docker-compose.document.yaml\n\n# docker-compose.document.yaml\n\nDevelopment environment orchestration for the document management service using Docker Compose.\n\n## Overview\n\nThis file defines the local 
development infrastructure for a document-based application. It spins up all required services—MongoDB, a database admin UI, an email testing service, and the API application—using containerized services with environment-driven configuration.\n\n## Architecture\n\n```mermaid\ngraph TB\n subgraph \"docker-compose network\"\n api[\"api (document.Dockerfile)\"]\n mongo[\"mongo (MongoDB 8.2.6)\"]\n mongo-express[\"mongo-express (Admin UI)\"]\n maildev[\"maildev (SMTP + Web UI)\"]\n end\n \n api --> mongo\n api --> maildev\n mongo-express --> mongo\n \n subgraph \"Host ports\"\n APP_PORT[\"APP_PORT → api\"]\n DATABASE_PORT[\"DATABASE_PORT → mongo\"]\n 8081[\"8081 → mongo-express\"]\n MAIL_CLIENT_PORT[\"MAIL_CLIENT_PORT → maildev web\"]\n MAIL_PORT[\"MAIL_PORT → maildev SMTP\"]\n end\n```\n\n## Services\n\n### `api`\n\nThe main application service. Built from `document.Dockerfile` in the current directory.\n\n- **Port**: `${APP_PORT}:${APP_PORT}` — maps the container port to the same host port (defined by environment variable)\n- **Dependencies**: Connects to MongoDB and Maildev at runtime\n\n### `mongo`\n\nMongoDB database instance for document storage.\n\n- **Image**: `mongo:8.2.6`\n- **Port**: `${DATABASE_PORT}:27017`\n- **Authentication**: Enabled via `MONGO_INITDB_ROOT_USERNAME` and `MONGO_INITDB_ROOT_PASSWORD` environment variables\n- **Persistence**: Data stored in the `boilerplate-mongo-db` Docker volume\n\n### `mongo-express`\n\nWeb-based MongoDB administration interface for development debugging.\n\n- **Image**: `mongo-express` (latest)\n- **Port**: `8081:8081` (fixed — not environment-driven)\n- **Authentication**: Same credentials as MongoDB via `ME_CONFIG_BASICAUTH_USERNAME` and `ME_CONFIG_BASICAUTH_PASSWORD`\n- **Connection**: Connects to `mongo` service using `mongodb://${DATABASE_USERNAME}:${DATABASE_PASSWORD}@mongo:${DATABASE_PORT}/`\n\n### `maildev`\n\nLocal SMTP server for testing email functionality without sending real emails.\n\n- **Build**: From 
`maildev.Dockerfile` in the current directory\n- **Ports**:\n - `${MAIL_CLIENT_PORT}:1080` — web interface for viewing captured emails\n - `${MAIL_PORT}:1025` — SMTP server for the application to send mail to\n\n### `redis` (commented)\n\nOptional Redis service for caching or session storage. Uncomment to enable.\n\n- **Image**: `redis:7-alpine`\n- **Port**: `6379:6379`\n\n## Environment Variables\n\nThe compose file references these environment variables (typically defined in a `.env` file):\n\n| Variable | Purpose |\n|----------|---------|\n| `APP_PORT` | Host port for the API application |\n| `DATABASE_PORT` | Host port for MongoDB |\n| `DATABASE_USERNAME` | MongoDB root username |\n| `DATABASE_PASSWORD` | MongoDB root password |\n| `MAIL_CLIENT_PORT` | Host port for Maildev web UI |\n| `MAIL_PORT` | Host port for Maildev SMTP |\n\n## Usage\n\n```bash\n# Start all services\ndocker-compose -f docker-compose.document.yaml up -d\n\n# View logs\ndocker-compose -f docker-compose.document.yaml logs -f api\n\n# Stop all services\ndocker-compose -f docker-compose.document.yaml down\n```\n\n## Development Workflow\n\n1. **Start the environment**: Run `docker-compose -f docker-compose.document.yaml up -d`\n2. **Access the API**: `http://localhost:${APP_PORT}`\n3. **View emails sent by the app**: `http://localhost:${MAIL_CLIENT_PORT}` (port 1080 by default)\n4. **Manage the database**: `http://localhost:8081`\n5. **Make code changes**: Rebuild the `api` service with `docker-compose -f docker-compose.document.yaml up -d --build api`\n\n## Volume\n\n- `boilerplate-mongo-db` — Persists MongoDB data across container restarts. 
Data survives `docker-compose down` but is removed with `docker-compose down -v`.","other-docker-compose-relational-ci-yaml":"# Other — docker-compose.relational.ci.yaml\n\n# docker-compose.relational.ci.yaml\n\n## Overview\n\nThis Docker Compose configuration defines the service stack for running end-to-end (e2e) tests against a relational database backend in a CI environment. It provisions a PostgreSQL database, mail testing infrastructure, and the API service under test.\n\n## Use Case\n\nThis compose file is invoked during CI pipelines to validate the application against a real PostgreSQL database rather than mocks. It supports:\n\n- **Relational database testing** — Tests run against actual PostgreSQL\n- **Email integration testing** — Maildev captures outgoing emails for verification\n- **E2E test execution** — The `api` service runs the relational e2e test suite\n\n## Services\n\n### postgres\n\n```yaml\npostgres:\n image: postgres:17.9-alpine\n expose:\n - 5432\n environment:\n POSTGRES_USER: ${DATABASE_USERNAME}\n POSTGRES_PASSWORD: ${DATABASE_PASSWORD}\n POSTGRES_DB: ${DATABASE_NAME}\n```\n\nThe PostgreSQL database container. 
Uses the Alpine variant for minimal image size.\n\n| Port | Purpose |\n|------|---------|\n| 5432 | PostgreSQL default port (internal to compose network) |\n\n**Configuration:** All database credentials are injected via environment variables, allowing CI to supply test-specific credentials without hardcoding secrets.\n\n---\n\n### maildev\n\n```yaml\nmaildev:\n build:\n context: .\n dockerfile: maildev.Dockerfile\n expose:\n - 1080\n - 1025\n```\n\nA custom-built Maildev instance for capturing and inspecting emails sent by the application during tests.\n\n| Port | Purpose |\n|------|---------|\n| 1080 | Web UI for viewing captured emails |\n| 1025 | SMTP server for receiving outgoing mail |\n\n**Build context:** The Dockerfile is located at the compose file's root directory (`.`).\n\n---\n\n### redis (commented)\n\n```yaml\n# redis:\n# image: redis:7-alpine\n# expose:\n# - 6379\n```\n\nRedis support is present but disabled by default. Uncomment to include Redis as a caching or session store backend for tests that require it.\n\n---\n\n### api\n\n```yaml\napi:\n build:\n context: .\n dockerfile: relational.e2e.Dockerfile\n env_file:\n - env-example-relational\n```\n\nThe main application service that executes the e2e test suite.\n\n| Property | Value |\n|----------|-------|\n| Build context | Current directory |\n| Dockerfile | `relational.e2e.Dockerfile` |\n| Environment file | `env-example-relational` |\n\nThis service depends on `postgres` and `maildev` being available (implicit dependency via service composition order and shared network).\n\n## Environment Variables\n\nThe compose file expects these variables to be defined in the CI environment:\n\n| Variable | Purpose |\n|----------|---------|\n| `DATABASE_USERNAME` | PostgreSQL user |\n| `DATABASE_PASSWORD` | PostgreSQL password |\n| `DATABASE_NAME` | Database name to create on startup |\n\nAdditional configuration is loaded from `env-example-relational`, which should contain application-specific settings such 
as connection strings and feature flags.\n\n## Usage\n\n```bash\n# Run the CI test stack\ndocker-compose -f docker-compose.relational.ci.yaml up --build\n\n# Run in detached mode (typical for CI)\ndocker-compose -f docker-compose.relational.ci.yaml up -d --build\n\n# View test output\ndocker-compose -f docker-compose.relational.ci.yaml logs api\n```\n\n## Architecture\n\n```mermaid\ngraph TD\n subgraph CI Test Stack\n A[api<br/>relational.e2e.Dockerfile] -->|JDBC| P[postgres<br/>:5432]\n A -->|SMTP| M[maildev<br/>:1025]\n end\n \n style A fill:#f9f,stroke:#333\n style P fill:#bbf,stroke:#333\n style M fill:#bfb,stroke:#333\n```\n\nThe `api` service is the test runner. It connects to `postgres` for data persistence and `maildev` for email verification during test execution.\n\n## Relationship to Other Files\n\n| File | Relationship |\n|------|--------------|\n| `maildev.Dockerfile` | Builds the maildev service image |\n| `relational.e2e.Dockerfile` | Builds the API test runner image |\n| `env-example-relational` | Template environment file for relational tests |\n| `docker-compose.*.yaml` | Other compose variants for different test scenarios (e.g., NoSQL, unit tests) |\n\n## Notes\n\n- All services use `expose` rather than `ports`, making them accessible only within the compose network. This is appropriate for CI where external access is not needed.\n- The `api` service does not explicitly declare `depends_on`. In production CI pipelines, ensure `postgres` and `maildev` are healthy before the test runner starts, either via health checks or orchestration wait scripts.\n- The Redis service is provided as a convenience for tests that require it; uncomment and configure as needed.","other-docker-compose-relational-test-yaml":"# Other — docker-compose.relational.test.yaml\n\n# docker-compose.relational.test.yaml\n\n## Overview\n\nThis Docker Compose configuration defines a complete test environment for the relational database portion of the application. 
It provisions a PostgreSQL database for data storage, Maildev for email testing, and mounts the application source and test directories for development and testing workflows.\n\n## Services\n\n### postgres\n\nA PostgreSQL 17.9 Alpine container serving as the relational database for tests.\n\n| Setting | Value |\n|---------|-------|\n| Image | postgres:17.9-alpine |\n| Exposed Port | 5432 |\n| Database | ${DATABASE_NAME} |\n| Username | ${DATABASE_USERNAME} |\n| Password | ${DATABASE_PASSWORD} |\n\nThe service reads database credentials from environment variables, allowing configuration without committing sensitive values to the repository.\n\n---\n\n### maildev\n\nAn email testing and development server that captures outgoing emails and provides a web interface for inspection.\n\n| Setting | Value |\n|---------|-------|\n| Build Context | Current directory |\n| Dockerfile | maildev.Dockerfile |\n| Web Interface Port | 1080 |\n| SMTP Port | 1025 |\n\nThe service uses a custom Dockerfile (`maildev.Dockerfile`) rather than a pre-built image, suggesting custom configuration or tooling for email testing.\n\n---\n\n### redis\n\n```\n# redis:\n# image: redis:7-alpine\n# expose:\n# - 6379\n```\n\nRedis is commented out by default. Uncomment to include a Redis 7 container if the application requires caching or session storage during tests.\n\n---\n\n### api\n\nThe main application service running in test mode.\n\n| Setting | Value |\n|---------|-------|\n| Build Context | Current directory |\n| Dockerfile | relational.test.Dockerfile |\n| Environment File | env-example-relational |\n| Mounted Volumes | `./src:/usr/src/app/src`, `./test:/usr/src/app/test` |\n\nThe API service mounts the source and test directories directly into the container, enabling hot-reload behavior during development. 
Changes to files on the host are immediately available inside the container without rebuilding.\n\n---\n\n## Usage\n\nStart the test environment:\n\n```bash\ndocker-compose -f docker-compose.relational.test.yaml up\n```\n\nStart with rebuilds:\n\n```bash\ndocker-compose -f docker-compose.relational.test.yaml up --build\n```\n\nRun tests only (after services are running):\n\n```bash\ndocker-compose -f docker-compose.relational.test.yaml exec api npm test\n```\n\n## Configuration Requirements\n\nBefore running, ensure these environment variables are set (either in your shell or in a `.env` file):\n\n- `DATABASE_USERNAME` — PostgreSQL user\n- `DATABASE_PASSWORD` — PostgreSQL password\n- `DATABASE_NAME` — Name of the test database\n\nThe `env-example-relational` file should be copied to `.env` or adjusted to match your local environment:\n\n```bash\ncp env-example-relational .env\n```\n\n## Architecture\n\n```mermaid\nflowchart TB\n subgraph Test Environment\n API[api<br/>relational.test.Dockerfile]\n PG[postgres<br/>5432]\n MD[maildev<br/>1080, 1025]\n end\n \n API --> |reads/writes| PG\n API --> |sends email| MD\n \n HostSrc[\"Host: ./src\"] -.-> |mount| API\n HostTest[\"Host: ./test\"] -.-> |mount| API\n```\n\nThe API service is the primary consumer—it connects to PostgreSQL for data persistence and Maildev for testing email functionality without sending real emails. The host machine's source and test directories are mounted into the API container for rapid iteration during test-driven development.","other-docker-compose-yaml":"# Other — docker-compose.yaml\n\n# Docker Compose Configuration\n\n## Overview\n\nThis `docker-compose.yaml` defines the complete local development environment for the application. It orchestrates five services: a PostgreSQL database, Maildev for email testing, Adminer for database administration, and the main API application. 
Redis is available but commented out by default.\n\n## Architecture\n\n```mermaid\ngraph TB\n subgraph \"Docker Network\"\n API[api<br/>:APP_PORT]\n DB[postgres<br/>:5432]\n MAIL[maildev<br/>:1025/1080]\n ADMIN[adminer<br/>:8080]\n end\n \n API --> |reads/writes| DB\n API --> |sends email| MAIL\n Developer --> |manage data| ADMIN\n Developer --> |access app| API\n```\n\n## Services\n\n### PostgreSQL Database\n\n```yaml\npostgres:\n image: postgres:17.9-alpine\n ports:\n - ${DATABASE_PORT}:5432\n volumes:\n - boilerplate-db:/var/lib/postgresql/data\n environment:\n POSTGRES_USER: ${DATABASE_USERNAME}\n POSTGRES_PASSWORD: ${DATABASE_PASSWORD}\n POSTGRES_DB: ${DATABASE_NAME}\n```\n\nThe primary data store using PostgreSQL 17.9 (Alpine variant for smaller image size). Data persists in the `boilerplate-db` named volume, surviving container restarts.\n\n**Environment Variables:**\n| Variable | Purpose |\n|----------|---------|\n| `DATABASE_PORT` | Host port mapping (default: 5432) |\n| `DATABASE_USERNAME` | PostgreSQL superuser name |\n| `DATABASE_PASSWORD` | Superuser password |\n| `DATABASE_NAME` | Initial database to create |\n\n### Maildev\n\n```yaml\nmaildev:\n build:\n context: .\n dockerfile: maildev.Dockerfile\n ports:\n - ${MAIL_CLIENT_PORT}:1080\n - ${MAIL_PORT}:1025\n```\n\nA local SMTP server and web interface for testing email functionality. The application sends emails here during development instead of to real addresses.\n\n**Ports:**\n| Port | Service |\n|------|---------|\n| `${MAIL_CLIENT_PORT}` | Web UI (access at `http://localhost:${MAIL_CLIENT_PORT}`) |\n| `${MAIL_PORT}` | SMTP server (configure your app to use this as the mail host) |\n\n### Adminer\n\n```yaml\nadminer:\n image: adminer\n restart: always\n ports:\n - 8080:8080\n```\n\nA lightweight database administration interface. 
Access at `http://localhost:8080` and connect to the PostgreSQL service using:\n- **System:** PostgreSQL\n- **Server:** `postgres`\n- **Username:** `${DATABASE_USERNAME}`\n- **Password:** `${DATABASE_PASSWORD}`\n- **Database:** `${DATABASE_NAME}`\n\n### API Application\n\n```yaml\napi:\n build:\n context: .\n dockerfile: Dockerfile\n ports:\n - ${APP_PORT}:${APP_PORT}\n```\n\nThe main application service. The Dockerfile in the project root builds this image. The port mapping exposes the application on `${APP_PORT}`.\n\n### Redis (Optional)\n\n```yaml\n# redis:\n# image: redis:7-alpine\n# ports:\n# - 6379:6379\n```\n\nUncomment to enable Redis for caching or session storage. Requires corresponding configuration in the API service.\n\n## Configuration\n\nAll configurable values come from environment variables. Create a `.env` file in the project root:\n\n```bash\n# Database\nDATABASE_PORT=5432\nDATABASE_USERNAME=postgres\nDATABASE_PASSWORD=your_secure_password\nDATABASE_NAME=appdb\n\n# Maildev\nMAIL_CLIENT_PORT=1080\nMAIL_PORT=1025\n\n# Application\nAPP_PORT=3000\n```\n\n## Usage\n\n### Starting the Environment\n\n```bash\ndocker-compose up -d\n```\n\nAdd `-d` to run detached. View logs with:\n\n```bash\ndocker-compose logs -f\n```\n\n### Stopping the Environment\n\n```bash\ndocker-compose down\n```\n\nUse `-v` to also remove volumes (destroys database data):\n\n```bash\ndocker-compose down -v\n```\n\n### Rebuilding Services\n\nWhen Dockerfile changes occur:\n\n```bash\ndocker-compose up -d --build\n```\n\n## Service Dependencies\n\nThe `api` service depends on `postgres` being ready. Docker Compose handles this automatically, but application-level retry logic is recommended for production-style startup sequences.\n\n## Volume\n\n```yaml\nvolumes:\n boilerplate-db:\n```\n\nA named volume preserving PostgreSQL data across container lifecycle changes. 
Mounts to `/var/lib/postgresql/data` inside the container.","other-dockerfile":"# Other — Dockerfile\n\n# Dockerfile — Relational Database Variant\n\n## Overview\n\nThis Dockerfile builds the container image for the NestJS application's **relational database** variant. It produces a production-ready container that includes all dependencies, compiled TypeScript code, and startup scripts for orchestrating service initialization.\n\nThe \"relational\" designation (evident in `env-example-relational` and `startup.relational.dev.sh`) indicates this image is configured for use with traditional relational databases (PostgreSQL, MySQL, etc.) rather than alternative data stores.\n\n## Base Image\n\n```dockerfile\nFROM node:24.14.1-alpine\n```\n\nThe image uses `node:24.14.1-alpine`, a minimal Linux distribution with Node.js pre-installed. Alpine keeps the final image size small while providing a complete Node.js runtime.\n\n## Build Process\n\n### 1. System Dependencies\n\n```dockerfile\nRUN apk add --no-cache bash\nRUN npm i -g @nestjs/cli typescript ts-node\n```\n\n- **bash**: Installed via Alpine's package manager. Required by the startup scripts.\n- **@nestjs/cli**: NestJS command-line interface for building and managing the application.\n- **typescript**: TypeScript compiler available globally.\n- **ts-node**: TypeScript execution engine for Node.js (used during development/build).\n\n### 2. Dependency Installation\n\n```dockerfile\nCOPY package*.json /tmp/app/\nRUN cd /tmp/app && npm install\n\nCOPY . /usr/src/app\nRUN cp -a /tmp/app/node_modules /usr/src/app\n```\n\nThis follows Docker best practices for layer caching:\n\n1. `package.json` and `package-lock.json` are copied to a temporary directory first\n2. `npm install` runs in that temp location, creating the `node_modules` directory\n3. The full application source is copied to `/usr/src/app`\n4. 
`node_modules` is copied from the temp location to the final destination\n\nThis approach ensures that dependency installation doesn't re-run on every source code change—only when `package*.json` changes.\n\n### 3. Helper Scripts\n\n```dockerfile\nCOPY ./wait-for-it.sh /opt/wait-for-it.sh\nCOPY ./startup.relational.dev.sh /opt/startup.relational.dev.sh\nRUN chmod +x /opt/wait-for-it.sh\nRUN chmod +x /opt/startup.relational.dev.sh\nRUN sed -i 's/\\r//g' /opt/wait-for-it.sh\nRUN sed -i 's/\\r//g' /opt/startup.relational.dev.sh\n```\n\nTwo scripts are copied to `/opt/`:\n\n- **wait-for-it.sh**: A utility that blocks execution until a service (typically a database) is ready. This prevents the application from starting before its dependencies are available.\n- **startup.relational.dev.sh**: The main startup orchestration script for the relational variant.\n\nThe `sed` commands remove Windows-style line endings (CRLF → LF), ensuring the scripts execute correctly in the Linux container.\n\n### 4. Environment Configuration\n\n```dockerfile\nWORKDIR /usr/src/app\nRUN if [ ! -f .env ]; then cp env-example-relational .env; fi\n```\n\n- Sets the working directory to `/usr/src/app`\n- If no `.env` file exists, copies `env-example-relational` to create the default environment configuration\n\n### 5. Build\n\n```dockerfile\nRUN npm run build\n```\n\nCompiles the TypeScript source code. The NestJS CLI reads the `nest-cli.json` configuration to determine what gets built (typically the `dist/` or `build/` directory).\n\n### 6. 
Startup Command\n\n```dockerfile\nCMD [\"/opt/startup.relational.dev.sh\"]\n```\n\nThe container starts by executing the startup script, which handles service orchestration before launching the NestJS application.\n\n## Image Structure\n\n```\n/usr/src/app/ # Application source and built artifacts\n ├── dist/ # Compiled JavaScript (from npm run build)\n ├── node_modules/ # Dependencies\n └── .env # Environment variables (created if missing)\n\n/opt/\n ├── wait-for-it.sh # Service readiness checker\n └── startup.relational.dev.sh # Startup orchestrator\n```\n\n## Usage\n\n### Building the Image\n\n```bash\ndocker build -t app-relational .\n```\n\n### Running the Container\n\n```bash\ndocker run app-relational\n```\n\n### Overriding Environment Variables\n\nMount a custom `.env` file or set variables at runtime:\n\n```bash\ndocker run -e DATABASE_URL=postgres://user:pass@host:5432/db app-relational\n```\n\n## Dependencies and Prerequisites\n\nThe container expects the following external services to be available:\n\n- **Relational database** (PostgreSQL, MySQL, etc.) — The `wait-for-it.sh` script typically polls for database availability before the application starts\n- **Environment variables** — Database connection strings, API keys, and other configuration must be provided via `.env` or environment variables\n\n## Key Considerations\n\n| Aspect | Detail |\n|--------|--------|\n| **Node version** | 24.14.1 (latest stable at time of image creation) |\n| **Base size** | Alpine-based for minimal footprint |\n| **Build artifact** | TypeScript compiled to JavaScript via NestJS CLI |\n| **Startup behavior** | Blocks until database is reachable |\n| **Non-root** | Runs as default Node.js user (not explicitly set, but Alpine Node images run as non-root by default) |","other-docs":"# Other — docs\n\n# Docs Module\n\nThis module contains the project's documentation suite, covering installation, architecture, API usage, and operational guides. 
The documentation is organized as Markdown files in the `docs/` directory.\n\n## Module Overview\n\nThe docs module serves as the primary reference for developers and operators working with the application. It covers:\n\n- **Installation & Setup** — Environment prerequisites, Docker services, and application initialization\n- **Architecture** — Hexagonal architecture patterns and module structure\n- **Authentication** — Email-based and external OAuth flows, JWT strategy, refresh tokens\n- **Database** — TypeORM (PostgreSQL/MySQL) and Mongoose (MongoDB) configuration\n- **CLI** — Code generation commands for resources and properties\n- **Symphony Integration** — The automated coding agent orchestration system\n- **Operations** — Benchmarking, testing, and dependency management\n\n## Documentation Index\n\n```mermaid\ngraph TD\n A[Docs Module] --> B[Getting Started]\n A --> C[Core Concepts]\n A --> D[API Reference]\n A --> E[Operations]\n \n B --> B1[INSTALL_VERIFY.md]\n B --> B2[architecture.md]\n \n C --> C1[auth.md]\n C --> C2[database.md]\n C --> C3[cli.md]\n \n D --> D1[SPEC.md]\n \n E --> E1[benchmarking.md]\n E --> E2[automatic-update-dependencies.md]\n E --> E3[design-review-1-opus.md]\n```\n\n### Getting Started\n\n| Document | Purpose |\n|----------|---------|\n| [INSTALL_VERIFY.md](./INSTALL_VERIFY.md) | Step-by-step installation, Docker setup, and manual feature verification |\n| [architecture.md](./architecture.md) | Hexagonal architecture pattern, module structure, and domain organization |\n\n### Core Concepts\n\n| Document | Purpose |\n|----------|---------|\n| [auth.md](./auth.md) | Authentication flows (email, Apple, Facebook, Google), JWT strategy, refresh tokens |\n| [database.md](./database.md) | Database configuration, migrations, seeding, performance optimization |\n| [cli.md](./cli.md) | Command-line tools for generating resources and adding properties |\n\n### API Reference\n\n| Document | Purpose |\n|----------|---------|\n| 
[SPEC.md](./SPEC.md) | Symphony service specification — orchestration, workspace management, agent integration |\n\n### Operations\n\n| Document | Purpose |\n|----------|---------|\n| [benchmarking.md](./benchmarking.md) | Apache Benchmark setup for load testing |\n| [automatic-update-dependencies.md](./automatic-update-dependencies.md) | Renovate integration for automated dependency updates |\n| [design-review-1-opus.md](./design-review-1-opus.md) | Design review for Symphony Adaptation, covering Plane.so and Jules CLI integration |\n\n## Key Documentation Relationships\n\n### Installation Flow\n\n```\nINSTALL_VERIFY.md → architecture.md → auth.md → database.md\n```\n\nThe installation guide establishes a running application. The architecture document explains the design patterns used. Authentication and database docs cover the two primary infrastructure integrations.\n\n### Development Flow\n\n```\narchitecture.md → cli.md → database.md → auth.md\n```\n\nDevelopers use the CLI to generate resources following the hexagonal architecture pattern. Database configuration follows, then authentication integration.\n\n### Symphony Integration\n\n```\nINSTALL_VERIFY.md (Section 5) → SPEC.md → design-review-1-opus.md\n```\n\nSection 5 of the installation guide covers Symphony verification. The SPEC.md defines the complete service specification. 
The design review document provides context for the Plane.so and Jules integration decisions.\n\n## Document Conventions\n\n### Code Blocks\n\nAll documents use fenced code blocks with language identifiers:\n\n```bash\n# Shell commands\nnpm run start:dev\n\n# Environment configuration\nDATABASE_HOST=localhost\n\n# JSON payloads\n{\"email\": \"test@example.com\", \"password\": \"secret\"}\n```\n\n### Configuration Examples\n\nEnvironment and YAML configurations use consistent formatting:\n\n```env\n# Relational database\nDATABASE_HOST=localhost\nDATABASE_PORT=5432\n```\n\n```yaml\n# Workflow configuration\ntracker:\n kind: linear\n endpoint: https://api.linear.app/graphql\n api_key: $LINEAR_API_KEY\n```\n\n### API References\n\nREST endpoints follow the project's convention:\n\n```\nPOST /api/v1/auth/email/register\nGET /api/v1/auth/me\nPOST /api/v1/auth/refresh\n```\n\n## Contributing to Documentation\n\nWhen adding new features:\n\n1. **Update relevant docs** — New features should have corresponding documentation\n2. **Follow existing structure** — Use the same heading levels and formatting conventions\n3. **Include code examples** — Show actual commands, configurations, and API calls\n4. 
**Cross-reference** — Link to related documents where appropriate\n\n### Documentation Style\n\n- Use clear, direct language\n- Prefer active voice\n- Include practical examples\n- Show expected outputs when helpful\n- Document both success and error cases\n\n## Related Modules\n\nThe docs module references several code modules:\n\n- **Auth Module** — `src/auth/` — JWT strategy, sessions, external logins\n- **Users Module** — `src/users/` — User entity and repository patterns\n- **Database** — `src/database/` — Migrations, seeds, connection handling\n- **Symphony** — `src/symphony/` — Orchestrator, workspace manager, tracker client","other-document-dockerfile":"# Other — document.Dockerfile\n\n# document.Dockerfile\n\n## Overview\n\nThis Dockerfile builds the container image for the **document service** — a NestJS-based microservice in the application ecosystem. It produces a production-ready container that runs the compiled application with all necessary dependencies and configuration.\n\n## Base Image\n\n```dockerfile\nFROM node:24.14.1-alpine\n```\n\nThe image uses Node.js 24.14.1 on Alpine Linux, a lightweight minimal distribution. Alpine is chosen for its small footprint (~5MB base), which results in faster image pulls and reduced attack surface.\n\n## Installation Phase\n\n```dockerfile\nRUN apk add --no-cache bash\nRUN npm i -g @nestjs/cli typescript ts-node\n```\n\nTwo installation steps occur:\n\n1. **bash** — Added because some startup scripts (particularly `wait-for-it.sh`) require bash features not available in Alpine's default `/bin/sh`\n2. **Global npm packages** — `@nestjs/cli`, `typescript`, and `ts-node` are installed globally to support build and development workflows within the container\n\n## Dependency Installation\n\n```dockerfile\nCOPY package*.json /tmp/app/\nRUN cd /tmp/app && npm install\n\nCOPY . 
/usr/src/app\nRUN cp -a /tmp/app/node_modules /usr/src/app\n```\n\nDependencies are installed in a two-step process for Docker layer caching efficiency:\n\n1. `package.json` and `package-lock.json` are copied to a temporary location and `npm install` runs there\n2. The full source code is copied to the final location\n3. `node_modules` from the temp directory is copied to the application directory\n\nThis approach ensures that dependency installation doesn't need to re-run when only source files change.\n\n## Helper Scripts\n\n```dockerfile\nCOPY ./wait-for-it.sh /opt/wait-for-it.sh\nCOPY ./startup.document.dev.sh /opt/startup.document.dev.sh\nRUN chmod +x /opt/wait-for-it.sh\nRUN chmod +x /opt/startup.document.dev.sh\nRUN sed -i 's/\\r//g' /opt/wait-for-it.sh\nRUN sed -i 's/\\r//g' /opt/startup.document.dev.sh\n```\n\nTwo scripts are copied to `/opt/`:\n\n| Script | Purpose |\n|--------|---------|\n| `wait-for-it.sh` | Waits for external services (e.g., databases) to be ready before starting the application |\n| `startup.document.dev.sh` | Main startup script that launches the document service |\n\nThe `sed` commands strip Windows-style carriage returns (`\\r`), which can cause script execution failures on Linux.\n\n## Environment Configuration\n\n```dockerfile\nRUN if [ ! -f .env ]; then cp env-example-document .env; fi\n```\n\nIf `.env` does not exist, it is created from `env-example-document`. This ensures the container has a valid environment configuration on first run.\n\n## Build Phase\n\n```dockerfile\nRUN npm run build\n```\n\nThe NestJS application is compiled. This runs the `build` script from `package.json`, which typically uses `@nestjs/cli` to compile TypeScript to JavaScript.\n\n## Startup\n\n```dockerfile\nCMD [\"/opt/startup.document.dev.sh\"]\n```\n\nThe container starts by executing the startup script. 
This script typically:\n\n- Uses `wait-for-it.sh` to wait for dependencies (database, message queue, etc.)\n- Launches the NestJS application\n\n## Architecture Diagram\n\n```mermaid\nflowchart TD\n A[Node:24.14.1-Alpine] --> B[Install bash & global packages]\n B --> C[Install npm dependencies]\n C --> D[Copy source code & scripts]\n D --> E[Create .env if missing]\n E --> F[Build application]\n F --> G[Run startup script]\n G --> H[Container running]\n```\n\n## Required Files\n\nFor the build to succeed, these files must exist in the build context:\n\n| File | Purpose |\n|------|---------|\n| `package.json` | NPM dependencies and scripts |\n| `package-lock.json` | Locked dependency versions |\n| `env-example-document` | Template for environment variables |\n| `wait-for-it.sh` | Service readiness checker |\n| `startup.document.dev.sh` | Application startup script |\n\n## Configuration Notes\n\n- The `.env` file is created at container runtime if missing, using `env-example-document` as a template\n- The startup script handles waiting for external dependencies — do not modify the `CMD` to run `npm start` directly, as dependencies may not be ready\n- The container runs as root (default behavior). For production deployments, consider adding a non-root user for security","other-document-e2e-dockerfile":"# Other — document.e2e.Dockerfile\n\n# document.e2e.Dockerfile\n\n## Overview\n\nThis Dockerfile builds the containerized environment for running end-to-end (E2E) tests on the document service. 
It sets up a complete NestJS application with all dependencies, build tooling, and startup scripts required to execute the test suite in an isolated Docker environment.\n\n## Base Image\n\n```dockerfile\nFROM node:24.14.1-alpine\n```\n\nThe image uses Node.js 24.14.1 on Alpine Linux, a minimal distribution that keeps the container lightweight while providing the latest Node.js features.\n\n## Key Components\n\n### System Dependencies\n\n```dockerfile\nRUN apk add --no-cache bash\n```\n\nBash is installed to support the shell scripts used for service orchestration. Alpine ships with `sh` (BusyBox ash) by default, but some test scripts require Bash-specific syntax.\n\n### Global npm Packages\n\n```dockerfile\nRUN npm i -g @nestjs/cli typescript ts-node\n```\n\nThree packages are installed globally:\n\n- **@nestjs/cli** — NestJS command-line interface for generating and managing NestJS projects\n- **typescript** — TypeScript compiler for building the application\n- **ts-node** — TypeScript execution engine for running Node.js applications directly from TypeScript source\n\n### Dependency Installation\n\n```dockerfile\nCOPY package*.json /tmp/app/\nRUN cd /tmp/app && npm install\n\nCOPY . /usr/src/app\nRUN cp -a /tmp/app/node_modules /usr/src/app\n```\n\nDependencies are installed in a temporary directory first, then copied to the final location. 
This Docker pattern (installing before copying source code) enables layer caching — if `package*.json` hasn't changed, Docker reuses the cached dependency layer even when source code changes.\n\n### Shell Scripts\n\nTwo scripts are copied into the container:\n\n| Script | Purpose |\n|--------|---------|\n| `wait-for-it.sh` | Blocks execution until a service (typically a database) is ready to accept connections |\n| `startup.document.ci.sh` | Orchestrates the E2E test startup sequence |\n\n```dockerfile\nCOPY ./wait-for-it.sh /opt/wait-for-it.sh\nCOPY ./startup.document.ci.sh /opt/startup.document.ci.sh\nRUN chmod +x /opt/wait-for-it.sh\nRUN chmod +x /opt/startup.document.ci.sh\nRUN sed -i 's/\\r//g' /opt/wait-for-it.sh\nRUN sed -i 's/\\r//g' /opt/startup.document.ci.sh\n```\n\nThe `sed` commands remove Windows-style carriage returns (`\\r\\n`) that may be introduced if scripts were edited on Windows, ensuring consistent line ending behavior in the container.\n\n### Build Process\n\n```dockerfile\nWORKDIR /usr/src/app\nRUN echo \"\" > .env\nRUN npm run build\n```\n\nThe application is built using the `npm run build` command, which invokes the TypeScript compiler via the NestJS CLI. An empty `.env` file is created to prevent missing environment file errors during startup.\n\n### Entrypoint\n\n```dockerfile\nCMD [\"/opt/startup.document.ci.sh\"]\n```\n\nThe container starts by executing the startup script, which typically:\n\n1. Waits for dependent services (databases, message queues) using `wait-for-it.sh`\n2. Runs database migrations or seed data\n3. Executes the E2E test suite\n\n## Usage\n\nBuild and run the E2E test container:\n\n```bash\ndocker build -f document.e2e.Dockerfile -t document-e2e .\ndocker run document-e2e\n```\n\nOr with docker-compose:\n\n```yaml\nservices:\n document-e2e:\n build:\n context: .\n dockerfile: document.e2e.Dockerfile\n```\n\n## Integration with CI/CD\n\nThis Dockerfile is designed for continuous integration environments. 
The startup script (`startup.document.ci.sh`) handles the complete test lifecycle:\n\n1. Service dependency resolution\n2. Test database setup\n3. Test execution\n4. Exit code propagation (pass/fail)\n\nThe container produces a non-zero exit code if any test fails, enabling CI systems to detect test failures and fail the build appropriately.\n\n## File Dependencies\n\nThe Dockerfile expects these files in the build context:\n\n- `package.json` and `package-lock.json` — Application dependencies\n- `wait-for-it.sh` — Service readiness checker\n- `startup.document.ci.sh` — Test orchestration script\n- Source code in the current directory — Copied to `/usr/src/app`\n\n## Notes\n\n- The container uses Alpine Linux for minimal size but includes Bash for script compatibility\n- Node.js version 24.14.1 provides the latest LTS features at the time of image creation\n- Layer ordering optimizes rebuild times during development — source code changes don't invalidate the dependency cache","other-document-test-dockerfile":"# Other — document.test.Dockerfile\n\n# document.test.Dockerfile\n\n## Overview\n\nThis Dockerfile builds a containerized test environment for the document service. It creates a self-contained Node.js application with all dependencies, scripts, and configuration needed to run integration or end-to-end tests for the document module.\n\n## Base Image\n\n```dockerfile\nFROM node:24.14.1-alpine\n```\n\nThe container uses Node.js 24.14.1 on Alpine Linux, providing a minimal, fast-loading runtime environment. Alpine Linux keeps the image size small while delivering a full Node.js ecosystem.\n\n## Dependencies\n\n### System Packages\n\n```dockerfile\nRUN apk add --no-cache bash\n```\n\nBash is installed to execute the startup and wait scripts. 
While Alpine ships with `/bin/sh`, the included scripts require Bash-specific syntax.\n\n### Global npm Packages\n\n```dockerfile\nRUN npm i -g @nestjs/cli typescript ts-node\n```\n\n| Package | Purpose |\n|---------|---------|\n| `@nestjs/cli` | NestJS CLI for generating and managing NestJS applications |\n| `typescript` | TypeScript compiler for type-checking and transpilation |\n| `ts-node` | Execute TypeScript directly without pre-compilation |\n\nThese packages enable running the NestJS application in development mode without a separate build step.\n\n## Application Setup\n\n### Dependency Installation\n\n```dockerfile\nCOPY package*.json /tmp/app/\nRUN cd /tmp/app && npm install\n```\n\nThe build process copies only `package.json` and `package-lock.json` first, then runs `npm install`. This leverages Docker's layer caching—if the lock file hasn't changed, Docker reuses the cached dependency layer rather than reinstalling everything.\n\n### Code Deployment\n\n```dockerfile\nCOPY . /usr/src/app\n```\n\nThe entire application source code is copied into the container at `/usr/src/app`.\n\n## Supporting Scripts\n\nTwo shell scripts are copied and prepared for execution:\n\n```dockerfile\nCOPY ./wait-for-it.sh /opt/wait-for-it.sh\nRUN chmod +x /opt/wait-for-it.sh\nCOPY ./startup.document.test.sh /opt/startup.document.test.sh\nRUN chmod +x /opt/startup.document.test.sh\nRUN sed -i 's/\\r//g' /opt/wait-for-it.sh\nRUN sed -i 's/\\r//g' /opt/startup.document.test.sh\n```\n\n| Script | Location | Purpose |\n|--------|----------|---------|\n| `wait-for-it.sh` | `/opt/wait-for-it.sh` | Blocks until a service (typically a database) is ready |\n| `startup.document.test.sh` | `/opt/startup.document.test.sh` | Main entry point that orchestrates test execution |\n\nThe `sed` commands remove Windows-style carriage returns (`\\r`), ensuring the scripts run correctly on Linux regardless of how they were edited on the host system.\n\n## Environment 
Configuration\n\n```dockerfile\nRUN echo \"\" > .env\n```\n\nAn empty `.env` file is created in the working directory. This ensures the application has an environment file to read from, even if no explicit variables are set. Tests may populate this with required configuration at runtime.\n\n## Working Directory\n\n```dockerfile\nWORKDIR /usr/src/app\n```\n\nSets the active directory for subsequent commands and the default location where the startup script executes.\n\n## Startup Command\n\n```dockerfile\nCMD [\"/opt/startup.document.test.sh\"]\n```\n\nThe container launches by executing the startup script. This script typically:\n\n1. Waits for dependent services (databases, message queues) using `wait-for-it.sh`\n2. Starts the NestJS application or runs the test suite directly\n3. Captures and reports test results\n\n## Usage\n\n### Building the Image\n\n```bash\ndocker build -f document.test.Dockerfile -t document-test .\n```\n\n### Running Tests\n\n```bash\ndocker run --rm document-test\n```\n\n### Running with Service Dependencies\n\n```bash\ndocker-compose up --build document-test\n```\n\nWhen used with Docker Compose, the `wait-for-it.sh` script ensures the container waits for databases or other services defined in the compose file before executing tests.\n\n## Architecture Notes\n\nThis Dockerfile follows a standard pattern for containerized testing:\n\n1. **Minimal base** — Alpine Linux keeps the footprint small\n2. **Layer optimization** — Dependencies installed before source code maximizes cache reuse\n3. **Script-based startup** — Allows complex orchestration (waiting for services, setup steps) without modifying the image\n4. **Empty .env** — Provides a placeholder that can be overridden at runtime\n\nThe container is designed to be ephemeral—run tests, report results, then terminate. 
It does not persist state between runs.","other-env-example-document":"# Other — env-example-document\n\n# Environment Configuration Template\n\n## Overview\n\nThis file (`env-example-document`) serves as a template for all environment variables required by the NestJS API application. It provides default development values and documents every configuration option available to developers.\n\nThis is not a runtime configuration file—it's a reference template. Developers should copy these variables into a `.env` file and adjust values for their specific environment.\n\n---\n\n## Configuration Sections\n\n### Application Core\n\n| Variable | Description | Default |\n|----------|-------------|---------|\n| `NODE_ENV` | Runtime environment | `development` |\n| `APP_PORT` | HTTP server port | `3000` |\n| `APP_NAME` | Application display name | `NestJS API` |\n| `API_PREFIX` | URL prefix for all API routes | `api` |\n| `APP_FALLBACK_LANGUAGE` | Default language when none specified | `en` |\n| `APP_HEADER_LANGUAGE` | Custom header for language selection | `x-custom-lang` |\n| `FRONTEND_DOMAIN` | Frontend application URL | `http://localhost:3000` |\n| `BACKEND_DOMAIN` | Backend application URL | `http://localhost:3000` |\n\n**Usage in code:** These values are typically injected via `@nestjs/config` and used throughout the application for server configuration, i18n, and CORS settings.\n\n---\n\n### Database Configuration\n\n| Variable | Description | Default |\n|----------|-------------|---------|\n| `DATABASE_TYPE` | Database driver | `mongodb` |\n| `DATABASE_PORT` | Database port | `27017` |\n| `DATABASE_USERNAME` | Connection username | `root` |\n| `DATABASE_PASSWORD` | Connection password | `secret` |\n| `DATABASE_NAME` | Database name | `api` |\n| `DATABASE_URL` | Full connection string (overrides individual params) | `mongodb://mongo:27017` |\n\n**Usage in code:** The TypeORM or Mongoose module reads these values to establish database connections. 
When `DATABASE_URL` is provided, it takes precedence over individual connection parameters.\n\n---\n\n### File Storage\n\n| Variable | Description | Default |\n|----------|-------------|---------|\n| `FILE_DRIVER` | Storage provider | `local` |\n| `ACCESS_KEY_ID` | AWS access key (S3) | — |\n| `SECRET_ACCESS_KEY` | AWS secret key (S3) | — |\n| `AWS_S3_REGION` | AWS region | — |\n| `AWS_DEFAULT_S3_BUCKET` | S3 bucket name | — |\n\n**Supported drivers:** `local`, `s3`, `s3-presigned`\n\n- **local**: Files stored on server filesystem\n- **s3**: Files stored in AWS S3 with pre-signed URLs\n- **s3-presigned**: Same as s3, different URL expiration logic\n\n**Usage in code:** The file module uses this configuration to determine which storage service to instantiate. AWS credentials are required only when using S3-based drivers.\n\n---\n\n### Email / Mail\n\n| Variable | Description | Default |\n|----------|-------------|---------|\n| `MAIL_HOST` | SMTP server hostname | `maildev` |\n| `MAIL_PORT` | SMTP server port | `1025` |\n| `MAIL_USER` | SMTP authentication username | — |\n| `MAIL_PASSWORD` | SMTP authentication password | — |\n| `MAIL_IGNORE_TLS` | Skip TLS verification | `true` |\n| `MAIL_SECURE` | Use TLS/SSL | `false` |\n| `MAIL_REQUIRE_TLS` | Require TLS connection | `false` |\n| `MAIL_DEFAULT_EMAIL` | Sender email address | `noreply@example.com` |\n| `MAIL_DEFAULT_NAME` | Sender display name | `Api` |\n| `MAIL_CLIENT_PORT` | MailDev web UI port (development) | `1080` |\n\n**Usage in code:** The mail module configures Nodemailer with these parameters. 
In development, `maildev` (port 1025) provides a local SMTP server with a web interface at the `MAIL_CLIENT_PORT` for inspecting sent emails.\n\n---\n\n### Authentication & Security\n\n#### JWT Tokens\n\n| Variable | Description | Default |\n|----------|-------------|---------|\n| `AUTH_JWT_SECRET` | Secret key for access tokens | `secret` |\n| `AUTH_JWT_TOKEN_EXPIRES_IN` | Access token expiration | `15m` |\n| `AUTH_REFRESH_SECRET` | Secret key for refresh tokens | `secret_for_refresh` |\n| `AUTH_REFRESH_TOKEN_EXPIRES_IN` | Refresh token expiration | `3650d` (10 years) |\n| `AUTH_FORGOT_SECRET` | Secret key for password reset | `secret_for_forgot` |\n| `AUTH_FORGOT_TOKEN_EXPIRES_IN` | Password reset token expiration | `30m` |\n| `AUTH_CONFIRM_EMAIL_SECRET` | Secret key for email confirmation | `secret_for_confirm_email` |\n| `AUTH_CONFIRM_EMAIL_TOKEN_EXPIRES_IN` | Email confirmation token expiration | `1d` |\n\n**Usage in code:** The auth module uses these secrets to sign and verify various JWT token types. Each token purpose has its own secret for security isolation.\n\n> **Security Note:** Never use the default `secret` values in production. Generate strong, unique secrets for each token type.\n\n#### OAuth / Social Login\n\n**Facebook:**\n| Variable | Description |\n|----------|-------------|\n| `FACEBOOK_APP_ID` | Facebook Developer App ID |\n| `FACEBOOK_APP_SECRET` | Facebook Developer App Secret |\n\n**Google:**\n| Variable | Description |\n|----------|-------------|\n| `GOOGLE_CLIENT_ID` | Google OAuth Client ID |\n| `GOOGLE_CLIENT_SECRET` | Google OAuth Client Secret |\n\n**Apple:**\n| Variable | Description |\n|----------|-------------|\n| `APPLE_APP_AUDIENCE` | Apple App ID (array format) | `[]` |\n\n**Usage in code:** These credentials enable social login flows via Passport.js strategies. 
Leave empty to disable the respective provider.\n\n---\n\n### Worker / Queue\n\n| Variable | Description | Default |\n|----------|-------------|---------|\n| `WORKER_HOST` | Redis connection string for job queue | `redis://redis:6379/1` |\n\n**Usage in code:** The worker module connects to Redis for background job processing. The trailing `/1` indicates Redis database number 1 (separate from the default DB 0).\n\n---\n\n## Setup Instructions\n\n1. **Create your environment file:**\n ```bash\n cp env-example-document .env\n ```\n\n2. **Adjust values for your environment:**\n - Update database credentials\n - Set strong authentication secrets\n - Configure domain names for your deployment\n - Add OAuth credentials if using social login\n\n3. **Development with Docker:**\n The default values are configured for the Docker Compose setup included with this project. Ensure your `docker-compose.yml` service names match the hostnames in this file (e.g., `mongo`, `redis`, `maildev`).\n\n4. **Production requirements:**\n - Generate new JWT secrets (use a password generator or `openssl rand -base64 32`)\n - Configure real SMTP credentials\n - Set appropriate S3 credentials if using file storage\n - Update domain names to production URLs\n\n---\n\n## Variable Validation\n\nThe application validates required environment variables on startup. Missing or invalid values typically result in:\n\n- Application startup failure with descriptive error messages\n- Validation errors in the config module\n- Module initialization errors for unconfigured services\n\nRefer to the config module's validation schema for exact rules on each variable's format and requirements.","other-env-example-relational":"# Other — env-example-relational\n\n# Environment Configuration Reference\n\n## Overview\n\nThis file (`env-example-relational`) provides a complete example configuration for a NestJS relational API application. 
It defines all environment variables required for the application to function, covering application settings, database connectivity, file storage, email services, authentication, and worker processes.\n\nThis is a template file—copy it to `.env` in your project root and adjust values for your environment before starting the application.\n\n---\n\n## Application Configuration\n\n| Variable | Default | Description |\n|----------|---------|-------------|\n| `NODE_ENV` | `development` | Runtime environment: `development`, `production`, or `test` |\n| `APP_PORT` | `3000` | HTTP server port |\n| `APP_NAME` | `NestJS API` | Application display name |\n| `API_PREFIX` | `api` | URL prefix for all API routes (e.g., `/api/v1/...`) |\n| `APP_FALLBACK_LANGUAGE` | `en` | Default language when no header is specified |\n| `APP_HEADER_LANGUAGE` | `x-custom-lang` | Custom header name for language selection |\n| `FRONTEND_DOMAIN` | `http://localhost:3000` | Allowed frontend origin for CORS |\n| `BACKEND_DOMAIN` | `http://localhost:3000` | Backend base URL for generating absolute links |\n\n---\n\n## Database Configuration\n\nThe application supports PostgreSQL as the primary relational database.\n\n| Variable | Default | Description |\n|----------|---------|-------------|\n| `DATABASE_TYPE` | `postgres` | Database driver (currently only `postgres` is supported) |\n| `DATABASE_HOST` | `postgres` | Database server hostname |\n| `DATABASE_PORT` | `5432` | PostgreSQL default port |\n| `DATABASE_USERNAME` | `root` | Database user |\n| `DATABASE_PASSWORD` | `secret` | Database password |\n| `DATABASE_NAME` | `api` | Target database name |\n| `DATABASE_SYNCHRONIZE` | `false` | **Dangerous in production.** If `true`, TypeORM auto-syncs schema on startup. Disable in production. 
|\n| `DATABASE_MAX_CONNECTIONS` | `100` | Connection pool maximum size |\n| `DATABASE_SSL_ENABLED` | `false` | Enable SSL for database connections |\n| `DATABASE_REJECT_UNAUTHORIZED` | `false` | Reject invalid SSL certificates (set to `true` in production) |\n| `DATABASE_CA` | — | SSL CA certificate (PEM format) |\n| `DATABASE_KEY` | — | SSL private key (PEM format) |\n| `DATABASE_CERT` | — | SSL certificate (PEM format) |\n| `DATABASE_URL` | — | Alternative: full connection string (overrides individual parameters) |\n\n### SSL Configuration\n\nFor production deployments with SSL-enabled databases:\n\n```\nDATABASE_SSL_ENABLED=true\nDATABASE_REJECT_UNAUTHORIZED=true\nDATABASE_CA=-----BEGIN CERTIFICATE-----\n...\n-----END CERTIFICATE-----\n```\n\n---\n\n## File Storage Configuration\n\n| Variable | Default | Description |\n|----------|---------|-------------|\n| `FILE_DRIVER` | `local` | Storage backend: `local`, `s3`, or `s3-presigned` |\n| `ACCESS_KEY_ID` | — | AWS access key for S3 |\n| `SECRET_ACCESS_KEY` | — | AWS secret key for S3 |\n| `AWS_S3_REGION` | — | AWS region (e.g., `us-east-1`) |\n| `AWS_DEFAULT_S3_BUCKET` | — | S3 bucket name for file storage |\n\n### Driver Options\n\n- **`local`**: Files stored on server filesystem (default for development)\n- **`s3`**: Files stored in AWS S3, served through the API\n- **`s3-presigned`**: Files stored in AWS S3, served via pre-signed URLs (reduces server load)\n\n---\n\n## Mail Configuration\n\nEmail is handled through SMTP, with defaults configured for [Maildev](https://maildev.github.io/maildev/) (local development).\n\n| Variable | Default | Description |\n|----------|---------|-------------|\n| `MAIL_HOST` | `maildev` | SMTP server hostname |\n| `MAIL_PORT` | `1025` | SMTP server port |\n| `MAIL_USER` | — | SMTP authentication username |\n| `MAIL_PASSWORD` | — | SMTP authentication password |\n| `MAIL_IGNORE_TLS` | `true` | Ignore TLS errors (useful for local dev) |\n| `MAIL_SECURE` | `false` | Use implicit 
TLS (port 465) |\n| `MAIL_REQUIRE_TLS` | `false` | Require STARTTLS upgrade |\n| `MAIL_DEFAULT_EMAIL` | `noreply@example.com` | Sender email address |\n| `MAIL_DEFAULT_NAME` | `Api` | Sender display name |\n| `MAIL_CLIENT_PORT` | `1080` | Maildev web UI port for local development |\n\n---\n\n## Authentication & Security Configuration\n\nJWT-based authentication with separate secrets for different token types.\n\n| Variable | Default | Description |\n|----------|---------|-------------|\n| `AUTH_JWT_SECRET` | `secret` | Secret key for access tokens |\n| `AUTH_JWT_TOKEN_EXPIRES_IN` | `15m` | Access token expiration (`15m`, `1h`, etc.) |\n| `AUTH_REFRESH_SECRET` | `secret_for_refresh` | Secret key for refresh tokens |\n| `AUTH_REFRESH_TOKEN_EXPIRES_IN` | `3650d` | Refresh token expiration (10 years) |\n| `AUTH_FORGOT_SECRET` | `secret_for_forgot` | Secret key for password reset tokens |\n| `AUTH_FORGOT_TOKEN_EXPIRES_IN` | `30m` | Password reset token expiration |\n| `AUTH_CONFIRM_EMAIL_SECRET` | `secret_for_confirm_email` | Secret key for email confirmation tokens |\n| `AUTH_CONFIRM_EMAIL_TOKEN_EXPIRES_IN` | `1d` | Email confirmation token expiration |\n\n### Token Types\n\n| Token | Purpose | Typical Client Usage |\n|-------|---------|---------------------|\n| Access (`AUTH_JWT_*`) | API authorization | Included in `Authorization: Bearer <token>` header |\n| Refresh (`AUTH_REFRESH_*`) | Obtain new access tokens | Sent to `/auth/refresh` endpoint |\n| Forgot (`AUTH_FORGOT_*`) | Password reset flow | Sent to user email, exchanged for new password |\n| Confirm Email (`AUTH_CONFIRM_EMAIL_*`) | Email verification | Clicked from verification email |\n\n> **Security Note**: Replace all secret values with strong random strings in production. 
Use a secrets management tool (e.g., HashiCorp Vault, AWS Secrets Manager) rather than committing secrets to version control.\n\n---\n\n## Social Login Configuration\n\n### Facebook\n\n| Variable | Description |\n|----------|-------------|\n| `FACEBOOK_APP_ID` | Facebook OAuth application ID |\n| `FACEBOOK_APP_SECRET` | Facebook OAuth application secret |\n\n### Google\n\n| Variable | Description |\n|----------|-------------|\n| `GOOGLE_CLIENT_ID` | Google OAuth client ID |\n| `GOOGLE_CLIENT_SECRET` | Google OAuth client secret |\n\n### Apple\n\n| Variable | Default | Description |\n|----------|---------|-------------|\n| `APPLE_APP_AUDIENCE` | `[]` | Array of service identifiers for Apple Sign-In |\n\n---\n\n## Worker Configuration\n\n| Variable | Default | Description |\n|----------|---------|-------------|\n| `WORKER_HOST` | `redis://redis:6379/1` | Redis connection URL for background job processing |\n\nThe worker uses Redis as a message broker (likely Bull or BullMQ for job queues). The `/1` suffix selects Redis database number 1.\n\nFormat: `redis://[username:password@]host[:port][/database]`\n\n---\n\n## Docker Compose Integration\n\nThe default values are designed to work with the project's `docker-compose.yml`:\n\n```yaml\nservices:\n postgres:\n image: postgres\n # DATABASE_HOST=postgres matches this service name\n \n redis:\n image: redis\n # WORKER_HOST=redis://redis:6379/1 matches this service name\n \n maildev:\n image: maildev\n # MAIL_HOST=maildev, MAIL_PORT=1025, MAIL_CLIENT_PORT=1080 match defaults\n```\n\n---\n\n## Quick Start\n\n1. Copy this file to `.env` in your project root\n2. Adjust values for your environment (at minimum: `DATABASE_PASSWORD`, all `AUTH_*_SECRET` values)\n3. Ensure Docker services are running (`docker-compose up -d`)\n4. 
Start the application: `npm run start:dev`","other-eslint-config-mjs":"# Other — eslint.config.mjs\n\n# ESLint Configuration (`eslint.config.mjs`)\n\nThis module defines the ESLint configuration for the project using ESLint's flat config format. It enforces TypeScript-aware linting rules, integrates Prettier for code formatting, and includes custom rules specific to this codebase's patterns.\n\n## Overview\n\nThe configuration combines several tools:\n\n- **ESLint** — Linting engine\n- **@typescript-eslint** — TypeScript language support (parser + rules)\n- **Prettier** — Code formatting (via `eslint-config-prettier`)\n- **FlatCompat** — Bridge between legacy `.eslintrc` configs and flat config\n\n## Configuration Structure\n\n```mermaid\nflowchart TB\n subgraph \"eslint.config.mjs\"\n A[\"FlatCompat<br/>extends legacy configs\"] --> B[\"TypeScript ESLint<br/>recommended rules\"]\n A --> C[\"Prettier<br/>recommended rules\"]\n B --> D[\"Custom Rules\"]\n end\n \n E[\"TypeScript Parser\"] --> F[\"tsconfig.json<br/>project reference\"]\n E --> G[\"Node + Jest globals\"]\n \n D --> H[\"configService.get()<br/>requires infer:true\"]\n D --> I[\"Test descriptions<br/>must start with 'should'\"]\n```\n\n## Key Components\n\n### FlatCompat Layer\n\n```javascript\nconst compat = new FlatCompat({\n baseDirectory: __dirname,\n recommendedConfig: js.configs.recommended,\n allConfig: js.configs.all,\n});\n```\n\nThe `FlatCompat` utility allows extending from plugins that still use the legacy `.eslintrc` format (like `plugin:@typescript-eslint/recommended` and `plugin:prettier/recommended`).\n\n### Language Options\n\n```javascript\nlanguageOptions: {\n globals: {\n ...globals.node,\n ...globals.jest,\n },\n parser: tsParser,\n ecmaVersion: 5,\n sourceType: 'module',\n parserOptions: {\n project: 'tsconfig.json',\n tsconfigRootDir: __dirname,\n },\n}\n```\n\n- **Parser**: Uses `@typescript-eslint/parser` for TypeScript support\n- **Project mode**: Enables type-aware linting via 
`tsconfig.json`\n- **Globals**: Exposes Node.js and Jest global variables (e.g., `describe`, `it`, `expect`, `process`)\n\n## Rule Configuration\n\n### Disabled Rules\n\nSome strict TypeScript rules are disabled to reduce friction:\n\n| Rule | Reason |\n|------|--------|\n| `@typescript-eslint/interface-name-prefix` | Allows interfaces without `I` prefix |\n| `@typescript-eslint/explicit-function-return-type` | No requirement to annotate return types |\n| `@typescript-eslint/explicit-module-boundary-types` | No requirement to annotate export types |\n| `@typescript-eslint/no-explicit-any` | Allows `any` usage |\n| `no-unused-vars` (base) | Disabled to avoid conflicts |\n| `require-await` (base) | Disabled—uses TypeScript version instead |\n\n### Enabled Rules\n\n| Rule | Setting | Purpose |\n|------|---------|---------|\n| `@typescript-eslint/no-unused-vars` | `error` | Catches unused variables with TypeScript awareness |\n| `@typescript-eslint/require-await` | `error` | Requires `await` for async functions |\n| `@typescript-eslint/no-floating-promises` | `error` | Prevents unhandled Promises |\n\n## Custom Rules\n\n### 1. `configService.get()` Type Inference\n\n```javascript\n'no-restricted-syntax': [\n 'error',\n {\n selector: 'CallExpression[callee.object.name=configService][callee.property.name=/^(get|getOrThrow)$/]:not(:has([arguments.1] Property[key.name=infer][value.value=true]))',\n message: 'Add \"{ infer: true }\" to configService.get() for correct typechecking. 
Example: configService.get(\"database.port\",{ infer: true })',\n },\n // ...also handles configService.get() called as property access\n]\n```\n\nThis rule enforces that all `configService.get()` and `configService.getOrThrow()` calls include `{ infer: true }` as the second argument for proper type inference.\n\n**Required pattern:**\n```typescript\nconst port = configService.get('database.port', { infer: true });\n```\n\n**Disallowed pattern:**\n```typescript\nconst port = configService.get('database.port'); // ❌ Missing { infer: true }\n```\n\n### 2. Test Descriptions\n\n```javascript\n{\n selector: 'CallExpression[callee.name=it][arguments.0.value!=/^should/]',\n message: '\"it\" should start with \"should\"',\n}\n```\n\nAll `it()` test descriptions must start with \"should\":\n\n```typescript\nit('should handle invalid input', () => { ... }); // ✓\nit('handles invalid input', () => { ... }); // ✗\n```\n\n## Integration with Build Tools\n\nThis config is automatically picked up by:\n\n- **VS Code ESLint extension** — When configured to use flat config\n- **CLI** — Run via `npx eslint .`\n- **Pre-commit hooks** — If integrated with Husky or lefthook\n\nThe Prettier integration ensures no conflicts between ESLint and Prettier formatting rules.\n\n## Extending the Configuration\n\nTo add new rules or modify existing ones, edit the `rules` object:\n\n```javascript\nrules: {\n // Existing rules...\n '@typescript-eslint/no-unused-vars': ['error', { argsIgnorePattern: '^_' }],\n // Add new rules here\n}\n```\n\nTo extend additional configs:\n\n```javascript\nexport default [\n ...compat.extends(\n 'plugin:@typescript-eslint/recommended',\n 'plugin:prettier/recommended',\n // Add more here\n ),\n // ...rest of config\n];\n```","other-gemini-md":"# Other — GEMINI.md\n\n# NestJS REST API Boilerplate\n\nThis document serves as the primary entry point and architectural reference for the NestJS REST API Boilerplate project. 
It covers the project's purpose, technology stack, architectural patterns, and operational guidance.\n\n## Purpose\n\nThis boilerplate provides a production-ready foundation for building scalable REST APIs with NestJS. It implements **Hexagonal Architecture** (also known as Ports and Adapters) to ensure strict separation between business logic and infrastructure concerns, enabling:\n\n- **Database flexibility**: Switch between PostgreSQL (TypeORM) and MongoDB (Mongoose) without rewriting domain logic\n- **Testability**: Business logic remains isolated from external dependencies\n- **Maintainability**: Clear boundaries between layers prevent code entanglement\n\n## Architecture Overview\n\nThe project enforces a layered architecture with explicit dependency rules:\n\n```mermaid\nflowchart TD\n subgraph \"API Layer\"\n C[Controllers]\n D[DTOs]\n end\n \n subgraph \"Domain Layer\"\n E[Entities]\n S[Services]\n end\n \n subgraph \"Ports\"\n P[Interfaces]\n end\n \n subgraph \"Infrastructure\"\n R[Repositories]\n A[Adapters]\n end\n \n C --> D\n D --> S\n S --> P\n P --> R\n R --> A\n```\n\n**Dependency Flow**: Controllers → DTOs → Domain Services → Ports → Infrastructure Adapters\n\n### Layer Responsibilities\n\n| Layer | Location | Purpose |\n|-------|----------|---------|\n| **Domain** | `domain/` | Business entities and pure logic. No infrastructure imports. 
|\n| **Infrastructure** | `infrastructure/` | TypeORM/Mongoose entities, repository implementations, external service adapters |\n| **DTOs** | `dto/` | Request/response validation objects using `class-validator` |\n| **Ports** | `ports/` | Interface contracts that infrastructure must implement |\n| **Adapters** | `adapters/` | Concrete implementations of ports (e.g., S3 file storage, email services) |\n\n## Technology Stack\n\n### Core Framework\n- **NestJS v11+**: Progressive Node.js framework with built-in dependency injection\n- **TypeScript**: Type-safe JavaScript superset\n\n### Database Support\n- **PostgreSQL** via TypeORM (relational)\n- **MongoDB** via Mongoose (document)\n\n### Authentication\n- JWT-based stateless authentication\n- Social login integration: Apple, Facebook, Google\n\n### Additional Services\n- **Swagger/OpenAPI**: Auto-generated API documentation at `/docs`\n- **Nodemailer**: Email sending capability\n- **File Storage**: Local filesystem and Amazon S3 support\n- **i18n**: Internationalization via `nestjs-i18n`\n\n## Key Features\n\n### API Configuration\n- **Versioning**: URI-based (e.g., `/v1/users`)\n- **Validation**: Global `ValidationPipe` enforces DTO rules on all requests\n- **Serialization**: `ClassSerializerInterceptor` handles response transformation (respects `@Exclude()` decorators)\n- **Promise Resolution**: `ResolvePromisesInterceptor` unwraps promises for consistent response handling\n\n### Database Operations\n```bash\n# Generate a migration\nnpm run migration:generate -- src/database/migrations/MigrationName\n\n# Run pending migrations\nnpm run migration:run\n\n# Revert last migration\nnpm run migration:revert\n\n# Seed relational database\nnpm run seed:run:relational\n\n# Seed document database\nnpm run seed:run:document\n```\n\n### Code Generation\nThe project uses **Hygen** templates for scaffolding:\n```bash\n# Generate new relational resource\nnpm run generate:resource:relational\n\n# Generate new document 
resource\nnpm run generate:resource:document\n\n# Add property to relational entity\nnpm run add:property:to-relational\n\n# Add property to document entity\nnpm run add:property:to-document\n```\n\n## Development Workflow\n\n### Setup\n1. Install dependencies: `npm install`\n2. Configure environment:\n - Copy `env-example-relational` for PostgreSQL\n - Copy `env-example-document` for MongoDB\n - Rename to `.env` and adjust values\n\n### Running\n```bash\n# Development with hot reload\nnpm run start:dev\n\n# Production build\nnpm run build\nnpm run start:prod\n```\n\n### Testing\n```bash\n# Unit tests\nnpm run test\n\n# End-to-end tests\nnpm run test:e2e\n```\n\n## Conventions\n\n### Naming\nFollow standard NestJS conventions:\n- Service: `user.service.ts`\n- Controller: `user.controller.ts`\n- Module: `user.module.ts`\n\n### Persistence Layer\n- Database-specific logic lives in `infrastructure/persistence`\n- Use **mappers** to convert between database entities and domain entities\n- Never import TypeORM/Mongoose decorators in the domain layer\n\n### Documentation\n- Use Swagger decorators (`@ApiProperty`, `@ApiOperation`, `@ApiBearerAuth`) on all endpoints\n- Document request/response schemas in DTOs\n\n### Version Control\nThe project enforces **Conventional Commits** via Husky and commitlint. Ensure commit messages follow the format:\n```\n<type>(<scope>): <description>\n\nTypes: feat, fix, docs, style, refactor, test, chore\n```\n\n## Integration Points\n\n### External Knowledge Graph\nThe project includes a **graphify** knowledge graph in `graphify-out/`. Before answering architecture questions:\n1. Read `graphify-out/GRAPH_REPORT.md` for god nodes and community structure\n2. If `graphify-out/wiki/index.md` exists, navigate it for contextual information\n3. 
Use graph queries for cross-module relationship questions:\n ```bash\n graphify query \"<question>\"\n graphify path \"<module-a>\" \"<module-b>\"\n graphify explain \"<concept>\"\n ```\n\n### Agent Tools\n- **Issue Tracking**: GitHub integration via `gh` CLI (see `docs/agents/issue-tracker.md`)\n- **Triage Labels**: Standard roles mapped to GitHub labels (see `docs/agents/triage-labels.md`)\n- **Domain Docs**: Contextual documentation in `CONTEXT.md` and `docs/adr/` (see `docs/agents/domain.md`)\n\n## File Structure Reference\n\n```\nsrc/\n├── domain/ # Business logic (framework-agnostic)\n├── infrastructure/ # Database entities, repositories\n│ └── persistence/ # TypeORM/Mongoose implementations\n├── dto/ # Request/response validation\n├── ports/ # Interface definitions\n├── adapters/ # External service implementations\n├── database/\n│ ├── migrations/ # TypeORM migrations\n│ └── seeds/ # Data seeding scripts\n└── controllers/ # API endpoint handlers\n```","other-graphify-out":"# Other — graphify-out\n\n# Graphify-Out Module\n\nThis module contains the output artifacts from running **graphify**, a code knowledge graph extraction tool, on the codebase. 
It provides an interactive visualization and structured analysis of code relationships, dependencies, and architectural patterns.\n\n## Overview\n\nThe graphify-out directory captures the results of semantic and AST-based code analysis:\n\n| Metric | Value |\n|--------|-------|\n| Source files analyzed | 214 |\n| Total words extracted | ~52,691 |\n| Graph nodes | 748 |\n| Graph edges | 566 |\n| Communities detected | 206 |\n\n## File Structure\n\n```\ngraphify-out/\n├── GRAPH_REPORT.md # Human-readable analysis summary\n├── cost.json # Token usage and run metadata\n└── graph.html # Interactive network visualization\n```\n\n### GRAPH_REPORT.md\n\nThe main analysis report containing:\n\n- **Corpus Check** — Warning about large corpus size and token cost implications\n- **Summary Statistics** — Node/edge counts, extraction confidence (99% EXTRACTED, 1% INFERRED)\n- **Community Hubs** — Named clusters representing functional areas of the codebase\n- **God Nodes** — Most-connected entities (core abstractions)\n- **Surprising Connections** — Non-obvious relationships detected between modules\n- **Hyperedges** — Group relationships spanning multiple domains\n- **Knowledge Gaps** — Isolated or weakly-connected nodes that may need documentation\n\n### cost.json\n\nTracks token consumption across graphify runs:\n\n```json\n{\n \"runs\": [\n {\"date\": \"2026-05-02T12:47:12.004441+00:00\", \"files\": 187},\n {\"date\": \"2026-05-03T00:06:34.558922+00:00\", \"files\": 28}\n ],\n \"total_input_tokens\": 0,\n \"total_output_tokens\": 0\n}\n```\n\n### graph.html\n\nInteractive visualization built with vis-network. 
Features include:\n\n- **Search** — Find nodes by name\n- **Node inspection** — Click any node to see its connections and metadata\n- **Community filtering** — Show/hide node groups\n- **Navigation** — Pan, zoom, and drag to explore the graph\n\n## Key Findings from Analysis\n\n### Core Abstractions (God Nodes)\n\nThe most connected nodes in the graph represent the system's central concepts:\n\n1. **AuthService** — 15 edges (authentication logic hub)\n2. **AuthController** — 13 edges (API surface for auth)\n3. **OrchestratorService** — 11 edges (workflow coordination)\n4. **UsersService** — 10 edges (user management)\n5. **UsersRelationalRepository** / **UsersDocumentRepository** — 10 edges each (persistence abstraction)\n\n### Major Community Clusters\n\nThe graph identified these functional communities:\n\n| Community | Description | Key Nodes |\n|-----------|-------------|-----------|\n| File Persistence & Infrastructure | File upload/storage | FileEntity, FileMapper, FileRepository |\n| Authentication Service Logic | Core auth business logic | AuthService |\n| Authentication Controller API | Auth endpoints | AuthController |\n| Database Seeding & RBAC | Initial data & roles | SeedModule, RoleSeedModule, RolesGuard |\n| User Management & Persistence | User domain | User, UserEntity, SessionService |\n| GitLab/GitHub Adapters | VCS integration | GitLabAdapter, GitHubAdapter |\n| Symphony Workflow | Automation workflows | WorkflowService, PersistenceService |\n| Plane.so Integration | Issue tracking | PlaneAdapter, PlaneClient |\n| Jules (AI) Adapter | AI agent integration | JulesAdapter, JulesClient |\n\n### Surprising Connections\n\nThe analysis detected non-obvious relationships:\n\n- **Boilerplate Features** → **About Databases** (conceptual link between README and database docs)\n- **Changelog v1.2.0** → **Hexagonal Architecture** (documentation reference)\n- **SeedModule** → **RoleSeedModule** / **StatusSeedModule** (dependency injection chain)\n\n### 
Knowledge Gaps\n\n185 nodes have ≤1 connection, indicating:\n\n- Undocumented components\n- Missing import/export relationships\n- Standalone utility modules\n- Possible dead code\n\n## Usage\n\n### Viewing the Graph\n\nOpen `graph.html` in a browser to explore interactively:\n\n```bash\n# macOS\nopen graphify-out/graph.html\n\n# Linux\nxdg-open graphify-out/graph.html\n\n# Windows\nstart graphify-out/graph.html\n```\n\n### Reading the Report\n\n`GRAPH_REPORT.md` provides a navigable summary. Communities are linked and can serve as a starting point for understanding module boundaries.\n\n### Integrating with CI/CD\n\nThe `cost.json` tracks analysis expenses. For large codebases, consider:\n\n```bash\n# Run on subset of files\ngraphify --path ./src/auth ./src/users\n\n# Skip semantic extraction (faster, AST-only)\ngraphify --no-semantic\n```\n\n## Relationship to Codebase\n\nThis module is **output-only** — it does not contain executable code. It represents a snapshot of the codebase structure at analysis time. Regenerate after significant architectural changes:\n\n```bash\ngraphify --regenerate\n```\n\nThe graph reveals that this is a NestJS application with:\n- Dual database support (PostgreSQL/TypeORM, MongoDB/Mongoose)\n- Multi-provider authentication (Google, Facebook, Apple, email)\n- File storage abstraction (local, S3, presigned URLs)\n- External service adapters (GitHub, GitLab, Plane.so, Jules AI)\n- Workflow automation (Symphony)","other-maildev-dockerfile":"# Other — maildev.Dockerfile\n\n# maildev.Dockerfile\n\nA development container configuration for Maildev, a local SMTP server and web interface for testing email functionality.\n\n## Purpose\n\nThis Dockerfile provides a self-contained Maildev instance for development environments. 
It allows developers to:\n\n- Capture outgoing emails from applications without sending them to real recipients\n- Inspect email content, headers, and attachments through a web UI\n- Test email-triggered workflows without configuring a real email service\n\n## How It Works\n\n```mermaid\nflowchart LR\n subgraph Development Environment\n A[Application] -->|SMTP Port 25| B[Maildev Container]\n B -->|Web UI Port 1080| C[Developer Browser]\n end\n \n style B fill:#ddffdd\n style C fill:#ddddff\n```\n\n**Base Image**: `node:24.14.1-alpine` — lightweight Node.js runtime\n\n**Key Operations**:\n1. Installs Maildev v2.0.5 globally via npm\n2. Configures `maildev` as the default container command\n\n## Usage\n\nThis container typically runs alongside application services in a Docker Compose setup:\n\n```yaml\n# docker-compose.yml example\nservices:\n maildev:\n build:\n context: .\n dockerfile: maildev.Dockerfile\n ports:\n - \"1080:1080\" # Web UI\n - \"1025:1025\" # SMTP\n```\n\n**Default Ports**:\n| Port | Service |\n|------|---------|\n| 1080 | Web interface |\n| 1025 | SMTP server |\n\n## Application Integration\n\nConfigure your application to send emails to the Maildev SMTP server:\n\n```javascript\n// Example: Nodemailer transport configuration\nconst transporter = nodemailer.createTransport({\n host: 'maildev',\n port: 25,\n secure: false\n});\n```\n\n## Accessing the Web UI\n\nOnce running, open `http://localhost:1080` in your browser to:\n- View received emails\n- Inspect HTML and plain-text content\n- Check attachments\n- Verify email headers and delivery status\n\n## Version Notes\n\n- **Node.js**: 24.14.1 (Alpine Linux)\n- **Maildev**: 2.0.5\n\nThe Alpine base image keeps the container footprint minimal while providing the Node.js runtime required by Maildev.","other-nest-cli-json":"# Other — nest-cli.json\n\n# nest-cli.json Configuration\n\n## Overview\n\nThe `nest-cli.json` file is the primary configuration file for the NestJS CLI. 
It defines how the CLI generates code, where to find source files, and what additional assets to include during compilation and development.\n\nThis configuration file is read by the NestJS CLI commands (`nest new`, `nest generate`, `nest build`, `nest start`) to determine project structure and build behavior.\n\n## Configuration Properties\n\n### `$schema`\n\n```json\n\"$schema\": \"https://json.schemastore.org/nest-cli\"\n```\n\nPoints to the official JSON Schema Store for Nest CLI schemas. This enables IDE features such as:\n- Autocomplete suggestions while editing\n- Inline validation errors\n- Type checking for configuration values\n\n### `collection`\n\n```json\n\"collection\": \"@nestjs/schematics\"\n```\n\nSpecifies the schematic collection used for code generation. The `@nestjs/schematics` package provides templates for generating:\n\n- Controllers (`nest g controller`)\n- Services (`nest g service`)\n- Modules (`nest g module`)\n- Providers, guards, interceptors, and other NestJS constructs\n\nThis setting determines what code templates are used when running `nest generate` commands.\n\n### `sourceRoot`\n\n```json\n\"sourceRoot\": \"src\"\n```\n\nDefines the root directory containing TypeScript source files. 
The NestJS compiler looks in this directory for:\n\n- Application modules\n- Controllers and providers\n- Configuration files\n- TypeScript entry points (main.ts, app.module.ts)\n\nThe compiled output is placed in a `dist/` directory with the same structure.\n\n### `compilerOptions`\n\nBuild and compilation settings for the NestJS CLI.\n\n#### `assets`\n\n```json\n\"compilerOptions\": {\n \"assets\": [{ \"include\": \"i18n/**/*\", \"watchAssets\": true }]\n}\n```\n\nThe `assets` array specifies non-TypeScript files to copy to the `dist/` directory during compilation.\n\n| Property | Value | Purpose |\n|----------|-------|---------|\n| `include` | `i18n/**/*` | Glob pattern matching all files in the `i18n/` directory and subdirectories |\n| `watchAssets` | `true` | Enables file watching on these assets during development with `nest start --watch` |\n\n**Why this matters:** Internationalization files (JSON, YAML, or other locale files) must be available at runtime but are not TypeScript code. This configuration ensures they're:\n\n1. Copied to the output directory during `nest build`\n2. Hot-reloaded when changed during development\n3. 
Available in the production `dist/` folder\n\n## Directory Structure\n\n```\nproject/\n├── src/ # sourceRoot\n│ ├── app.module.ts\n│ ├── main.ts\n│ └── ...\n├── i18n/ # Included as compiler asset\n│ ├── en.json\n│ ├── es.json\n│ └── ...\n├── dist/ # Compiled output\n│ ├── src/\n│ └── i18n/ # Assets copied here\n├── nest-cli.json # This configuration\n└── package.json\n```\n\n## Common CLI Commands Using This Config\n\n| Command | Config Properties Used |\n|---------|----------------------|\n| `nest new <name>` | `collection` (for templates) |\n| `nest g resource <name>` | `collection`, `sourceRoot` |\n| `nest build` | `sourceRoot`, `compilerOptions.assets` |\n| `nest start --watch` | `sourceRoot`, `compilerOptions.assets`, `watchAssets` |\n\n## Customization\n\n### Adding More Assets\n\nTo include additional non-TypeScript files (e.g., static assets, configuration files):\n\n```json\n\"compilerOptions\": {\n \"assets\": [\n { \"include\": \"i18n/**/*\", \"watchAssets\": true },\n { \"include\": \"config/*.json\", \"watchAssets\": false },\n { \"include\": \"public/**/*\" }\n ]\n}\n```\n\n### Changing Source Root\n\nIf organizing code in a different directory:\n\n```json\n\"sourceRoot\": \"app\"\n```\n\nThis would look for source files in an `app/` directory instead of `src/`.\n\n### Using Custom Schematics\n\nTo use a custom schematic collection:\n\n```json\n\"collection\": \"@my-org/nestjs-custom-schematics\"\n```\n\n## Relationship to Other Files\n\n- **package.json** — Defines dependencies; NestJS CLI must be installed (`@nestjs/cli`)\n- **tsconfig.json** — TypeScript configuration; works alongside nest-cli.json for compilation\n- **webpack.config.js** (optional) — Custom webpack build; overrides some nest-cli.json settings when present","other-package-json":"# Other — package.json\n\n# nestjs-boilerplate — package.json\n\n## Overview\n\nThis package.json defines a comprehensive NestJS boilerplate supporting both **relational** (PostgreSQL via TypeORM) and 
**document** (MongoDB via Mongoose) databases. The project provides scaffolding, code generation, database migrations, seeding, and full-stack development tooling.\n\n## Project Metadata\n\n| Property | Value |\n|----------|-------|\n| Name | `nestjs-boilerplate` |\n| Version | `1.2.0` |\n| License | MIT |\n| Node.js | >= 16.0.0 |\n| npm | >= 8.0.0 |\n\n## NPM Scripts\n\n### Database Migrations\n\n```bash\nnpm run migration:generate # Generate a migration from entity changes\nnpm run migration:create # Create an empty migration file\nnpm run migration:run # Apply pending migrations\nnpm run migration:revert # Revert the last migration\nnpm run schema:drop # Drop the entire schema (use with caution)\n```\n\n### Seed Data Management\n\n```bash\nnpm run seed:create:relational # Create a seed for relational DB\nnpm run seed:create:document # Create a seed for MongoDB\nnpm run seed:run:relational # Execute relational database seeds\nnpm run seed:run:document # Execute MongoDB seeds\n```\n\n### Code Generation\n\nThe boilerplate uses **hygen** for code generation with templates:\n\n```bash\n# Generate complete CRUD resources\nnpm run generate:resource:relational # For PostgreSQL entities\nnpm run generate:resource:document # For MongoDB documents\nnpm run generate:resource:all-db # For both database types\n\n# Add properties to existing resources\nnpm run add:property:to-all-db # Add to both DB types\nnpm run add:property:to-document # Add to MongoDB only\nnpm run add:property:to-relational # Add to PostgreSQL only\n```\n\n### Application Lifecycle\n\n```bash\nnpm run build # Compile TypeScript to dist/\nnpm run start # Start the application\nnpm run start:dev # Start with hot reload (watch mode)\nnpm run start:swc # Start with SWC compiler for faster rebuilds\nnpm run start:debug # Start with debugger attached\nnpm run start:prod # Run the compiled production build\n```\n\n### Code Quality\n\n```bash\nnpm run lint # Run ESLint\nnpm run format # Format code with 
Prettier\n```\n\n### Testing\n\n```bash\nnpm run test # Run unit tests\nnpm run test:watch # Run tests in watch mode\nnpm run test:cov # Run tests with coverage report\nnpm run test:debug # Debug tests with Node inspector\nnpm run test:e2e # Run end-to-end tests\nnpm run test:e2e:relational:docker # Run e2e tests for relational DB in Docker\nnpm run test:e2e:document:docker # Run e2e tests for MongoDB in Docker\n```\n\n### Release\n\n```bash\nnpm run release # Create a release with conventional-changelog\n```\n\n## Dependencies\n\n### NestJS Core\n\n| Package | Purpose |\n|---------|---------|\n| `@nestjs/common` | Core NestJS abstractions |\n| `@nestjs/core` | Core NestJS engine |\n| `@nestjs/platform-express` | Express adapter |\n| `@nestjs/config` | Configuration management |\n| `@nestjs/swagger` | OpenAPI/Swagger documentation |\n| `@nestjs/jwt` | JWT authentication |\n| `@nestjs/passport` | Passport integration |\n| `@nestjs/typeorm` | TypeORM integration |\n| `@nestjs/mongoose` | Mongoose/MongoDB integration |\n\n### Database\n\n| Package | Purpose |\n|---------|---------|\n| `typeorm` | Relational database ORM (PostgreSQL) |\n| `mongoose` | MongoDB ODM |\n| `mongoose-autopopulate` | Automatic population for Mongoose |\n| `pg` | PostgreSQL driver |\n\n### Authentication\n\n| Package | Purpose |\n|---------|---------|\n| `passport` | Authentication framework |\n| `passport-jwt` | JWT strategy |\n| `passport-anonymous` | Anonymous auth strategy |\n| `google-auth-library` | Google OAuth |\n| `apple-signin-auth` | Apple Sign In |\n| `bcryptjs` | Password hashing |\n\n### File Storage & Email\n\n| Package | Purpose |\n|---------|---------|\n| `@aws-sdk/client-s3` | AWS S3 client |\n| `@aws-sdk/s3-request-presigner` | Pre-signed S3 URLs |\n| `multer` | File upload middleware |\n| `multer-s3` | S3 storage for multer |\n| `nodemailer` | Email sending |\n\n### Internationalization & Templating\n\n| Package | Purpose |\n|---------|---------|\n| `nestjs-i18n` | 
Internationalization |\n| `handlebars` | Template engine |\n| `liquidjs` | Liquid template engine |\n\n### Validation & Serialization\n\n| Package | Purpose |\n|---------|---------|\n| `class-validator` | DTO validation |\n| `class-transformer` | DTO serialization |\n\n## Dev Dependencies\n\n### Build & TypeScript\n\n- `@nestjs/cli` — NestJS CLI for scaffolding\n- `@swc/cli`, `@swc/core` — Fast TypeScript/JavaScript compiler\n- `typescript` — TypeScript language\n- `ts-node` — Execute TypeScript directly\n- `ts-loader` — TypeScript loader for webpack\n\n### Testing\n\n- `jest` — Test runner\n- `ts-jest` — Jest TypeScript transformer\n- `@nestjs/testing` — NestJS testing utilities\n- `supertest` — HTTP assertion library\n\n### Code Quality\n\n- `eslint` — Linting\n- `prettier` — Code formatting\n- `husky` — Git hooks\n- `@commitlint/cli` — Conventional commit validation\n\n### Code Generation\n\n- `hygen` — Code generator (templates in `/_templates`)\n\n### Release Management\n\n- `release-it` — Automated releases\n- `@release-it/conventional-changelog` — Conventional changelog\n\n## Jest Configuration\n\n```json\n{\n \"moduleFileExtensions\": [\"js\", \"json\", \"ts\"],\n \"rootDir\": \"src\",\n \"testRegex\": \".*\\\\.spec\\\\.ts$\",\n \"transform\": {\n \"^.+\\\\.(t|j)s$\": \"ts-jest\"\n },\n \"collectCoverageFrom\": [\"**/*.(t|j)s\"],\n \"coverageDirectory\": \"../coverage\",\n \"testEnvironment\": \"node\"\n}\n```\n\nKey points:\n- Test files must end with `.spec.ts`\n- Tests run from the `src` directory\n- Coverage reports output to `coverage/`\n\n## Release Configuration\n\nThe project uses `release-it` with conventional-changelog:\n\n- Creates GitHub releases\n- Generates `CHANGELOG.md` from conventional commits\n- Groups changes by type: Features, Bug Fixes, Tests, Documentation, etc.\n- Does not publish to npm (`\"publish\": false`)\n\n## Development Workflow\n\n### Initial Setup\n\n```bash\nnpm run app:config # Run configuration script and install 
dependencies\n```\n\n### Creating a New Feature\n\n1. Generate a resource:\n ```bash\n npm run generate:resource:all-db\n ```\n\n2. Run migrations if entities changed:\n ```bash\n npm run migration:generate\n npm run migration:run\n ```\n\n3. Start development:\n ```bash\n npm run start:dev\n ```\n\n### Running Tests\n\n```bash\n# Unit tests\nnpm run test\n\n# E2E tests with Docker\nnpm run test:e2e:relational:docker\nnpm run test:e2e:document:docker\n```\n\n## Database Support\n\nThis boilerplate supports two database paradigms:\n\n1. **Relational** — PostgreSQL with TypeORM\n - Uses `src/database/data-source.ts` for CLI operations\n - Migrations stored in `src/database/migrations/relational`\n\n2. **Document** — MongoDB with Mongoose\n - Models in `src/database/schemas`\n - Seeds in `src/database/seeds/document`\n\nBoth can be used simultaneously in the same application.","other-procfile":"# Other — Procfile\n\n# Procfile\n\nThis module defines the process types required to run the application on container-based deployment platforms (Heroku, Render, Railway, Fly.io, etc.).\n\n## Overview\n\nThe Procfile specifies the commands that the platform orchestrator executes when starting your application. Each line defines a **process type** — a named command that can be scaled independently.\n\n## Process Definitions\n\n### `web`\n\n```\nweb: npm run start:prod\n```\n\nStarts the production web server. This is the main HTTP server that handles incoming requests.\n\n- **Command**: Runs the `start:prod` npm script\n- **Scaling**: Can be scaled to multiple instances\n- **Port**: Expects the `PORT` environment variable to be set by the platform\n\n### `release`\n\n```\nrelease: echo '' > .env && npm run migration:run && npm run seed:run:relational\n```\n\nRuns one-time tasks during each deployment **before** the new `web` process starts.\n\n**Execution order:**\n1. `echo '' > .env` — Ensures a `.env` file exists (creates empty file if missing)\n2. 
`npm run migration:run` — Executes database schema migrations\n3. `npm run seed:run:relational` — Seeds the relational database with initial data\n\nThis process type is critical for ensuring the database schema is up-to-date and populated before traffic routes to the new release.\n\n## Platform Behavior\n\n| Event | Triggered Process |\n|-------|-------------------|\n| Initial deploy | `release` → `web` |\n| Subsequent deploy | `release` → `web` (old version stops) |\n| Scaling (web) | Only `web` |\n| Restart | Only `web` |\n\nThe `release` process runs on every `git push` that triggers a new deployment. If it fails, the new release is not deployed and traffic continues routing to the previous healthy version.\n\n## Relationship to npm Scripts\n\nThe Procfile delegates to npm scripts defined in `package.json`:\n\n| Procfile Command | Invokes npm Script |\n|------------------|-------------------|\n| `npm run start:prod` | Starts the production server (typically Node.js with Express or similar) |\n| `npm run migration:run` | Runs pending database migrations (e.g., Knex, TypeORM, Prisma) |\n| `npm run seed:run:relational` | Populates the relational database with initial seed data |\n\n## Requirements\n\n- Node.js and npm must be available in the runtime environment\n- The `PORT` environment variable must be exposed to the `web` process\n- Database connection credentials must be available in the environment\n\n## Troubleshooting\n\n**Release process fails:**\n- Check that `migration:run` and `seed:run:relational` scripts are valid in `package.json`\n- Verify database credentials are present in the environment\n- Review migration and seed script output for specific errors\n\n**Web process crashes immediately:**\n- Confirm `start:prod` script exists in `package.json`\n- Ensure the `PORT` environment variable is set by the platform\n- Check application logs for runtime errors","other-readme-md":"# Other — README.md\n\n# NestJS REST API Boilerplate\n\nA production-ready 
NestJS REST API boilerplate for building scalable backend applications. This boilerplate provides a solid foundation with pre-configured tooling, patterns, and integrations commonly needed in real-world projects.\n\n## Overview\n\nThis boilerplate is maintained by [Brocoders](https://brocoders.com/) and belongs to the [bc boilerplates](https://bcboilerplates.com/) ecosystem. It offers a comprehensive starting point for NestJS projects with authentication, database support, internationalization, file handling, and more.\n\n**Demo API Documentation**: https://nestjs-boilerplate-test.herokuapp.com/docs\n\n**Related Frontend**: [extensive-react-boilerplate](https://github.com/brocoders/extensive-react-boilerplate)\n\n## Architecture\n\n```mermaid\nflowchart TB\n subgraph Client_Layer[\"Client Layer\"]\n SW[Swagger UI]\n FE[Frontend Apps]\n Mobile[Mobile Apps]\n end\n \n subgraph API_Gateway[\"NestJS Application\"]\n Auth[Auth Module]\n Users[Users Module]\n Files[Files Module]\n Mail[Mail Module]\n I18n[Internationalization]\n end\n \n subgraph Data_Layer[\"Data Layer\"]\n DB[(Database)]\n S3[Amazon S3]\n SMTP[Email Service]\n end\n \n SW --> API_Gateway\n FE --> API_Gateway\n Mobile --> API_Gateway\n \n API_Gateway --> DB\n API_Gateway --> S3\n API_Gateway --> SMTP\n```\n\n## Features\n\n### Database & ORM\n\n- **TypeORM** support for SQL databases (PostgreSQL, MySQL, SQLite)\n- **Mongoose** support for MongoDB\n- Automatic migrations and seeding for development data\n\n### Authentication & Authorization\n\n- Email/password sign in and sign up\n- Social authentication (Apple, Facebook, Google)\n- Role-based access control (Admin and User roles)\n- JWT-based session management\n\n### Internationalization\n\n- Multi-language support via [nestjs-i18n](https://www.npmjs.com/package/nestjs-i18n)\n- Ready for localization in multiple languages\n\n### File Handling\n\n- Local storage driver for development\n- Amazon S3 driver for production deployments\n- Configurable 
upload handlers\n\n### API Documentation\n\n- Auto-generated Swagger/OpenAPI documentation\n- Interactive API exploration at `/docs`\n\n### Mailing\n\n- Email integration via [nodemailer](https://www.npmjs.com/package/nodemailer)\n- Templated email support\n\n### Testing\n\n- Unit tests for individual components\n- E2E tests for full API workflows\n\n### DevOps\n\n- Docker containerization\n- GitHub Actions CI/CD pipelines\n- Renovate bot for dependency updates\n\n## Project Structure\n\n```\nnestjs-boilerplate/\n├── src/\n│ ├── config/ # Configuration modules\n│ ├── database/ # Database migrations & seeds\n│ ├── auth/ # Authentication module\n│ ├── users/ # User management\n│ ├── files/ # File upload handling\n│ ├── mail/ # Email templates & sending\n│ └── i18n/ # Translation files\n├── test/ # E2E tests\n├── docker/ # Docker configurations\n└── docs/ # Extended documentation\n```\n\n## Quick Start\n\n### Prerequisites\n\n- Node.js (LTS version)\n- Docker (for containerized development)\n- PostgreSQL, MySQL, or MongoDB (depending on ORM choice)\n\n### Installation\n\n```bash\n# Clone the repository\ngit clone https://github.com/brocoders/nestjs-boilerplate.git\n\n# Install dependencies\nnpm install\n\n# Copy environment configuration\ncp .env.example .env\n\n# Start with Docker\ndocker-compose up -d\n```\n\n### Running the Application\n\n```bash\n# Development\nnpm run start:dev\n\n# Production\nnpm run start:prod\n\n# Run tests\nnpm run test\nnpm run test:e2e\n```\n\n## Configuration\n\nEnvironment variables are managed via `@nestjs/config`. 
Key configuration options include:\n\n| Variable | Description | Default |\n|----------|-------------|---------|\n| `NODE_ENV` | Application environment | `development` |\n| `PORT` | Server port | `3000` |\n| `DATABASE_URL` | Database connection string | - |\n| `JWT_SECRET` | Secret for JWT signing | - |\n| `AWS_S3_BUCKET` | S3 bucket for file uploads | - |\n\n## API Endpoints\n\nThe boilerplate provides the following core endpoints:\n\n```\nPOST /auth/signup # Register new user\nPOST /auth/signin # Authenticate user\nPOST /auth/refresh # Refresh access token\n\nGET /users # List users (admin)\nGET /users/:id # Get user by ID\n\nPOST /files/upload # Upload file\nGET /files/:filename # Download file\n```\n\nFull API documentation is available at `/docs` when the server is running.\n\n## Extending the Boilerplate\n\n### Adding a New Module\n\n```bash\n# Generate module, controller, and service\nnest g module new-module\nnest g controller new-module\nnest g service new-module\n```\n\n### Switching Database\n\nEdit `app.module.ts` to use either the TypeORM or Mongoose configuration:\n\n```typescript\n// For TypeORM\nimport { TypeOrmModule } from '@nestjs/typeorm';\n\n// For Mongoose\nimport { MongooseModule } from '@nestjs/mongoose';\n```\n\n### Adding Social Login\n\nConfigure credentials in your environment file and update the auth service to include additional OAuth providers.\n\n## Troubleshooting\n\n- **Database connection errors**: Verify `DATABASE_URL` is correctly set\n- **File upload failures**: Check write permissions for local storage or S3 credentials\n- **Auth token issues**: Ensure `JWT_SECRET` is properly configured\n\n## Contributing\n\nContributions are welcome. 
Please ensure tests pass before submitting pull requests.\n\n## License\n\nMIT License\n\n## Support\n\n- **Email**: boilerplates@brocoders.com\n- **GitHub Discussions**: https://github.com/brocoders/nestjs-boilerplate/discussions\n- **Discord**: https://discord.com/channels/520622812742811698/1197293125433691","other-relational-e2e-dockerfile":"# Other — relational.e2e.Dockerfile\n\n# relational.e2e.Dockerfile\n\n## Overview\n\nThis Dockerfile builds the containerized environment for running end-to-end (E2E) tests against a NestJS application with a relational database. It creates a self-contained test environment that includes the application, its dependencies, and the scripts needed to orchestrate test execution in a CI pipeline.\n\n## Base Image\n\n```dockerfile\nFROM node:24.14.1-alpine\n```\n\nThe image uses Node.js 24.14.1 on Alpine Linux, providing a lightweight runtime environment. Alpine keeps the final image size small while still supporting all required Node.js and TypeScript tooling.\n\n## System Dependencies\n\n```dockerfile\nRUN apk add --no-cache bash\n```\n\nAlpine Linux ships with `sh` by default, but many Node.js scripts and the NestJS CLI assume `bash` is available. This installs bash without caching the package index to keep the image lean.\n\n## Global npm Packages\n\n```dockerfile\nRUN npm i -g @nestjs/cli typescript ts-node\n```\n\nThree packages are installed globally:\n\n- **@nestjs/cli** — The NestJS command-line interface, used to generate, build, and run NestJS applications\n- **typescript** — The TypeScript compiler, required for building the project\n- **ts-node** — Executes TypeScript directly without pre-compilation, useful for development and certain test scenarios\n\n## Dependency Installation\n\n```dockerfile\nCOPY package*.json /tmp/app/\nRUN cd /tmp/app && npm install\n\nCOPY . /usr/src/app\nRUN cp -a /tmp/app/node_modules /usr/src/app\n```\n\nThe Dockerfile uses a multi-step approach to optimize layer caching:\n\n1. 
Copies only `package.json` and `package-lock.json` first\n2. Runs `npm install` in a temporary directory\n3. Copies the full application source code\n4. Copies the installed `node_modules` to the final location\n\nThis separation means that if only source code changes (without dependency changes), Docker can reuse the cached dependency layer, significantly speeding up rebuilds.\n\n## Helper Scripts\n\nTwo scripts are copied into the image:\n\n```dockerfile\nCOPY ./wait-for-it.sh /opt/wait-for-it.sh\nCOPY ./startup.relational.ci.sh /opt/startup.relational.ci.sh\nRUN chmod +x /opt/wait-for-it.sh\nRUN chmod +x /opt/startup.relational.ci.sh\nRUN sed -i 's/\r//g' /opt/wait-for-it.sh\nRUN sed -i 's/\r//g' /opt/startup.relational.ci.sh\n```\n\n| Script | Purpose |\n|--------|---------|\n| `wait-for-it.sh` | Polls a service (typically the database) until it accepts connections, preventing the application from starting before its dependencies are ready |\n| `startup.relational.ci.sh` | Orchestrates the test execution flow — starts required services, runs migrations, executes tests, and handles cleanup |\n\nThe `sed` commands remove Windows-style line endings (`\r\n`) that can cause script execution failures on Linux.\n\n## Application Build\n\n```dockerfile\nWORKDIR /usr/src/app\nRUN echo \"\" > .env\nRUN npm run build\n```\n\n- Sets the working directory to `/usr/src/app`\n- Creates an empty `.env` file (the CI environment provides configuration via environment variables or the startup script)\n- Runs `npm run build` to compile the TypeScript source code into JavaScript\n\n## Startup Command\n\n```dockerfile\nCMD [\"/opt/startup.relational.ci.sh\"]\n```\n\nThe container executes the startup script on launch, which handles the full E2E test lifecycle.\n\n## Architecture\n\n```mermaid\ngraph TD\n    subgraph \"Container\"\n        A[\"startup.relational.ci.sh\"] --> B[\"wait-for-it.sh\"]\n        B --> C[\"Database Service\"]\n        A --> D[\"npm run build\"]\n        D --> E[\"NestJS 
Application\"]\n E --> C\n A --> F[\"E2E Test Suite\"]\n end\n \n G[\".env / Environment Variables\"] --> A\n```\n\n## Usage in CI\n\nThis image is typically invoked by a CI system (e.g., GitHub Actions, GitLab CI) that:\n\n1. Starts the container\n2. Provides environment variables for database connection strings and test configuration\n3. Captures the exit code to determine test pass/fail status\n4. Collects test reports and logs from the container output\n\nThe startup script handles all orchestration internally, so the CI job only needs to run the container and report results.\n\n## Key Considerations\n\n- **Layer caching** — Dependency installation is separated from source code copying to maximize Docker layer reuse during development\n- **Service readiness** — The `wait-for-it.sh` script ensures the database is fully available before the application starts, preventing race conditions\n- **Relational database** — The filename and scripts indicate this image is configured for a relational database (PostgreSQL, MySQL, etc.), not MongoDB or other NoSQL stores\n- **CI-first design** — The empty `.env` file and startup script pattern are designed for automated pipeline execution rather than local development","other-relational-test-dockerfile":"# Other — relational.test.Dockerfile\n\n# relational.test.Dockerfile\n\n## Overview\n\nThis Dockerfile builds a Docker image for running integration tests against a relational database in a NestJS application. It sets up a complete Node.js environment with TypeScript support, installs application dependencies, and configures helper scripts to manage test execution and service readiness.\n\nThe image is specifically designed for **relational database testing**, meaning it expects a relational database (PostgreSQL, MySQL, etc.) 
to be available as a dependency service.\n\n## Image Architecture\n\n```mermaid\nflowchart TB\n subgraph Build_Time[\"Build Phase\"]\n A[\"node:24.14.1-alpine\"] --> B[\"Install bash\"]\n B --> C[\"Install NestJS CLI, TypeScript, ts-node\"]\n C --> D[\"Copy & install npm dependencies\"]\n D --> E[\"Copy application code\"]\n E --> F[\"Copy helper scripts\"]\n end\n \n subgraph Run_Time[\"Runtime Phase\"]\n G[\"Execute startup script\"] --> H[\"Wait for dependencies\"]\n H --> I[\"Run test suite\"]\n end\n \n F --> G\n```\n\n## Base Image\n\n```dockerfile\nFROM node:24.14.1-alpine\n```\n\nThe image uses `node:24.14.1-alpine` as its base, providing Node.js 24.14.1 on an Alpine Linux minimal footprint. This version should match the Node.js version required by your application.\n\n## System Dependencies\n\n```dockerfile\nRUN apk add --no-cache bash\n```\n\nBash is installed because the helper scripts (`wait-for-it.sh` and `startup.relational.test.sh`) are written in Bash. The `--no-cache` flag keeps the image smaller by not storing the APK cache.\n\n## NestJS & TypeScript Tooling\n\n```dockerfile\nRUN npm i -g @nestjs/cli typescript ts-node\n```\n\nThree packages are installed globally:\n\n| Package | Purpose |\n|---------|---------|\n| `@nestjs/cli` | NestJS command-line interface for generating and running applications |\n| `typescript` | TypeScript compiler |\n| `ts-node` | Execute TypeScript directly without pre-compilation |\n\nThese tools enable running the NestJS application in development/test mode directly from TypeScript source files.\n\n## Dependency Installation\n\n```dockerfile\nCOPY package*.json /tmp/app/\nRUN cd /tmp/app && npm install\n```\n\nDependencies are installed in a temporary directory before copying the full application code. This is a Docker best practice that enables layer caching—if only the source code changes (not dependencies), Docker can reuse the cached dependency layer.\n\n## Application Code\n\n```dockerfile\nCOPY . 
/usr/src/app\n```\n\nThe entire application codebase is copied into `/usr/src/app`. This includes:\n\n- Source TypeScript files\n- Test files\n- Configuration files\n- Database schemas or migrations\n\n## Helper Scripts\n\n```dockerfile\nCOPY ./wait-for-it.sh /opt/wait-for-it.sh\nRUN chmod +x /opt/wait-for-it.sh\nCOPY ./startup.relational.test.sh /opt/startup.relational.test.sh\nRUN chmod +x /opt/startup.relational.test.sh\nRUN sed -i 's/\\r//g' /opt/wait-for-it.sh\nRUN sed -i 's/\\r//g' /opt/startup.relational.test.sh\n```\n\nTwo helper scripts are copied and made executable:\n\n| Script | Purpose |\n|--------|---------|\n| `wait-for-it.sh` | Blocks execution until a service (typically the relational database) is ready and accepting connections |\n| `startup.relational.test.sh` | Orchestrates the test startup process |\n\nThe `sed` commands remove Windows-style line endings (`\\r\\n`), converting them to Unix-style (`\\n`). This prevents \"bad interpreter\" errors when running scripts on Linux.\n\n## Working Directory\n\n```dockerfile\nWORKDIR /usr/src/app\n```\n\nSets the working directory to where the application code was copied, ensuring subsequent commands run in the correct context.\n\n## Environment Configuration\n\n```dockerfile\nRUN echo \"\" > .env\n```\n\nCreates an empty `.env` file. This ensures the application has an environment file available, even if it's initially empty. The actual environment variables are typically provided at runtime through Docker's `-e` flag or a docker-compose override.\n\n## Startup Command\n\n```dockerfile\nCMD [\"/opt/startup.relational.test.sh\"]\n```\n\nThe container starts by executing the startup script, which should:\n\n1. Wait for the relational database to be available (using `wait-for-it.sh`)\n2. Run any necessary database setup (migrations, seed data)\n3. 
Execute the test suite\n\n## Usage\n\nThis image is typically run via docker-compose alongside a relational database service:\n\n```yaml\n# Example docker-compose snippet\nservices:\n relational-db:\n image: postgres:16-alpine\n environment:\n POSTGRES_DB: testdb\n POSTGRES_USER: testuser\n POSTGRES_PASSWORD: testpass\n\n relational-test:\n build:\n context: .\n dockerfile: relational.test.Dockerfile\n depends_on:\n - relational-db\n environment:\n DATABASE_HOST: relational-db\n DATABASE_PORT: 5432\n```\n\n## Modifying This Dockerfile\n\nWhen making changes, consider:\n\n1. **Node version**: Update the base image tag if your application requires a different Node.js version\n2. **Additional global tools**: Add more `npm i -g` packages if your tests require them\n3. **Environment variables**: Modify the `.env` creation or provide variables at runtime\n4. **Startup logic**: Adjust `startup.relational.test.sh` if your test workflow differs","other-renovate-json":"# Other — renovate.json\n\n# renovate.json\n\n## Overview\n\nThis file configures **Renovate**, an automated dependency update tool that monitors your repository for outdated dependencies and creates pull requests to keep them current.\n\n## Configuration\n\n```json\n{\n \"extends\": [\n \"config:base\"\n ]\n}\n```\n\n### Extends: `config:base`\n\nThe `config:base` preset is Renovate's default configuration bundle. 
It includes sensible defaults for most JavaScript/TypeScript projects:\n\n| Setting | Value | Description |\n|---------|-------|-------------|\n| `rangeStrategy` | `replace` | Replace version ranges with newer versions |\n| `automerge` | `false` | Don't auto-merge PRs (requires manual review) |\n| `automergeType` | `pr` | Create PRs for all updates |\n| `schedule` | `[\"before 5am on the first day of the month\"]` | Monthly update checks |\n| `prHourlyLimit` | `2` | Limit hourly PR creation rate |\n| `prConcurrentLimit` | `10` | Limit concurrent open PRs |\n| `lockFileMaintenance` | `{ \"enabled\": true }` | Update lockfiles periodically |\n\n## Behavior\n\nWith this configuration, Renovate will:\n\n1. **Scan** your `package.json`, `package-lock.json`, and other dependency files\n2. **Check** for available updates against the npm registry\n3. **Create** pull requests when updates are available\n4. **Group** related updates into single PRs where possible\n\n## Customization\n\nTo modify this configuration, add additional presets or override specific settings:\n\n```json\n{\n \"extends\": [\n \"config:base\"\n ],\n \"schedule\": [\"after 10am and before 4pm on weekdays\"],\n \"automerge\": true,\n \"major\": {\n \"automerge\": false\n }\n}\n```\n\n## Integration\n\nThis file is read by the Renovate bot when it runs on your repository. No local installation is required if using the hosted Renovate app on GitHub, GitLab, or Bitbucket.\n\n## See Also\n\n- [Renovate Presets Documentation](https://docs.renovatebot.com/presets-config/)\n- [Renovate Configuration Options](https://docs.renovatebot.com/configuration-options/)","other-skills-lock-json":"# Other — skills-lock.json\n\n# skills-lock.json\n\n## Overview\n\n`skills-lock.json` is a lockfile that pins the exact version of each skill in the system. 
It serves as the source of truth for which skills are installed and ensures reproducible behavior by recording a cryptographic hash of each skill's content.\n\nThink of it as the equivalent of `package-lock.json` for npm — while a package.json declares *what* you need, the lockfile records *exactly* what you have.\n\n## File Structure\n\n```json\n{\n \"version\": 1,\n \"skills\": {\n \"<skill-name>\": {\n \"source\": \"string\",\n \"sourceType\": \"github\",\n \"skillPath\": \"string\",\n \"computedHash\": \"string\"\n }\n }\n}\n```\n\n### Fields\n\n| Field | Description |\n|-------|-------------|\n| `version` | Schema version for the lockfile format |\n| `skills` | Object mapping skill names to their locked metadata |\n| `source` | The GitHub repository containing the skill (e.g., `mattpocock/skills`) |\n| `sourceType` | The type of source — currently always `\"github\"` |\n| `skillPath` | Path to the `SKILL.md` file within the source repository |\n| `computedHash` | SHA-256 hash of the skill's content at lock time |\n\n## Purpose\n\nThe lockfile fulfills several critical functions:\n\n1. **Reproducibility** — Anyone cloning the project gets the exact same skills, byte-for-byte\n2. **Change detection** — The hash changes when skill content changes, triggering updates\n3. **Audit trail** — You can trace every skill back to its source and verify its integrity\n4. 
**Deduplication** — Multiple skills from the same source can be managed efficiently\n\n## Installed Skills\n\nThe lockfile currently contains 12 skills from the `mattpocock/skills` repository:\n\n### Engineering Skills\n\n| Skill | Path | Purpose |\n|-------|------|---------|\n| `diagnose` | `skills/engineering/diagnose/SKILL.md` | Diagnostic techniques |\n| `grill-with-docs` | `skills/engineering/grill-with-docs/SKILL.md` | Code review with documentation |\n| `improve-codebase-architecture` | `skills/engineering/improve-codebase-architecture/SKILL.md` | Architecture improvement |\n| `setup-matt-pocock-skills` | `skills/engineering/setup-matt-pocock-skills/SKILL.md` | Initial setup |\n| `tdd` | `skills/engineering/tdd/SKILL.md` | Test-driven development |\n| `to-issues` | `skills/engineering/to-issues/SKILL.md` | Converting work to issues |\n| `to-prd` | `skills/engineering/to-prd/SKILL.md` | Creating product requirements |\n| `triage` | `skills/engineering/triage/SKILL.md` | Issue triage |\n| `zoom-out` | `skills/engineering/zoom-out/SKILL.md` | High-level perspective |\n\n### Productivity Skills\n\n| Skill | Path | Purpose |\n|-------|------|---------|\n| `caveman` | `skills/productivity/caveman/SKILL.md` | Simple productivity approach |\n| `grill-me` | `skills/productivity/grill-me/SKILL.md` | Self-review technique |\n| `write-a-skill` | `skills/productivity/write-a-skill/SKILL.md` | Creating new skills |\n\n## Usage in the System\n\nThe lockfile is read by the skill management system to:\n\n- **Resolve skills** — Map a skill name to its source location and file path\n- **Check for updates** — Compare current hashes against remote content to detect changes\n- **Verify integrity** — Validate that installed skills match their recorded hashes\n- **Install/Update** — Fetch skill content from the specified source and path\n\n```mermaid\ngraph LR\n A[skills-lock.json] --> B[Skill Resolver]\n B --> C{Resolve skill?}\n C -->|Found| D[Return skill metadata]\n C -->|Not 
found| E[Error: unknown skill]\n \n F[Update Check] --> A\n F --> G[Fetch from GitHub]\n G --> H{Hash matches?}\n H -->|Yes| I[Up to date]\n H -->|No| J[Update available]\n```\n\n## Adding a New Skill\n\nWhen a new skill is added to the system:\n\n1. The skill is fetched from its source repository\n2. A SHA-256 hash of the content is computed\n3. An entry is added to `skills-lock.json` with the source, path, and hash\n4. The lockfile is committed to preserve the exact version\n\n## Updating Skills\n\nTo update a skill:\n\n1. Fetch the latest content from the source\n2. Compute the new hash\n3. Update the `computedHash` field in the lockfile\n4. Commit the changes\n\nThe hash change acts as both a version marker and an integrity check.","other-startup-document-ci-sh":"# Other — startup.document.ci.sh\n\n# startup.document.ci.sh\n\n## Overview\n\n`startup.document.ci.sh` is the CI (Continuous Integration) startup and test runner script for the document service. It orchestrates the complete test environment setup—starting dependencies, seeding the database, launching the application, and running the full test suite.\n\nThis script is designed to run in an isolated CI environment (e.g., Docker Compose) where services like MongoDB and Maildev are available as network endpoints.\n\n## Purpose\n\nThe script serves two primary functions:\n\n1. **Environment Bootstrap** — Waits for all required infrastructure services to be ready before starting the application\n2. **CI Pipeline Execution** — Runs the complete validation suite: linting and end-to-end tests\n\n## Execution Flow\n\n```mermaid\nflowchart TD\n A[Wait for MongoDB<br/>mongo:27017] --> B[Seed Document Database]\n B --> C[Start Production Server<br/>Background]\n C --> D[Wait for Maildev<br/>maildev:1080]\n D --> E[Wait for App<br/>localhost:3000]\n E --> F[Run Linting]\n F --> G[Run E2E Tests<br/>--runInBand]\n```\n\n## Step-by-Step Breakdown\n\n### 1. 
Wait for MongoDB\n\n```bash\n/opt/wait-for-it.sh mongo:27017\n```\n\nBlocks until MongoDB is reachable on port 27017. The `wait-for-it.sh` utility is a standard health-check script that polls a TCP endpoint until it accepts connections.\n\n**Why this matters:** The seed script requires a running MongoDB instance. Starting the app before the database is available would cause failures.\n\n### 2. Seed the Document Database\n\n```bash\nnpm run seed:run:document\n```\n\nPopulates MongoDB with test data required for the document service tests. This ensures each test run starts with a known, consistent dataset.\n\n### 3. Start the Production Server\n\n```bash\nnpm run start:prod > prod.log 2>&1 &\n```\n\nLaunches the application in production mode, running in the background. Output is redirected to `prod.log`.\n\n- `> prod.log 2>&1` — Captures both stdout and stderr to a log file\n- `&` — Runs the process in the background so the script can continue\n\n### 4. Wait for Maildev\n\n```bash\n/opt/wait-for-it.sh maildev:1080\n```\n\nBlocks until the Maildev service (SMTP test server) is available on port 1080.\n\n**Why this matters:** The application likely sends emails (password resets, notifications, etc.). Maildev captures these for test verification.\n\n### 5. Wait for the Application\n\n```bash\n/opt/wait-for-it.sh localhost:3000\n```\n\nBlocks until the application is accepting HTTP requests on port 3000.\n\n**Why this matters:** The application needs to be fully initialized (database connections, middleware, etc.) before tests run. This is the final health check before test execution.\n\n### 6. Run Linting\n\n```bash\nnpm run lint\n```\n\nExecutes static code analysis. This runs before tests to catch style violations and basic code quality issues without spending time on test execution.\n\n### 7. 
Run End-to-End Tests\n\n```bash\nnpm run test:e2e -- --runInBand\n```\n\nExecutes the E2E test suite with `--runInBand`, which runs tests sequentially rather than in parallel.\n\n**Why `--runInBand`:**\n- Avoids port conflicts between test scenarios\n- Prevents race conditions in shared test data\n- Makes test output easier to read in CI logs\n- Ensures consistent, reproducible results\n\n## Dependencies\n\nThe script expects these services to be available in the CI environment:\n\n| Service | Endpoint | Purpose |\n|---------|----------|---------|\n| MongoDB | `mongo:27017` | Document database |\n| Maildev | `maildev:1080` | Email testing SMTP server |\n| Application | `localhost:3000` | The service under test |\n\nThese are typically defined in a `docker-compose.ci.yml` or similar orchestration file.\n\n## Exit Behavior\n\n- `set -e` at the top causes the script to exit immediately if any command fails\n- If any step fails (MongoDB unavailable, seed fails, lint errors, test failures), the script exits with a non-zero status\n- The background app process (`npm run start:prod`) continues running if the script fails mid-execution—CI runners typically clean up containers/pids anyway\n\n## Common Modifications\n\nWhen modifying this script for different CI scenarios:\n\n- **Add service health checks** — Insert additional `wait-for-it.sh` calls for new dependencies\n- **Skip linting** — Comment out or add a flag to skip `npm run lint` for faster iteration\n- **Parallel tests** — Remove `--runInBand` for faster execution (requires isolated test fixtures)\n- **Debug mode** — Remove `2>&1` redirection to see app output in CI logs in real-time\n\n## Relationship to Other Modules\n\nThis script is a top-level CI orchestration file. 
It invokes:\n\n- `npm run seed:run:document` — Database seeding module\n- `npm run start:prod` — Application entry point\n- `npm run lint` — Linting configuration\n- `npm run test:e2e` — E2E test suite\n\nIt does not call other shell scripts directly—it relies on `wait-for-it.sh` as an external utility.","other-startup-document-dev-sh":"# Other — startup.document.dev.sh\n\n# startup.document.dev.sh\n\n## Overview\n\n`startup.document.dev.sh` is the entry point script for initializing and launching the Document service in a production environment. It orchestrates the startup sequence by ensuring database availability, seeding initial data, and launching the application server.\n\n## Purpose\n\nThis script handles the complete startup lifecycle of the Document service:\n\n1. **Database Readiness** — Waits for MongoDB to be available before proceeding\n2. **Configuration Verification** — Outputs environment variables for debugging\n3. **Data Initialization** — Seeds the database with required initial data\n4. **Service Launch** — Starts the production server\n\n## Script Breakdown\n\n```bash\n#!/usr/bin/env bash\nset -e\n```\n\nThe script uses `bash` as the interpreter and `set -e` to exit immediately if any command fails, ensuring failures are caught early in the startup process.\n\n### Step 1: Wait for MongoDB\n\n```bash\n/opt/wait-for-it.sh mongo:27017\n```\n\nThis command blocks until MongoDB is reachable at `mongo:27017`. The `wait-for-it.sh` utility repeatedly attempts to connect to the MongoDB host, preventing the script from proceeding until the database is ready. This is critical in containerized environments where service startup order is not guaranteed.\n\n### Step 2: Display Environment Configuration\n\n```bash\ncat .env\n```\n\nOutputs the contents of the `.env` file to stdout. This serves as a verification step to confirm that environment variables are loaded correctly before the application starts. 
The output is visible in container logs, aiding debugging during startup issues.\n\n### Step 3: Seed Database\n\n```bash\nnpm run seed:run:document\n```\n\nExecutes the npm script responsible for populating the database with initial data. This typically includes creating default documents, setting up indexes, or establishing baseline configurations required for the service to function.\n\n### Step 4: Start Production Server\n\n```bash\nnpm run start:prod\n```\n\nLaunches the Document service in production mode. This is the final step — the script blocks here as the server runs.\n\n## Dependencies\n\n| Dependency | Purpose |\n|-------------|---------|\n| `/opt/wait-for-it.sh` | Service readiness checker |\n| MongoDB at `mongo:27017` | Primary database |\n| `.env` file | Environment configuration |\n| `npm run seed:run:document` | Database seeding script |\n| `npm run start:prod` | Production server startup |\n\n## Startup Sequence\n\n```mermaid\nflowchart TD\n A[Start Script] --> B[Wait for MongoDB]\n B --> C{MongoDB Ready?}\n C -->|No| B\n C -->|Yes| D[Display .env contents]\n D --> E[Run seed:run:document]\n E --> F[Run start:prod]\n F --> G[Service Running]\n```\n\n## Usage\n\nThis script is typically invoked by a container orchestrator or Docker Compose:\n\n```yaml\nservices:\n document-service:\n build: .\n command: [\"/app/startup.document.dev.sh\"]\n depends_on:\n - mongo\n```\n\n## Error Handling\n\nThe `set -e` directive ensures the script exits on the first failure. Common failure points include:\n\n- **MongoDB unavailable** — The script hangs in the wait-for-it step until timeout\n- **Missing `.env` file** — The `cat` command fails, stopping startup\n- **Seed failure** — Database initialization errors halt the process\n- **Server failure** — The production server exits with a non-zero code\n\n## Troubleshooting\n\nIf the service fails to start:\n\n1. Check that MongoDB is running and accessible at `mongo:27017`\n2. 
Verify the `.env` file exists and contains valid configuration\n3. Review seed script logs for database initialization errors\n4. Examine the production server logs for runtime issues","other-startup-document-test-sh":"# Other — startup.document.test.sh\n\n# startup.document.test.sh\n\n## Overview\n\nThis script is the entry point for setting up and launching the document test development environment. It orchestrates service readiness checks, dependency installation, database seeding, and application startup in a deterministic sequence.\n\n## Purpose\n\nThe script prepares a complete local development environment by:\n1. Ensuring external dependencies (MongoDB, Maildev) are available\n2. Installing application dependencies\n3. Populating the database with seed data\n4. Starting the development server\n\n## Execution Flow\n\n```mermaid\nflowchart TD\n A[Start] --> B[Wait for MongoDB:27017]\n B --> C[Wait for Maildev:1080]\n C --> D[npm install]\n D --> E[Seed Document Database]\n E --> F[Start Dev Server]\n\n B -.->|Retry until ready| B\n C -.->|Retry until ready| C\n```\n\n## Components\n\n### Service Wait Steps\n\n| Service | Port | Purpose |\n|---------|------|---------|\n| MongoDB | 27017 | Primary document database |\n| Maildev | 1080 | Email development/testing server |\n\nThe script uses `/opt/wait-for-it.sh` to poll each service. This ensures all dependencies are fully initialized before the application attempts to connect—critical in containerized or distributed development environments where service startup times vary.\n\n### npm install\n\nInstalls all JavaScript dependencies defined in `package.json`. This is a standard npm operation that resolves and downloads packages from the npm registry.\n\n### Seed Step\n\n```bash\nnpm run seed:run:document\n```\n\nThis runs a custom npm script that populates MongoDB with initial document data. 
The seeded data provides a consistent starting state for development and testing.\n\n### Startup Step\n\n```bash\nnpm run start:dev\n```\n\nLaunches the application in development mode with hot-reload capabilities.\n\n## Error Handling\n\nThe `set -e` directive causes the script to exit immediately if any command returns a non-zero exit status. This prevents the startup sequence from proceeding when earlier steps fail—for example, preventing the app from starting if database seeding fails.\n\n## Usage\n\nExecute directly with bash:\n\n```bash\nbash startup.document.test.sh\n```\n\nOr make it executable and run directly:\n\n```bash\nchmod +x startup.document.test.sh\n./startup.document.test.sh\n```\n\n## Dependencies\n\n- **wait-for-it.sh**: Must exist at `/opt/wait-for-it.sh`\n- **Node.js & npm**: Required for package installation and running npm scripts\n- **MongoDB**: External service must be reachable at `mongo:27017`\n- **Maildev**: External service must be reachable at `maildev:1080`\n\n## Integration\n\nThis script is typically invoked by Docker Compose, Kubernetes, or a development workflow manager to bootstrap the document test service. It assumes all services are on the same network and reachable via their container/host names (`mongo`, `maildev`).","other-startup-relational-ci-sh":"# Other — startup.relational.ci.sh\n\n# startup.relational.ci.sh\n\n## Overview\n\nThis script orchestrates the complete CI pipeline for the relational database (PostgreSQL) path of the application. It handles service readiness, database setup, application startup, and end-to-end testing in a sequential, blocking manner.\n\n## Purpose\n\nThe script serves as the entry point for continuous integration tests that require a full relational database stack. It ensures all dependencies are available and healthy before running tests, providing a reliable automated validation pipeline.\n\n## What It Does\n\n### 1. 
Wait for PostgreSQL\n\n```bash\n/opt/wait-for-it.sh postgres:5432\n```\n\nBlocks until PostgreSQL is accepting connections on port 5432. The `wait-for-it.sh` utility repeatedly attempts connections with a timeout, preventing premature migration execution.\n\n### 2. Run Database Migrations\n\n```bash\nnpm run migration:run\n```\n\nApplies all pending database migrations to the PostgreSQL schema. This ensures the database structure is current before seeding or running tests.\n\n### 3. Seed the Database\n\n```bash\nnpm run seed:run:relational\n```\n\nPopulates the database with test data required for end-to-end tests. The `relational` qualifier indicates this seeds the PostgreSQL-specific data (as opposed to other database backends).\n\n### 4. Start Production Server\n\n```bash\nnpm run start:prod > prod.log 2>&1 &\n```\n\nLaunches the application in production mode, running it as a background process. Output is redirected to `prod.log` to prevent blocking. The application listens on port 3000.\n\n### 5. Wait for Maildev\n\n```bash\n/opt/wait-for-it.sh maildev:1080\n```\n\nBlocks until Maildev (the email testing service) is ready on port 1080. Maildev captures outgoing emails during tests, enabling verification of email functionality.\n\n### 6. Wait for Application\n\n```bash\n/opt/wait-for-it.sh localhost:3000\n```\n\nBlocks until the production server is accepting HTTP requests. This ensures the application is fully initialized before proceeding to tests.\n\n### 7. Run Linting\n\n```bash\nnpm run lint\n```\n\nExecutes static code analysis to verify code quality and style compliance. Runs before tests to catch issues early.\n\n### 8. Run End-to-End Tests\n\n```bash\nnpm run test:e2e -- --runInBand\n```\n\nExecutes the end-to-end test suite. 
The `--runInBand` flag forces sequential execution (no parallelization), which is important for CI environments where tests may share state or resources.\n\n## Error Handling\n\nThe `set -e` directive at the top causes the script to exit immediately if any command returns a non-zero exit status. This ensures failures fail fast—the CI pipeline stops at the first error rather than continuing with a broken environment.\n\n## Dependencies\n\n| Service | Port | Purpose |\n|---------|------|---------|\n| PostgreSQL | 5432 | Relational database |\n| Maildev | 1080 | Email testing SMTP server |\n| Application | 3000 | Production HTTP server |\n\n## Usage\n\nExecute directly as a CI pipeline entry point:\n\n```bash\n./startup.relational.ci.sh\n```\n\nOr invoke via npm scripts defined in `package.json`:\n\n```bash\nnpm run ci:relational\n```\n\n## Relationship to Other Modules\n\nThis script is self-contained and makes no internal function calls. It operates by executing external commands:\n\n- **`/opt/wait-for-it.sh`** — external utility for service readiness checks\n- **npm scripts** — delegates to package.json definitions for migrations, seeding, server startup, linting, and testing\n\nThe script assumes the following npm scripts exist in `package.json`:\n\n- `migration:run`\n- `seed:run:relational`\n- `start:prod`\n- `lint`\n- `test:e2e`\n\n## Execution Flow\n\n```mermaid\ngraph TD\n A[Start Script] --> B[Wait for PostgreSQL]\n B --> C[Run Migrations]\n C --> D[Seed Database]\n D --> E[Start Production Server<br/>in background]\n E --> F[Wait for Maildev]\n F --> G[Wait for Application<br/>on port 3000]\n G --> H[Run Linting]\n H --> I[Run E2E Tests]\n I --> J[Complete]\n \n style A fill:#f9f,stroke:#333\n style J fill:#9f9,stroke:#333\n```","other-startup-relational-dev-sh":"# Other — startup.relational.dev.sh\n\n# startup.relational.dev.sh\n\n## Overview\n\nThis script orchestrates the startup sequence for the relational database application in production. 
It ensures PostgreSQL is available, applies database migrations, seeds initial data, and launches the production server.\n\n## Purpose\n\nThe script provides a deterministic, sequential startup process that guarantees all prerequisites are met before the application starts:\n\n1. **Database availability** — Waits for PostgreSQL to accept connections\n2. **Schema readiness** — Applies all pending database migrations\n3. **Data initialization** — Seeds the database with required initial data\n4. **Application launch** — Starts the production server\n\n## How It Works\n\n### Startup Sequence\n\n```mermaid\nflowchart TD\n A[Start] --> B[wait-for-it.sh]\n B --> C{PostgreSQL ready?}\n C -- No --> B\n C -- Yes --> D[migration:run]\n D --> E[seed:run:relational]\n E --> F[start:prod]\n F --> G[Application Running]\n```\n\n### Component Breakdown\n\n| Step | Command | Description |\n|------|---------|-------------|\n| 1 | `/opt/wait-for-it.sh postgres:5432` | Blocks until PostgreSQL accepts TCP connections on port 5432 |\n| 2 | `npm run migration:run` | Executes pending database migrations via the npm script |\n| 3 | `npm run seed:run:relational` | Seeds the relational database with initial data |\n| 4 | `npm run start:prod` | Launches the application in production mode |\n\n### Error Handling\n\n- `set -e` — The script exits immediately if any command fails, preventing the application from starting with an incomplete or broken database state\n\n## Dependencies\n\n### External Scripts\n\n- `/opt/wait-for-it.sh` — TCP port wait utility (must be available in the container/image)\n\n### NPM Scripts\n\nThe script invokes the following npm scripts, which must be defined in `package.json`:\n\n- `migration:run` — Runs database migrations\n- `seed:run:relational` — Seeds relational database tables\n- `start:prod` — Starts the production server\n\n## Usage\n\n### Execution\n\nThe script is designed to run as the main container 
entrypoint:\n\n```bash\n./startup.relational.dev.sh\n```\n\nOr as an ENTRYPOINT in a Dockerfile:\n\n```dockerfile\nENTRYPOINT [\"/bin/bash\", \"/path/to/startup.relational.dev.sh\"]\n```\n\n### Requirements\n\n- PostgreSQL must be accessible at the hostname `postgres` on port 5432\n- All npm scripts referenced must be defined in `package.json`\n- The wait-for-it script must exist at `/opt/wait-for-it.sh`\n\n## Integration\n\nThis script acts as the entry point for the production relational database service. It assumes:\n\n- A containerized environment (implied by the wait-for-it pattern and `/opt/` path)\n- PostgreSQL as the relational database\n- A Node.js runtime (implied by `npm` commands)\n\nNo incoming calls are expected — this is a top-level orchestration script that runs independently.","other-startup-relational-test-sh":"# Other — startup.relational.test.sh\n\n# startup.relational.test.sh\n\n## Overview\n\nThis script initializes and launches the development environment for the relational database test configuration. It orchestrates a linear startup sequence that waits for external services, installs dependencies, sets up the database schema, populates test data, and starts the development server.\n\n## Purpose\n\nThe script serves as the entry point for running the application with a relational (PostgreSQL) backend in development mode. 
It ensures all prerequisites are satisfied before the application starts:\n\n- External services (PostgreSQL, Maildev) are reachable\n- Node dependencies are installed\n- Database schema is migrated\n- Database is seeded with test data\n\n## Prerequisites\n\nThe script expects the following services to be available on the network:\n\n| Service | Port | Purpose |\n|---------|------|---------|\n| PostgreSQL | 5432 | Primary relational database |\n| Maildev | 1080 | Email testing server (SMTP catch-all) |\n\nBoth services must be reachable via DNS names `postgres` and `maildev` (typical in Docker Compose environments).\n\n## Execution Sequence\n\n```mermaid\nflowchart LR\n A[Wait for PostgreSQL] --> B[Wait for Maildev] --> C[npm install] --> D[Run Migrations] --> E[Seed Database] --> F[Start Dev Server]\n```\n\n### 1. Wait for PostgreSQL\n\n```bash\n/opt/wait-for-it.sh postgres:5432\n```\n\nBlocks execution until PostgreSQL is accepting connections on port 5432. The `wait-for-it.sh` script is a TCP connection wait utility that retries until the service is available or times out.\n\n### 2. Wait for Maildev\n\n```bash\n/opt/wait-for-it.sh maildev:1080\n```\n\nBlocks execution until the Maildev web interface is reachable on port 1080. This ensures email-dependent tests can run successfully.\n\n### 3. Install Dependencies\n\n```bash\nnpm install\n```\n\nInstalls Node.js packages defined in `package.json`. This is required before running migrations or starting the server.\n\n### 4. Run Migrations\n\n```bash\nnpm run migration:run\n```\n\nExecutes database migrations to create or update the schema. This command is defined in `package.json` and typically uses an ORM or migration tool (such as TypeORM, Knex, or Sequelize).\n\n### 5. Seed Database\n\n```bash\nnpm run seed:run:relational\n```\n\nPopulates the database with initial test data specific to the relational database configuration. 
The `relational` suffix indicates this seeder is designed for PostgreSQL, distinguishing it from other database backends.\n\n### 6. Start Development Server\n\n```bash\nnpm run start:dev\n```\n\nLaunches the application in development mode, typically with hot-reload enabled.\n\n## Error Handling\n\nThe script uses `set -e` to exit immediately on any command failure. If any step in the sequence fails (service unavailable, npm install error, migration failure, seed failure), the script terminates without proceeding to subsequent steps.\n\n## Integration Points\n\nThis script is part of a larger startup infrastructure:\n\n- **package.json**: Defines the npm scripts referenced (`migration:run`, `seed:run:relational`, `start:dev`)\n- **wait-for-it.sh**: External utility script located at `/opt/wait-for-it.sh`\n- **Docker Compose**: Typically provides the `postgres` and `maildev` services (implied by service names)\n\n## Common Issues\n\n| Issue | Cause | Solution |\n|-------|-------|----------|\n| Timeout waiting for PostgreSQL | Database container not running | Verify Docker Compose services are started |\n| Migration fails | Schema mismatch or missing permissions | Check database user permissions |\n| Seed fails | Migration not applied or empty database | Ensure migrations run successfully first |\n\n## Modifying the Script\n\nTo add additional startup checks or services, insert new `wait-for-it.sh` commands at the appropriate position in the sequence:\n\n```bash\n#!/usr/bin/env bash\nset -e\n\n/opt/wait-for-it.sh postgres:5432\n/opt/wait-for-it.sh maildev:1080\n# Add new service checks here\nnpm install\nnpm run migration:run\nnpm run seed:run:relational\nnpm run start:dev\n```","other-superpowers":"# Other — superpowers\n\n# Symphony Module\n\nSymphony is an automation service that autonomously resolves Plane.so issues using Google Jules (via REST API) as the coding agent, with GitHub or GitLab as the version control system. 
It implements a **Split-Harness** architecture that separates the intra-session agent lifecycle (owned by Jules) from the inter-session orchestration lifecycle (owned by Symphony).\n\n## Purpose\n\nSymphony bridges three external systems:\n\n1. **Plane.so** — Issue tracker that provides work items and accepts state updates\n2. **Google Jules** — AI coding agent that executes tasks within a session\n3. **GitHub/GitLab** — Version control where code changes are","other-test":"# Other — test\n\n# Jest E2E Test Configuration\n\n## Overview\n\nThis module (`test/jest-e2e.json`) provides the Jest configuration for end-to-end (E2E) tests in the application. It defines how Jest should discover, transform, and execute E2E test files.\n\n## Configuration Reference\n\n| Property | Value | Purpose |\n|----------|-------|---------|\n| `moduleFileExtensions` | `[\"js\", \"json\", \"ts\"]` | Supported file extensions for test modules |\n| `rootDir` | `\".\"` | Root directory for test files (relative to this config file) |\n| `testEnvironment` | `\"node\"` | Test execution environment |\n| `testRegex` | `\".e2e-spec.ts$\"` | Pattern to match E2E test files |\n| `transform` | `ts-jest` | TypeScript/JavaScript transformation |\n\n## Key Configuration Details\n\n### Test File Pattern\n\nThe `testRegex` pattern `.e2e-spec.ts$` matches any TypeScript file ending with `.e2e-spec.ts`. This naming convention distinguishes E2E tests from other test types (unit tests, integration tests) in the codebase.\n\n### TypeScript Support\n\nThe `transform` configuration uses `ts-jest` to transpile TypeScript test files on-the-fly, allowing E2E tests to be written in TypeScript without a separate build step.\n\n### Node Environment\n\nSetting `testEnvironment` to `\"node\"` runs tests in a Node.js runtime rather than a browser environment. 
This is typical for API-level E2E tests or tests that don't require DOM manipulation.\n\n## Usage\n\nRun E2E tests using the standard Jest CLI with this configuration:\n\n```bash\nnpx jest --config test/jest-e2e.json\n```\n\nOr via npm scripts (if configured in `package.json`):\n\n```bash\nnpm run test:e2e\n```\n\n## Test File Structure\n\nE2E test files should be placed in the `test/` directory (or subdirectories) and follow the naming convention:\n\n```\ntest/\n├── some-feature.e2e-spec.ts\n├── another-feature.e2e-spec.ts\n└── ...\n```\n\n## Relationship to Other Test Configurations\n\nThis configuration is specific to E2E tests. The codebase may include separate Jest configurations for other test types:\n\n- **Unit tests**: Typically use a different `testRegex` pattern (e.g., `.spec.ts`)\n- **Integration tests**: May have their own configuration or share the E2E config\n\n```mermaid\nflowchart TD\n A[Jest CLI] --> B[test/jest-e2e.json]\n B --> C{testRegex}\n C -->|Match| D[.e2e-spec.ts files]\n C -->|No match| E[Ignored]\n D --> F[ts-jest Transform]\n F --> G[Execute in Node Environment]\n G --> H[Test Results]\n```\n\n## Extending the Configuration\n\nTo add custom behavior, modify the JSON configuration. 
Common extensions include:\n\n- **`setupFilesAfterEnv`**: Run setup code after test framework is initialized\n- **`globalSetup`/`globalTeardown`**: Execute code once before/after all tests\n- **`coverageDirectory`**: Specify where coverage reports are written\n- **`testTimeout`**: Increase default timeout for slow E2E tests\n\nExample extension:\n\n```json\n{\n \"moduleFileExtensions\": [\"js\", \"json\", \"ts\"],\n \"rootDir\": \".\",\n \"testEnvironment\": \"node\",\n \"testRegex\": \".e2e-spec.ts$\",\n \"transform\": {\n \"^.+\\\\.(t|j)s$\": \"ts-jest\"\n },\n \"setupFilesAfterEnv\": [\"<rootDir>/test/setup.ts\"],\n \"testTimeout\": 10000\n}\n```","other-tsconfig-build-json":"# Other — tsconfig.build.json\n\n# tsconfig.build.json\n\nA TypeScript configuration file optimized for production builds, excluding test files and build artifacts from compilation.\n\n## Overview\n\nThis configuration extends the base `tsconfig.json` and customizes it for build-time compilation. It ensures that test files, output directories, and third-party code are excluded from the compiled output, resulting in a cleaner and smaller production build.\n\n## Configuration Details\n\n```json\n{\n \"extends\": \"./tsconfig.json\",\n \"exclude\": [\"node_modules\", \"test\", \"dist\", \"**/*spec.ts\"]\n}\n```\n\n### Key Properties\n\n| Property | Purpose |\n|----------|---------|\n| `extends` | Inherits all compiler options from the base `tsconfig.json` |\n| `exclude` | Prevents specified paths from being included in compilation |\n\n### Excluded Paths\n\n| Path | Reason |\n|------|--------|\n| `node_modules` | Third-party dependencies installed separately; do not need recompilation |\n| `test` | Test utilities and test runner configurations are not needed in production |\n| `dist` | Prevents re-compiling previously built output (avoids circular builds) |\n| `**/*spec.ts` | Excludes all test specification files matching the pattern |\n\n## Relationship to Base 
Configuration\n\n```mermaid\nflowchart TB\n tsconfig[tsconfig.json<br/>Base config] --> extends\n tsconfigBuild[tsconfig.build.json<br/>Build config] --> extends\n extends --> compiler[TypeScript Compiler]\n compiler --> output[Compiled Output]\n \n style tsconfigBuild fill:#e1f5fe\n style output fill:#e8f5e9\n```\n\nThe build configuration inherits all compiler options from the base configuration and applies additional path exclusions. This separation allows:\n\n- **Development**: Use `tsconfig.json` for IDE support, type checking, and running tests\n- **Production**: Use `tsconfig.build.json` for creating optimized build outputs\n\n## Usage\n\nThis file is typically referenced by build tools or CI/CD pipelines:\n\n```bash\n# Compile using build-specific config\ntsc -p tsconfig.build.json\n\n# Or via npm scripts\nnpm run build\n```\n\n## When to Modify\n\n- Add new output directories to the `exclude` array (e.g., `build`, `.next`)\n- Exclude additional test file patterns (e.g., `**/*.test.ts`)\n- Add new source directories that should not be included in production builds","other-tsconfig-json":"# Other — tsconfig.json\n\n# tsconfig.json — TypeScript Configuration\n\n## Overview\n\nThis `tsconfig.json` configures the TypeScript compiler for the project. 
It defines how TypeScript source files are compiled to JavaScript, what output format to use, and which type-checking rules to enforce.\n\nThis configuration targets a **Node.js** environment using CommonJS modules, with decorator support for frameworks like TypeORM.\n\n## Configuration Summary\n\n| Category | Setting |\n|----------|---------|\n| Module System | CommonJS |\n| Target | ES2021 |\n| Output Directory | `./dist` |\n| Type Definitions | Enabled (`.d.ts` files) |\n| Source Maps | Enabled |\n| Strict Mode | Partial (strictNullChecks enabled, others relaxed) |\n\n## Compiler Options\n\n### Module & Output\n\n```json\n\"module\": \"commonjs\",\n\"target\": \"ES2021\",\n\"outDir\": \"./dist\",\n\"moduleResolution\": \"node\"\n```\n\n- **`module: \"commonjs\"`** — Outputs JavaScript using CommonJS `require()`/`module.exports`. This is the standard for Node.js applications.\n- **`target: \"ES2021\"`** — The JavaScript version to compile to. Modern Node.js versions (v16+) support ES2021 features.\n- **`outDir: \"./dist\"`** — Compiled JavaScript files are placed in the `dist` directory, keeping source and output separate.\n\n### Decorator Support\n\n```json\n\"experimentalDecorators\": true,\n\"emitDecoratorMetadata\": true\n```\n\nThese settings enable TypeScript's decorator syntax, commonly used with:\n\n- **TypeORM** — for entity decorators like `@Entity()`, `@Column()`, `@PrimaryColumn()`\n- **class-validator** — for validation decorators like `@IsString()`, `@IsEmail()`\n- **routing-controllers** — for controller and route decorators\n\n`emitDecoratorMetadata` emits additional type information that frameworks can use at runtime (e.g., for dependency injection).\n\n### Type Definitions\n\n```json\n\"declaration\": true,\n\"skipLibCheck\": true\n```\n\n- **`declaration: true`** — Generates `.d.ts` type definition files alongside compiled JavaScript. 
This allows other TypeScript projects to import this code with full type information.\n- **`skipLibCheck: true`** — Skips type checking of declaration files in `node_modules/@types`. This significantly speeds up compilation and avoids errors from third-party type definitions.\n\n### Source Maps & Comments\n\n```json\n\"sourceMap\": true,\n\"removeComments\": true\n```\n\n- **`sourceMap: true`** — Generates `.map` files for debugging, allowing you to set breakpoints in TypeScript source while debugging compiled JavaScript.\n- **`removeComments: true`** — Strips all comments from the output JavaScript, reducing file size.\n\n### ES Module Interoperability\n\n```json\n\"esModuleInterop\": true,\n\"allowSyntheticDefaultImports\": true\n```\n\nThese settings allow more flexible imports from CommonJS modules, including default imports from modules that don't explicitly export a default:\n\n```typescript\n// Works even if 'express' doesn't have 'export default'\nimport express from 'express';\n```\n\n### Strictness Settings\n\n```json\n\"strictNullChecks\": true,\n\"noImplicitAny\": false,\n\"strictBindCallApply\": false,\n\"forceConsistentCasingInFileNames\": false,\n\"noFallthroughCasesInSwitch\": false\n```\n\n| Option | Value | Effect |\n|--------|-------|--------|\n| `strictNullChecks` | `true` | `null` and `undefined` must be handled explicitly |\n| `noImplicitAny` | `false` | Allows implicit `any` types (less strict) |\n| `strictBindCallApply` | `false` | Less strict typing on `.bind()`, `.call()`, `.apply()` |\n| `forceConsistentCasingInFileNames` | `false` | Allows mixed-case filenames |\n| `noFallthroughCasesInSwitch` | `false` | Allows fallthrough in switch statements |\n\nThe configuration enables **strict null checks** (catches common `undefined` errors) while relaxing other strict options for developer flexibility.\n\n### Incremental Compilation\n\n```json\n\"incremental\": true\n```\n\nEnables incremental compilation, which caches build information to speed 
up subsequent compilations. The compiler stores cache data in `./tsconfig.tsbuildinfo`.\n\n## Build Workflow\n\n```\nsrc/*.ts ──(tsc)──> dist/*.js\n + dist/*.js.map (source maps)\n + dist/*.d.ts (type definitions)\n```\n\n1. TypeScript reads this configuration\n2. Compiles `.ts` files in `src/` (or project root)\n3. Outputs JavaScript, source maps, and type definitions to `dist/`\n4. Subsequent builds use incremental compilation for speed\n\n## Integration with Other Tools\n\nThis `tsconfig.json` works alongside:\n\n- **ts-node** — Executes TypeScript directly without manual compilation\n- **Jest** — Uses this config for type checking during tests (with `ts-jest`)\n- **ESLint** — May reference this for TypeScript-aware linting\n- **IDE (VS Code)** — Uses this for IntelliSense and inline errors\n\n## Modifying This Configuration\n\n### To enable full strict mode:\n\n```json\n{\n \"compilerOptions\": {\n \"strict\": true,\n \"noImplicitAny\": true,\n \"strictBindCallApply\": true,\n \"forceConsistentCasingInFileNames\": true,\n \"noFallthroughCasesInSwitch\": true\n }\n}\n```\n\n### To compile for a browser (AMD modules):\n\n```json\n{\n \"compilerOptions\": {\n \"module\": \"amd\",\n \"target\": \"ES5\"\n }\n}\n```\n\n### To include additional files or exclude patterns:\n\n```json\n{\n \"include\": [\"src/**/*\"],\n \"exclude\": [\"node_modules\", \"dist\", \"**/*.spec.ts\"]\n}\n```","other-user":"# Other — user\n\n# Auth E2E Test Module\n\n## Overview\n\nThis module contains end-to-end (E2E) tests for the authentication system. 
It validates the complete user authentication flow, from registration through login, email confirmation, profile management, and account deletion.\n\nThe tests use **Supertest** to make HTTP requests against the running application, and they integrate with a mail server mock to verify email-based workflows.\n\n## Test Environment\n\n### Dependencies\n\n| Constant | Source | Purpose |\n|----------|--------|---------|\n| `APP_URL` | `../utils/constants` | Base URL of the running application |\n| `TESTER_EMAIL` | `../utils/constants` | Pre-existing test user email |\n| `TESTER_PASSWORD` | `../utils/constants` | Pre-existing test user password |\n| `MAIL_HOST` | `../utils/constants` | Mail server hostname |\n| `MAIL_PORT` | `../utils/constants` | Mail server port |\n\nThe tests also connect to a mail server at `http://${MAIL_HOST}:${MAIL_PORT}` to retrieve confirmation emails sent during registration and email change flows.\n\n## Test Coverage\n\n### Authentication Flow Diagram\n\n```mermaid\nsequenceDiagram\n participant User\n participant API as /api/v1/auth\n participant Mail as Mail Server\n\n User->>API: POST /email/register\n API-->>Mail: Send confirmation email\n API-->>User: 204 No Content\n \n User->>API: POST /email/login (unconfirmed)\n API-->>User: 200 + token\n \n User->>Mail: Get confirmation email\n Mail-->>User: Return email with hash\n \n User->>API: POST /email/confirm (with hash)\n API-->>User: 204 Email confirmed\n \n User->>API: POST /email/login (confirmed)\n API-->>User: 200 + token + refreshToken\n \n User->>API: GET /me\n API-->>User: 200 + user profile\n \n User->>API: PATCH /me (update profile)\n API-->>User: 200 Profile updated\n \n User->>API: DELETE /me\n API-->>User: 200 Account deleted\n```\n\n### Test Suites\n\n#### 1. 
Registration Tests\n\n| Test | Endpoint | Expected Result |\n|------|----------|-----------------|\n| Fail with existing email | `POST /api/v1/auth/email/register` | 422 with email error |\n| Successful registration | `POST /api/v1/auth/email/register` | 204 No Content |\n\nThe registration test creates dynamic user data using timestamps to ensure unique emails:\n\n```typescript\nconst newUserEmail = `User.${Date.now()}@example.com`;\nconst newUserFirstName = `Tester${Date.now()}`;\n```\n\n#### 2. Login Tests\n\n| Test | Endpoint | Expected Result |\n|------|----------|-----------------|\n| Login with unconfirmed email | `POST /api/v1/auth/email/login` | 200 + token (limited access) |\n| Login with confirmed email | `POST /api/v1/auth/email/login` | 200 + token + refreshToken + user data |\n\nLogin responses for confirmed users include:\n- `token` - JWT access token\n- `refreshToken` - Token for obtaining new access tokens\n- `tokenExpires` - Expiration timestamp\n- `user` - User profile object (excluding sensitive fields)\n\n#### 3. Email Confirmation Tests\n\n| Test | Endpoint | Expected Result |\n|------|----------|-----------------|\n| Confirm email successfully | `POST /api/v1/auth/email/confirm` | 204 No Content |\n| Confirm already confirmed email | `POST /api/v1/auth/email/confirm` | 404 Not Found |\n\nThe tests extract the confirmation hash from emails by matching the pattern:\n```typescript\n/.*confirm\\-email\\?hash\\=(\\S+).*/g\n```\n\n#### 4. 
Profile Management Tests\n\n| Test | Endpoint | Method |\n|------|----------|--------|\n| Retrieve own profile | `/api/v1/auth/me` | GET |\n| Update profile | `/api/v1/auth/me` | PATCH |\n| Update email | `/api/v1/auth/me` | PATCH |\n| Delete account | `/api/v1/auth/me` | DELETE |\n\n**Profile Retrieval** (`GET /api/v1/auth/me`):\n- Requires Bearer token authentication\n- Returns user profile with `provider`, `email` fields\n- Explicitly excludes `hash` and `password` from response\n\n**Profile Update** (`PATCH /api/v1/auth/me`):\n- Password changes require `oldPassword` field\n- Without `oldPassword`, returns 422 validation error\n- With valid `oldPassword`, returns 200 and updates successfully\n\n**Email Change Flow**:\n1. User submits new email via `PATCH /api/v1/auth/me`\n2. API returns 200 but email remains unchanged until confirmed\n3. New confirmation email sent to new email address\n4. User must call `POST /api/v1/auth/email/confirm/new` with hash\n5. After confirmation, login works with new email\n\n#### 5. 
Token Refresh Tests\n\n| Test | Endpoint | Expected Result |\n|------|----------|-----------------|\n| Get new refresh token | `POST /api/v1/auth/refresh` | 200 + new token + new refreshToken |\n| Reuse refresh token | `POST /api/v1/auth/refresh` | 401 Unauthorized |\n\nRefresh tokens are single-use:\n- First call returns new tokens\n- Second call with same token returns 401\n- This prevents token replay attacks\n\n## Test Data Cleanup\n\nTests use dynamic data generation to avoid conflicts:\n\n```typescript\nconst newUserEmail = `User.${Date.now()}@example.com`;\n```\n\nHowever, the final test **deletes the test user** to ensure cleanup:\n\n```typescript\nawait request(app).delete('/api/v1/auth/me').auth(newUserApiToken, {\n type: 'bearer',\n});\n```\n\n## Running the Tests\n\n```bash\n# Run all auth E2E tests\nnpm run test:e2e -- test/user/auth.e2e-spec.ts\n\n# Or with Jest directly\nnpx jest test/user/auth.e2e-spec.ts\n```\n\n## Connection to Source Code\n\nThe tests interact with the Auth Controller at `src/auth/auth.controller.ts`. The primary endpoint under test is:\n\n```\nDELETE /api/v1/auth/me\n```\n\nThis endpoint handles account deletion and is the outgoing call identified in the call graph. The tests verify that:\n\n1. Authenticated users can delete their own accounts\n2. 
Deleted accounts cannot log in (returns 422)\n\n## Key Assertions\n\n### Security Validations\n\nThe tests verify that sensitive data is never returned in API responses:\n\n```typescript\nexpect(body.user.hash).not.toBeDefined();\nexpect(body.user.password).not.toBeDefined();\nexpect(body.hash).not.toBeDefined();\nexpect(body.password).not.toBeDefined();\n```\n\n### State Transitions\n\nEach test validates the authentication state machine:\n- Unconfirmed user → Limited login access\n- Confirmed user → Full login access with refresh tokens\n- Deleted user → Cannot authenticate","other-utils":"# Other — utils\n\n# Test Utilities — Constants Module\n\n## Overview\n\nThe `test/utils/constants.ts` module exports shared configuration values used throughout the test suite. These constants provide centralized access to test credentials, application URLs, and external service endpoints needed by integration and end-to-end tests.\n\n## Exported Constants\n\n### Application Configuration\n\n```typescript\nexport const APP_URL = `http://localhost:${process.env.APP_PORT}`;\n```\n\nThe base URL for the application under test. The port is read from the `APP_PORT` environment variable, allowing the test suite to connect to the application regardless of which port it's running on.\n\n### Test User Credentials\n\n```typescript\nexport const TESTER_EMAIL = 'john.doe@example.com';\nexport const TESTER_PASSWORD = 'secret';\n```\n\nCredentials for a standard (non-privileged) test user. Used when tests require authenticated requests as a regular user—creating resources, viewing non-admin content, or testing permission boundaries.\n\n### Admin User Credentials\n\n```typescript\nexport const ADMIN_EMAIL = 'admin@example.com';\nexport const ADMIN_PASSWORD = 'secret';\n```\n\nCredentials for an administrator test user. 
Used when tests require elevated permissions, such as accessing admin-only endpoints, modifying system settings, or verifying role-based access control.\n\n### Mail Server Configuration\n\n```typescript\nexport const MAIL_HOST = process.env.MAIL_HOST;\nexport const MAIL_PORT = process.env.MAIL_CLIENT_PORT;\n```\n\nConnection details for the test mail server. These values are read from environment variables since the mail server is an external service that may run on different hosts/ports depending on the test environment setup.\n\n## Usage\n\nImport constants directly into test files:\n\n```typescript\nimport { APP_URL, TESTER_EMAIL, TESTER_PASSWORD, ADMIN_EMAIL, ADMIN_PASSWORD } from './utils/constants';\n\ndescribe('Authentication', () => {\n it('allows regular users to sign in', async () => {\n const response = await request(APP_URL)\n .post('/auth/login')\n .send({ email: TESTER_EMAIL, password: TESTER_PASSWORD });\n \n expect(response.status).toBe(200);\n });\n\n it('allows admin users to access admin routes', async () => {\n const response = await request(APP_URL)\n .get('/admin/users')\n .auth(ADMIN_EMAIL, ADMIN_PASSWORD);\n \n expect(response.status).toBe(200);\n });\n});\n```\n\n## Environment Requirements\n\nThe following environment variables must be set before running tests:\n\n| Variable | Purpose |\n|----------|---------|\n| `APP_PORT` | Port where the application server is running |\n| `MAIL_HOST` | Hostname of the test mail server |\n| `MAIL_CLIENT_PORT` | Port for connecting to the mail server |\n\n## Design Notes\n\n- **No internal dependencies**: This module has no imports, making it safe to load at any point in the test initialization sequence\n- **Centralized credentials**: All test accounts are defined in one place, making it easy to rotate passwords or add new test users\n- **Environment-driven configuration**: External service connections use environment variables to support different test environments (local, CI, 
containerized)","other-wait-for-it-sh":"# Other — wait-for-it.sh\n\n# wait-for-it.sh\n\nA Bash utility script that waits for a TCP host and port to become available before executing a command. Commonly used in containerized applications to coordinate service startup order.\n\n## Overview\n\nWhen running distributed applications, services often have dependencies on other services (e.g., an application waiting for a database). This script provides a reliable way to block until a dependent service is ready.\n\n```bash\n# Block until MySQL is available on port 3306, then start the app\n./wait-for-it.sh db:3306 --timeout=30 -- ./start-app.sh\n```\n\n## Usage\n\n```bash\nwait-for-it.sh host:port [options] [-- command args]\n```\n\n### Options\n\n| Option | Description |\n|--------|-------------|\n| `-h HOST` / `--host=HOST` | Host to check |\n| `-p PORT` / `--port=PORT` | Port to check |\n| `-s` / `--strict` | Only execute command if the connection test succeeds |\n| `-q` / `--quiet` | Suppress all status messages |\n| `-t TIMEOUT` / `--timeout=TIMEOUT` | Maximum wait time in seconds (default: 15, 0 for no timeout) |\n| `-- COMMAND ARGS` | Command to execute after the test succeeds |\n\n### Examples\n\n**Basic usage with host:port syntax:**\n```bash\n./wait-for-it.sh redis:6379 -- ./run-app.sh\n```\n\n**Separate host and port with timeout:**\n```bash\n./wait-for-it.sh --host=mysql --port=3306 --timeout=60 -- ./migrate.sh\n```\n\n**Quiet mode with strict execution:**\n```bash\n./wait-for-it.sh elasticsearch:9200 -q -s -- ./index-data.sh\n```\n\n## How It Works\n\n### Core Mechanism\n\nThe script uses two methods to test TCP connectivity:\n\n1. **`nc` (netcat)** — When available, uses `nc -z` for connection testing\n2. 
**`/dev/tcp`** — Bash's built-in TCP device (fallback method)\n\n```bash\n# Method 1: Using netcat\nnc -z $WAITFORIT_HOST $WAITFORIT_PORT\n\n# Method 2: Using /dev/tcp (Bash built-in)\n(echo -n > /dev/tcp/$WAITFORIT_HOST/$WAITFORIT_PORT) >/dev/null 2>&1\n```\n\n### Execution Flow\n\n```mermaid\nflowchart TD\n A[Parse CLI arguments] --> B{Timeout > 0?}\n B -->|Yes| C[Run wait_for_wrapper]\n B -->|No| D[Run wait_for directly]\n C --> E[Spawn subprocess with timeout]\n D --> F[Poll connection loop]\n E --> G{Clean exit?}\n F --> H{Port available?}\n G -->|Yes| I[Return subprocess result]\n G -->|No| J[Exit with error]\n H -->|Yes| K[Execute command if provided]\n H -->|No| F\n K --> L[Exit with command result]\n```\n\n### Polling Loop\n\nThe `wait_for` function runs a simple polling loop:\n\n1. Records the start timestamp\n2. Attempts TCP connection every 1 second\n3. On success, calculates elapsed time and exits\n4. If timeout is reached, exits with non-zero status\n\n### Signal Handling\n\nThe `wait_for_wrapper` function handles interrupts gracefully using `trap`:\n\n```bash\ntrap \"kill -INT -$WAITFORIT_PID\" INT\n```\n\nThis ensures that pressing Ctrl-C properly terminates both the wrapper and the child process.\n\n### BusyBox Compatibility\n\nThe script detects if `timeout` comes from BusyBox (common in Alpine Linux containers) and adjusts accordingly:\n\n```bash\nif [[ $WAITFORIT_TIMEOUT_PATH =~ \"busybox\" ]]; then\n WAITFORIT_ISBUSY=1\n # Check for -t flag support in newer Alpine versions\n if timeout &>/dev/stdout | grep -q -e '-t '; then\n WAITFORIT_BUSYTIMEFLAG=\"-t\"\n fi\nfi\n```\n\n## Key Variables\n\n| Variable | Default | Purpose |\n|----------|---------|---------|\n| `WAITFORIT_TIMEOUT` | 15 | Seconds to wait before giving up |\n| `WAITFORIT_STRICT` | 0 | If 1, refuses to run command on failure |\n| `WAITFORIT_CHILD` | 0 | Internal flag for subprocess mode |\n| `WAITFORIT_QUIET` | 0 | If 1, suppresses all output |\n| `WAITFORIT_ISBUSY` | 0 | Set to 1 
when BusyBox timeout is detected |\n\n## Exit Codes\n\n| Code | Meaning |\n|------|---------|\n| 0 | Port became available (or strict mode succeeded) |\n| 1 | Timeout occurred, or port never became available |\n| 124 | Subprocess timed out (from BusyBox/GNU timeout) |\n\n## Integration Examples\n\n### Docker Compose\n\n```yaml\nservices:\n db:\n image: postgres:15\n # ... db config\n\n app:\n build: .\n depends_on:\n db:\n condition: service_healthy\n command: ./wait-for-it.sh db:5432 -- ./start.sh\n healthcheck:\n test: [\"CMD\", \"./healthcheck.sh\"]\n interval: 5s\n retries: 5\n```\n\n### Kubernetes Init Container\n\n```yaml\nspec:\n initContainers:\n - name: wait-for-db\n image: busybox:1.36\n command: ['sh', '-c', './wait-for-it.sh database:5432 --timeout=30']\n containers:\n - name: app\n image: myapp:latest\n```\n\n## Limitations\n\n- **TCP only**: Does not support UDP or other protocols\n- **Single port**: Can only wait for one host:port combination\n- **No authentication**: Cannot test services requiring credentials during the wait\n- **Bash required**: Relies on Bash-specific features (`/dev/tcp`, `[[` syntax)\n\nFor more complex scenarios (multiple services, authentication, UDP), consider alternatives like:\n- [dockerize](https://github.com/jwilder/dockerize)\n- [wait-for](https://github.com/Eficode/wait-for)\n- [docker-compose wait](https://github.com/ufoscout/docker-compose-wait)","other":"# Other\n\n# Other Module\n\nThe **Other** module contains the project's configuration, infrastructure, tooling, and documentation. 
These files don't implement business logic but provide the environment, automation, and guidance needed to develop, test, and deploy the application.\n\n## Module Organization\n\nThis module groups related files into functional categories:\n\n```\nOther/\n├── Configuration # Project metadata, tooling, and AI assistant guidance\n├── Docker # Container images and orchestration\n├── Environment # Environment variable templates\n├── Scripts # Startup and CI automation\n├── Documentation # Project docs, agents, and knowledge graphs\n└── Tests # E2E test configuration and utilities\n```\n\n## How the Sub-Modules Work Together\n\nThe files in this module form a complete development and deployment pipeline:\n\n```mermaid\nflowchart TD\n subgraph \"Configuration\"\n A[package.json] --> B[Dockerfiles]\n A --> C[docker-compose.yaml]\n A --> D[tsconfig.json]\n end\n \n subgraph \"Environment\"\n E[env-example-*] --> C\n E --> F[startup scripts]\n end\n \n subgraph \"Container Orchestration\"\n C --> G[Services: API, DB, Maildev, Adminer]\n B --> G\n end\n \n subgraph \"Startup & Testing\"\n F --> G\n G --> H[E2E Tests]\n end\n \n subgraph \"Documentation\"\n I[README.md] --> J[CONTEXT.md]\n I --> K[CLAUDE.md]\n I --> L[GEMINI.md]\n end\n```\n\n### 1. Configuration Layer\n\nThe foundation is set by configuration files that define how the project builds and behaves:\n\n- **[package.json](package-json.md)** — Defines dependencies, npm scripts, and project metadata\n- **[tsconfig.json](tsconfig-json.md)** and **[tsconfig.build.json](tsconfig-build-json.md)** — TypeScript compiler settings\n- **[nest-cli.json](nest-cli-json.md)** — NestJS CLI configuration for code generation\n- **[eslint.config.mjs](eslint-config-mjs.md)** — Linting rules and code quality enforcement\n- **[commitlint.config.js](commitlint-config-js.md)** — Git commit message conventions\n- **[renovate.json](renovate-json.md)** — Automated dependency updates\n\n### 2. 
Container Infrastructure\n\nDocker files provide consistent environments across development, testing, and production:\n\n**Main Dockerfiles:**\n- **[Dockerfile](dockerfile.md)** — Production image for relational database variant\n- **[document.Dockerfile](document-dockerfile.md)** — Production image for document database variant\n\n**Test Dockerfiles:**\n- **[relational.e2e.Dockerfile](relational-e2e-dockerfile.md)**, **[document.e2e.Dockerfile](document-e2e-dockerfile.md)** — E2E test environments\n- **[relational.test.Dockerfile](relational-test-dockerfile.md)**, **[document.test.Dockerfile](document-test-dockerfile.md)** — Integration test environments\n- **[maildev.Dockerfile](maildev-dockerfile.md)** — Email testing service\n\n**Orchestration:**\n- **[docker-compose.yaml](docker-compose-yaml.md)** — Local development stack\n- **[docker-compose.document.yaml](docker-compose-document-yaml.md)** and **[docker-compose.relational.yaml](docker-compose-relational-yaml.md)** — Database-specific dev environments\n- **[docker-compose.*.test.yaml](docker-compose-document-test-yaml.md)** — Test environments\n- **[docker-compose.*.ci.yaml](docker-compose-document-ci-yaml.md)** — CI pipeline environments\n- **[coolify-docker-compose.yaml](coolify-docker-compose-yaml.md)** — Coolify deployment configuration\n\n### 3. Environment Configuration\n\n- **[env-example-relational](env-example-relational.md)** — Template for relational database deployments\n- **[env-example-document](env-example-document.md)** — Template for document database deployments\n\n### 4. 
Startup Automation\n\nStartup scripts orchestrate service initialization in the correct sequence:\n\n**Production:**\n- **[startup.relational.dev.sh](startup-relational-dev-sh.md)** — Starts the relational API in production\n- **[startup.document.dev.sh](startup-document-dev-sh.md)** — Starts the document API in production\n\n**Development:**\n- **[startup.relational.test.sh](startup-relational-test-sh.md)** — Local development with PostgreSQL\n- **[startup.document.test.sh](startup-document-test-sh.md)** — Local development with MongoDB\n\n**CI/CD:**\n- **[startup.relational.ci.sh](startup-relational-ci-sh.md)** — CI pipeline for relational tests\n- **[startup.document.ci.sh](startup-document-ci-sh.md)** — CI pipeline for document tests\n- **[wait-for-it.sh](wait-for-it-sh.md)** — Utility for service dependency coordination\n- **[Procfile](procfile.md)** — Process definitions for platform deployment (Heroku, Render, etc.)\n\n### 5. Documentation & Guidance\n\n- **[README.md](readme-md.md)** — Project overview and quick start\n- **[CONTEXT.md](context-md.md)** — Architectural context and decisions\n- **[GEMINI.md](gemini-md.md)** — Project architecture reference\n- **[CLAUDE.md](claude-md.md)** — AI assistant configuration\n- **[docs/](docs.md)** — Full documentation suite\n- **[agents/](agents-md.md)** — Autonomous agent configuration\n- **[superpowers/](superpowers-md.md)** — Symphony automation service\n- **[graphify-out/](graphify-out-md.md)** — Code knowledge graph visualization\n\n### 6. Test Infrastructure\n\n- **[test/jest-e2e.json](test-jest-e2e-json.md)** — Jest E2E test configuration\n- **[test/utils/constants.ts](test-utils-constants-ts.md)** — Shared test constants\n- **[admin/](admin-md.md)** — Admin functionality E2E tests\n- **[user/](user-md.md)** — Authentication E2E tests\n\n### 7. Additional Tools\n\n- **[skills-lock.json](skills-lock-json.md)** — Lockfile for installed skills\n\n## Key Workflows\n\n### Local Development\n\n1. 
Copy an environment template (`env-example-relational` or `env-example-document`) to `.env`\n2. Run `docker-compose.yaml` to start infrastructure services\n3. Execute the appropriate startup script to launch the API\n\n### Running Tests\n\n1. Use the test Docker Compose files to spin up isolated test environments\n2. Startup scripts handle service readiness and database seeding\n3. E2E tests validate API endpoints using Supertest\n\n### CI/CD Pipeline\n\n1. CI Docker Compose files provision clean environments\n2. Startup scripts run migrations, seeding, and the test suite\n3. Results feed back into the version control system\n\n### Container Deployment\n\n1. Production Dockerfiles build the application image\n2. Startup scripts handle database readiness and migrations\n3. Procfile defines the process model for platform deployment","overview":"# control-center — Wiki\n\n# Control Center: NestJS Boilerplate\n\nWelcome to the **control-center** repository. This project is a production-ready NestJS boilerplate designed to provide a robust foundation for building scalable REST APIs. It features a modular architecture that supports both relational (PostgreSQL via TypeORM) and document-based (MongoDB via Mongoose) databases, comprehensive authentication, and an automated AI orchestration workflow.\n\n## High-Level Architecture\n\nThe application is built around a modular core where the [Application Core](application-core.md) orchestrates feature modules and infrastructure. 
A key highlight of this architecture is the [Database Layer](database-layer.md), which abstracts persistence so the application can switch between SQL and NoSQL backends with minimal friction.\n\n```mermaid\ngraph TD\n App[Application Core] --> Auth[Authentication]\n App --> DB[Database Layer]\n Auth --> Users[User Management]\n Users --> Files[File Management]\n Users --> Status[Status Domain]\n Symphony[Symphony Workflow] --> DB\n Email[Email Service] --> App\n App --> I18n[Internationalization]\n```\n\n## Core Capabilities\n\n### Identity and Access\nThe [Authentication](authentication.md) module provides a secure entry point using Passport.js, supporting JWT-based sessions and social logins (Google, Facebook, Apple). It works closely with [User Management](user-management.md) to handle profile data and account lifecycles. To ensure global reach, the [Internationalization](internationalization.md) module provides localized strings for system messages and [Email Service](email-service.md) templates.\n\n### Data and Assets\nEntities within the system, such as users, often reference the [Status Domain](status-domain.md) to track state (e.g., Active, Inactive). For media and document handling, the [File Management](file-management.md) module provides a unified interface for local storage or cloud providers like AWS S3. Throughout the codebase, [Utilities](utilities.md) like the `deepResolvePromises` helper ensure that complex, nested data structures are handled efficiently before being returned to the client.\n\n### Symphony Workflow\nUnique to this boilerplate is the [Symphony Workflow](symphony-workflow.md). This is an automated issue resolution system that uses a hexagonal architecture to orchestrate AI agents. 
It polls for candidate issues, manages agent sessions, and handles the lifecycle of code resolutions—from branch creation to pull request management.\n\n## Key Execution Flows\n\nUnderstanding how data moves through the system is vital for new contributors:\n\n* **AI Orchestration:** The `OrchestratorService` within Symphony initiates a polling loop (`tick` -> `pollSession`) that dispatches work to agents and handles outcomes by updating issue states or adding comments via external tracker ports.\n* **Persistence Mapping:** When data is retrieved via the [Database Layer](database-layer.md), the system uses mappers (e.g., `UserMapper`) to convert raw database rows or documents into clean domain entities, ensuring that business logic remains decoupled from the storage engine.\n* **File Integration:** User updates often trigger file metadata processing, where the [User Management](user-management.md) module interacts with [File Management](file-management.md) to link uploaded assets to specific domain models.\n\n## Getting Started\n\nTo get the project running locally, ensure you have your environment variables configured and use the following commands:\n\n1. **Install dependencies:** `npm install`\n2. **Database Setup:** Use `npm run migration:run` for relational setups or the provided seeding scripts (`npm run seed:run:relational` or `npm run seed:run:document`) to populate initial data.\n3. **Development Mode:** `npm run start:dev`\n4. **Testing:** Run `npm run test` for unit tests or `npm run test:e2e` for end-to-end validation.\n\nFor more detailed information on specific components, please navigate to the individual module pages linked above.","status-domain":"# Status Domain\n\n# Status Domain Module\n\nThe Status module defines the domain model, persistence entities, and DTOs for status values used throughout the application. 
It supports both document-based (MongoDB) and relational (SQL) database backends through a unified interface.\n\n## Overview\n\nThe Status module represents a simple but essential domain entity—status values like \"active\" and \"inactive\" that are referenced by other entities (primarily users). The module demonstrates a clean separation between:\n\n- **Domain layer**: Pure business logic representation\n- **DTO layer**: Data transfer objects for API communication\n- **Infrastructure layer**: Database-specific persistence implementations\n\n## Key Components\n\n### Domain Entity\n\n```typescript\n// src/statuses/domain/status.ts\nexport class Status {\n id: number | string;\n name?: string;\n}\n```\n\nThe `Status` class is the core domain representation. It contains:\n\n- **`id`**: A polymorphic identifier that adapts to the database type (Number for relational databases, String for document databases). This adaptation happens through the database configuration at runtime.\n- **`name`**: An optional string representing the status label (e.g., \"active\", \"inactive\").\n\nThe class uses class-validator's `@Allow()` decorator for basic validation and Swagger's `@ApiProperty()` for OpenAPI documentation.\n\n### Status Enum\n\n```typescript\n// src/statuses/enum/status.enum.ts\nexport enum StatusEnum {\n 'active' = 1,\n 'inactive' = 2,\n}\n```\n\nDefines the two supported status values as a TypeScript enum, mapping human-readable names to numeric IDs for database storage.\n\n### Data Transfer Object\n\n```typescript\n// src/statuses/dto/status.dto.ts\nexport class StatusDto {\n @ApiProperty()\n @IsNumber()\n id: number | string;\n}\n```\n\nA minimal DTO for transferring status data across API boundaries. 
The `@IsNumber()` decorator validates the ID when receiving input.\n\n### Persistence Entities\n\nThe module provides two database-specific entities:\n\n**Relational (TypeORM):**\n```typescript\n// src/statuses/infrastructure/persistence/relational/entities/status.entity.ts\n@Entity({ name: 'status' })\nexport class StatusEntity extends EntityRelationalHelper {\n @PrimaryColumn()\n id: number;\n\n @Column()\n name?: string;\n}\n```\n\n**Document (MongoDB):**\n```typescript\n// src/statuses/infrastructure/persistence/document/entities/status.schema.ts\nexport class StatusSchema {\n _id: string;\n name?: string;\n}\n```\n\nNote the key differences:\n- Relational uses `id` as a `PrimaryColumn` with numeric type\n- Document uses `_id` with string type (MongoDB's default)\n- Both have an optional `name` field\n\n## Architecture\n\nThe Status module follows a clean architecture pattern where the domain entity is database-agnostic, while infrastructure layers handle persistence specifics.\n\n```mermaid\nflowchart TD\n subgraph API\n DTO[StatusDto]\n end\n \n subgraph Domain\n Status[Status]\n StatusEnum[StatusEnum]\n end\n \n subgraph Infrastructure\n subgraph Relational\n StatusEntity[StatusEntity]\n end\n subgraph Document\n StatusSchema[StatusSchema]\n end\n end\n \n DTO --> Status\n Status --> StatusEntity\n Status --> StatusSchema\n```\n\n## Usage Patterns\n\nThe Status module is primarily consumed by user-related operations. 
Based on the call graph:\n\n| Operation | Database | Flow |\n|-----------|----------|------|\n| Create User | Document | `create` → `toDomain` → `Status` |\n| Create User | Relational | `create` → `toPersistence` → `StatusEntity` |\n| Update User | Document | `update` → `toDomain` → `Status` |\n| Update User | Relational | `update` → `toPersistence` → `StatusEntity` |\n| Find Users by IDs | Document | `findByIds` → `toDomain` → `Status` |\n\nThe mappers (`user.mapper.ts` in both document and relational packages) handle conversion between:\n- Domain (`Status`) ↔ Persistence (`StatusEntity` or `StatusSchema`)\n- Domain (`Status`) ↔ DTO (`StatusDto`)\n\n## Database Configuration Adaptation\n\nThe domain entity's ID type adapts based on the application's database configuration:\n\n```typescript\nconst idType = (databaseConfig() as DatabaseConfig).isDocumentDatabase\n ? String\n : Number;\n```\n\nThis allows the same domain code to work with both database backends without modification. The `Status` class uses this `idType` for its `id` property, ensuring type consistency with the underlying database.","symphony-workflow":"# Symphony Workflow\n\n# Symphony Workflow Module\n\nSymphony is an automated issue resolution system that orchestrates AI agents to resolve tracker issues. 
It fetches candidate issues, spawns agent sessions to work on them, creates branches and pull requests, and manages the complete lifecycle from issue assignment to resolution.\n\n## Architecture Overview\n\nSymphony follows a hexagonal (ports and adapters) architecture with clear separation between domain logic and infrastructure concerns.\n\n```mermaid\ngraph TB\n subgraph \"Application Layer\"\n O[OrchestratorService] --> W[WorkflowService]\n O --> P[PersistenceService]\n O --> VM[VcsMirrorService]\n end\n\n subgraph \"Domain Layer\"\n O --> TP[TrackerPort]\n O --> AP[AgentPort]\n O --> VCS[VcsPort]\n W --> WP[WorkspacePort]\n end\n\n subgraph \"Infrastructure\"\n TP --> PA[PlaneAdapter]\n AP --> JA[JulesAdapter]\n VCS --> GHA[GitHubAdapter]\n VCS --> GLA[GitLabAdapter]\n WP --> LWA[LocalWorkspaceAdapter]\n end\n\n subgraph \"External Systems\"\n Plane[Plane API]\n Jules[Google Jules API]\n GitHub[GitHub API]\n GitLab[GitLab API]\n end\n\n PA --> Plane\n JA --> Jules\n GHA --> GitHub\n GLA --> GitLab\n```\n\n## Core Concepts\n\n### Workflow Configuration\n\nThe system is configured via a `WORKFLOW.md` file in the repository's default branch. This file uses YAML front-matter for configuration and the remainder as a Liquid template for the AI agent prompt.\n\n```yaml\n---\nrepository:\n kind: github\n url: https://github.com/org/repo\n apiKey: $GITHUB_TOKEN\n defaultBranch: main\nagent:\n kind: jules-rest\n sourceName: sources/my-agent\n apiKey: $JULES_API_KEY\n julesPollIntervalMs: 1000\n sessionCreationGracePeriodMs: 5000\ntracker:\n workspaceSlug: my-workspace\n projectId: my-project\n inReviewStateId: in-review\n---\nYour prompt template for the AI agent starts here...\n```\n\n### Sessions\n\nA **Session** represents an active attempt to resolve an issue. 
Sessions track:\n- Which issue is being worked on\n- The current attempt number (retries increment this)\n- The branch name (`symphony/{identifier}-attempt-{n}`)\n- Owner instance ID (for distributed coordination)\n- Status: `ACTIVE`, `COMPLETED`, `FAILED`, or `CANCELLED`\n\n### Issue Lifecycle\n\n```mermaid\nstateDiagram-v2\n [*] --> Candidate\n Candidate --> Active: Dispatch creates session\n Active --> Completed: Agent finishes successfully\n Active --> Failed: Stall/Token limit/Error\n Active --> Cancelled: External cancellation\n Failed --> Candidate: Retry with incremented attempt\n Completed --> [*]\n```\n\n## Key Services\n\n### OrchestratorService\n\nThe central coordinator that runs the main event loop. It:\n\n1. **Initialization**: Sets up adapters based on configuration (Plane for tracking, Jules for agents, GitHub or GitLab for VCS)\n2. **Reconciliation**: On startup, checks for orphaned sessions from previous runs and either resumes them or cleans them up\n3. **Polling Loop**: Continuously:\n - Fetches candidate issues from the tracker\n - Dispatches new sessions for unclaimed issues (respecting concurrency limits)\n - Polls active sessions for status updates\n - Handles session outcomes (completion, failure, stall)\n\n**Concurrency Control**: The `maxConcurrency` setting limits how many issues can be processed simultaneously. When the limit is reached, new issues are queued for the next poll cycle.\n\n**Stall Detection**: Sessions that haven't updated in 5 minutes are automatically cancelled and marked as failed.\n\n**Token Budget**: Sessions exceeding `maxSessionTokens` are terminated to prevent runaway costs.\n\n### WorkflowService\n\nHandles workflow definition loading and prompt rendering:\n\n1. **loadWorkflow**: Reads `WORKFLOW.md` from the default branch, parses YAML front-matter, validates the configuration, and extracts the prompt template\n2. 
**render**: Processes the Liquid template with issue context, handling:\n - Issue description truncation (default 8000 chars)\n - Continuation prompts for retry attempts (includes previous session summary)\n\n**Configuration Validation**: Validates that:\n- Repository kind is `github` or `gitlab`\n- GitLab repositories have a `mirrorUrl` (required for the sync mechanism)\n- Agent sourceName matches the pattern `sources/[a-z0-9-]+`\n- All required fields are present\n\n### PersistenceService\n\nManages session state in the `.symphony/` directory within the repository:\n\n- **current-session.json**: The active or most recent session\n- **session-{n}.json**: Log files for each attempt, including token usage, PR URL, and activity summaries\n\nThis file-based persistence allows multiple instances to coordinate via the filesystem. The `ownerInstanceId` field ensures only one instance claims an issue.\n\n### VcsMirrorService\n\nHandles GitLab's two-repository architecture:\n\n- **syncOut**: After creating a branch, pushes it to the mirror URL so the agent can access it\n- **syncIn**: After the agent completes, pulls from the mirror and pushes","utilities":"# Utilities\n\n# Utilities Module\n\nThe Utilities module provides shared helper functions, types, and infrastructure components used throughout the application. It includes promise resolution utilities, entity helpers, pagination helpers, type definitions, and configuration validation tools.\n\n## Core Promise Resolution\n\nThe most fundamental utility in this module is the deep promise resolver, which enables asynchronous data handling throughout the application.\n\n### deepResolvePromises\n\n```typescript\n// src/utils/deep-resolver.ts\nasync function deepResolvePromises(input) { ... }\n```\n\nRecursively resolves all Promises within a nested data structure. 
This utility handles:\n\n- **Direct Promises** — Returns the resolved value\n- **Arrays** — Maps over each element and resolves recursively via `Promise.all`\n- **Plain Objects** — Iterates over keys and resolves each value recursively\n- **Dates** — Returns unchanged (Date objects are not promises but have a `then` method)\n- **Primitives** — Returns as-is\n\n**Why this matters:** When returning entities from TypeORM or other ORMs, relations may be loaded as Promises (lazy loading). This utility ensures all nested data is fully resolved before serialization.\n\n```typescript\n// Example: Before resolution\n{ id: 1, user: Promise<User>, tags: [Promise<Tag>, Promise<Tag>] }\n\n// After deepResolvePromises\n{ id: 1, user: { id: 1, name: 'Alice' }, tags: [{ id: 1 }, { id: 2 }] }\n```\n\n### ResolvePromisesInterceptor\n\n```typescript\n// src/utils/serializer.interceptor.ts\n@Injectable()\nexport class ResolvePromisesInterceptor implements NestInterceptor { ... }\n```\n\nA NestJS interceptor that automatically applies `deepResolvePromises` to all controller responses. This is registered globally in `src/main.ts` (bootstrap), ensuring every endpoint response has promises resolved before being sent to clients.\n\n```mermaid\ngraph LR\n A[Controller<br/>returns data] --> B[ResolvePromisesInterceptor]\n B --> C[deepResolvePromises]\n C --> D{Input type?}\n D -->|Promise| E[await result]\n D -->|Array| F[Promise.all + map]\n D -->|Object| G[recurse keys]\n D -->|Other| H[return as-is]\n E --> I[Serialized<br/>Response]\n F --> I\n G --> I\n H --> I\n```\n\n**Registration (src/main.ts):**\n```typescript\napp.useGlobalInterceptors(new ResolvePromisesInterceptor());\n```\n\n## Entity Helpers\n\nThese base classes provide serialization and transformation capabilities for database entities.\n\n### EntityRelationalHelper\n\n```typescript\n// src/utils/relational-entity-helper.ts\nexport class EntityRelationalHelper extends BaseEntity { ... 
}\n```\n\nBase class for TypeORM entities that enables proper serialization. Extends TypeORM's `BaseEntity` and provides:\n\n- **`__entity`** — Stores the entity class name after load\n- **`toJSON()`** — Uses `class-transformer`'s `instanceToPlain` to convert the entity to a plain JSON object, respecting all `@Exclude` and transformation decorators\n\n**Usage:** Extend this class for your TypeORM entities:\n\n```typescript\n@Entity()\nexport class User extends EntityRelationalHelper {\n @PrimaryGeneratedColumn()\n id: number;\n\n @Column()\n @Exclude()\n password: string;\n}\n```\n\n### EntityDocumentHelper\n\n```typescript\n// src/utils/document-entity-helper.ts\nexport class EntityDocumentHelper {\n @Transform(...)\n public _id: string;\n}\n```\n\nA class-transformer decorated property for handling document-style IDs (common with MongoDB or when using string IDs). The transform ensures the ID is converted to a string in the plain output.\n\nThis addresses a known issue in class-transformer (#879) where nested transformations may receive the full object instead of just the value.\n\n## Pagination\n\nThe module provides utilities for infinite-scroll style pagination.\n\n### InfinityPaginationResponseDto\n\n```typescript\n// src/utils/dto/infinity-pagination-response.dto.ts\nexport class InfinityPaginationResponseDto<T> {\n data: T[];\n hasNextPage: boolean;\n}\n```\n\nGeneric DTO representing a paginated response with:\n\n- **`data: T[]`** — Array of items for the current page\n- **`hasNextPage: boolean`** — Indicates whether more items exist\n\n### InfinityPaginationResponse (Decorator)\n\n```typescript\n// src/utils/dto/infinity-pagination-response.dto.ts\nexport function InfinityPaginationResponse<T>(classReference: Type<T>) { ... }\n```\n\nA decorator factory that generates a Swagger-documented response type for a specific entity. 
Used in controllers to document paginated endpoints.\n\n**Usage (from users.controller.ts):**\n```typescript\n@ApiResponse({\n type: InfinityPaginationResponse(UserDto),\n})\n@Get()\nfindAll() { ... }\n```\n\n### infinityPagination\n\n```typescript\n// src/utils/infinity-pagination.ts\nexport const infinityPagination = <T>(\n data: T[],\n options: IPaginationOptions,\n): InfinityPaginationResponseDto<T> => { ... }\n```\n\nHelper function that constructs a pagination response. The `hasNextPage` logic assumes that when the returned count equals the requested limit, there may be more data available.\n\n```typescript\nconst result = infinityPagination(users, { page: 1, limit: 10 });\n// If 10 users returned, hasNextPage = true (more may exist)\n// If 5 users returned, hasNextPage = false\n```\n\n## Type Definitions\n\nThe module exports several reusable TypeScript types:\n\n| Type | Definition | Use Case |\n|------|------------|----------|\n| `DeepPartial<T>` | Recursively makes all properties optional | Creating partial updates |\n| `MaybeType<T>` | `T \\| undefined` | Nullable-in-practice return types |\n| `NullableType<T>` | `T \\| null` | Explicit null handling |\n| `OrNeverType<T>` | `T \\| never` | Type narrowing / discriminated unions |\n| `IPaginationOptions` | `{ page: number; limit: number }` | Standard pagination params |\n\n## Transformers\n\n### lowerCaseTransformer\n\n```typescript\n// src/utils/transformers/lower-case.transformer.ts\nexport const lowerCaseTransformer = (params: TransformFnParams): MaybeType<string>\n```\n\nA class-transformer function that converts strings to lowercase and trims whitespace. 
Use with the `@Transform` decorator on DTO properties:\n\n```typescript\nexport class CreateUserDto {\n @Transform(lowerCaseTransformer)\n email: string;\n}\n```\n\n## Configuration Validation\n\n### validateConfig\n\n```typescript\n// src/utils/validate-config.ts\nfunction validateConfig<T extends object>(\n config: Record<string, unknown>,\n envVariablesClass: ClassConstructor<T>,\n) { ... }\n```\n\nValidates configuration objects (typically environment variables) using class-validator. Converts plain config objects to class instances, then validates against class constraints.\n\n**Process:**\n1. `plainToClass` — Converts raw config to typed class instance\n2. `validateSync` — Runs class-validator decorators\n3. Throws if validation fails, returns validated config otherwise\n\n**Used by:** All config modules in the application:\n- `app.config.ts`\n- `database.config.ts`\n- `auth.config.ts`\n- `mail.config.ts`\n- `file.config.ts`\n- `google.config.ts`\n- `facebook.config.ts`\n- `apple.config.ts`\n\n**Example:**\n```typescript\n// database/config/database.config.ts\nimport validateConfig from '../../utils/validate-config';\n\nexport default () => validateConfig(process.env, DatabaseConfig);\n```\n\n### validationOptions\n\n```typescript\n// src/utils/validation-options.ts\nconst validationOptions: ValidationPipeOptions = { ... 
}\n```\n\nPre-configured NestJS ValidationPipe options:\n\n- **`transform: true`** — Transforms payloads to DTO instances\n- **`whitelist: true`** — Strips properties not defined in DTOs\n- **`errorHttpStatusCode: UNPROCESSABLE_ENTITY`** — Returns 422 on validation errors\n- **`exceptionFactory`** — Custom error format with nested error details\n\nThe `generateErrors` helper recursively flattens validation error trees into a clean object structure:\n\n```typescript\n// Input: nested ValidationError array\n// Output: { fieldName: \"error message\", \"nested.field\": \"error\" }\n```\n\n## Module Architecture\n\n```mermaid\ngraph TB\n subgraph \"Entry Points\"\n A[main.ts - bootstrap] --> B[ResolvePromisesInterceptor]\n end\n\n subgraph \"Core Utilities\"\n B --> C[deepResolvePromises]\n C --> C\n end\n\n subgraph \"Entity Layer\"\n C --> D[EntityRelationalHelper]\n C --> E[EntityDocumentHelper]\n end\n\n subgraph \"Types\"\n F[IPaginationOptions] --> G[infinityPagination]\n G --> H[InfinityPaginationResponseDto]\n H --> I[InfinityPaginationResponse decorator]\n end\n\n subgraph \"Configuration\"\n J[*.config.ts] --> K[validateConfig]\n K --> L[class-validator]\n M[main.ts] --> N[validationOptions]\n N --> O[ValidationPipe]\n end\n```\n\n## Integration Summary\n\n| Component | Integrates With | Purpose |\n|-----------|-----------------|---------|\n| `ResolvePromisesInterceptor` | `main.ts`, `deepResolvePromises` | Auto-resolve promises in all responses |\n| `deepResolvePromises` | `serializer.interceptor.ts` | Core recursive resolution logic |\n| `EntityRelationalHelper` | TypeORM entities | Serialization for relational DB entities |\n| `validateConfig` | All `*.config.ts` files | Environment configuration validation |\n| `validationOptions` | `main.ts` | Global validation pipe setup |\n| `infinityPagination` | Controllers returning paginated data | Build pagination responses |"}; | |
| var TREE = [{"name":"Authentication","slug":"authentication","files":["src/auth/auth.controller.ts","src/auth/auth.module.ts","src/auth/auth.service.ts","src/auth/auth-providers.enum.ts","src/auth/config/auth-config.type.ts","src/auth/config/auth.config.ts","src/auth/dto/auth-confirm-email.dto.ts","src/auth/dto/auth-email-login.dto.ts","src/auth/dto/auth-forgot-password.dto.ts","src/auth/dto/auth-register-login.dto.ts","src/auth/dto/auth-reset-password.dto.ts","src/auth/dto/auth-update.dto.ts","src/auth/dto/login-response.dto.ts","src/auth/dto/refresh-response.dto.ts","src/auth/strategies/anonymous.strategy.ts","src/auth/strategies/jwt-refresh.strategy.ts","src/auth/strategies/jwt.strategy.ts","src/auth/strategies/types/jwt-payload.type.ts","src/auth/strategies/types/jwt-refresh-payload.type.ts","src/auth-apple/auth-apple.controller.ts","src/auth-apple/auth-apple.module.ts","src/auth-apple/auth-apple.service.ts","src/auth-apple/config/apple-config.type.ts","src/auth-apple/config/apple.config.ts","src/auth-apple/dto/auth-apple-login.dto.ts","src/auth-facebook/auth-facebook.controller.ts","src/auth-facebook/auth-facebook.module.ts","src/auth-facebook/auth-facebook.service.ts","src/auth-facebook/config/facebook-config.type.ts","src/auth-facebook/config/facebook.config.ts","src/auth-facebook/dto/auth-facebook-login.dto.ts","src/auth-facebook/interfaces/facebook.interface.ts","src/auth-google/auth-google.controller.ts","src/auth-google/auth-google.module.ts","src/auth-google/auth-google.service.ts","src/auth-google/config/google-config.type.ts","src/auth-google/config/google.config.ts","src/auth-google/dto/auth-google-login.dto.ts","src/session/domain/session.ts","src/session/infrastructure/persistence/document/document-persistence.module.ts","src/session/infrastructure/persistence/document/entities/session.schema.ts","src/session/infrastructure/persistence/document/mappers/session.mapper.ts","src/session/infrastructure/persistence/document/repositories/session.reposit
ory.ts","src/session/infrastructure/persistence/relational/entities/session.entity.ts","src/session/infrastructure/persistence/relational/mappers/session.mapper.ts","src/session/infrastructure/persistence/relational/relational-persistence.module.ts","src/session/infrastructure/persistence/relational/repositories/session.repository.ts","src/session/infrastructure/persistence/session.repository.ts","src/session/session.module.ts","src/session/session.service.ts","src/roles/domain/role.ts","src/roles/dto/role.dto.ts","src/roles/infrastructure/persistence/document/entities/role.schema.ts","src/roles/infrastructure/persistence/relational/entities/role.entity.ts","src/roles/roles.decorator.ts","src/roles/roles.enum.ts","src/roles/roles.guard.ts","src/social/interfaces/social.interface.ts","src/social/tokens.ts"]},{"name":"User Management","slug":"user-management","files":["src/users/domain/user.ts","src/users/dto/create-user.dto.ts","src/users/dto/query-user.dto.ts","src/users/dto/update-user.dto.ts","src/users/dto/user.dto.ts","src/users/infrastructure/persistence/document/document-persistence.module.ts","src/users/infrastructure/persistence/document/entities/user.schema.ts","src/users/infrastructure/persistence/document/mappers/user.mapper.ts","src/users/infrastructure/persistence/document/repositories/user.repository.ts","src/users/infrastructure/persistence/relational/entities/user.entity.ts","src/users/infrastructure/persistence/relational/mappers/user.mapper.ts","src/users/infrastructure/persistence/relational/relational-persistence.module.ts","src/users/infrastructure/persistence/relational/repositories/user.repository.ts","src/users/infrastructure/persistence/user.repository.ts","src/users/users.controller.ts","src/users/users.module.ts","src/users/users.service.ts"]},{"name":"File 
Management","slug":"file-management","files":["src/files/config/file-config.type.ts","src/files/config/file.config.ts","src/files/domain/file.ts","src/files/dto/file.dto.ts","src/files/files.module.ts","src/files/files.service.ts","src/files/infrastructure/persistence/document/document-persistence.module.ts","src/files/infrastructure/persistence/document/entities/file.schema.ts","src/files/infrastructure/persistence/document/mappers/file.mapper.ts","src/files/infrastructure/persistence/document/repositories/file.repository.ts","src/files/infrastructure/persistence/file.repository.ts","src/files/infrastructure/persistence/relational/entities/file.entity.ts","src/files/infrastructure/persistence/relational/mappers/file.mapper.ts","src/files/infrastructure/persistence/relational/relational-persistence.module.ts","src/files/infrastructure/persistence/relational/repositories/file.repository.ts","src/files/infrastructure/uploader/local/dto/file-response.dto.ts","src/files/infrastructure/uploader/local/files.controller.ts","src/files/infrastructure/uploader/local/files.module.ts","src/files/infrastructure/uploader/local/files.service.ts","src/files/infrastructure/uploader/s3-presigned/dto/file-response.dto.ts","src/files/infrastructure/uploader/s3-presigned/dto/file.dto.ts","src/files/infrastructure/uploader/s3-presigned/files.controller.ts","src/files/infrastructure/uploader/s3-presigned/files.module.ts","src/files/infrastructure/uploader/s3-presigned/files.service.ts","src/files/infrastructure/uploader/s3/dto/file-response.dto.ts","src/files/infrastructure/uploader/s3/files.controller.ts","src/files/infrastructure/uploader/s3/files.module.ts","src/files/infrastructure/uploader/s3/files.service.ts"]},{"name":"Database 
Layer","slug":"database-layer","files":["src/database/config/database-config.type.ts","src/database/config/database.config.ts","src/database/data-source.ts","src/database/migrations/1715028537217-CreateUser.ts","src/database/mongoose-config.service.ts","src/database/typeorm-config.service.ts","src/database/seeds/document/run-seed.ts","src/database/seeds/document/seed.module.ts","src/database/seeds/document/user/user-seed.module.ts","src/database/seeds/document/user/user-seed.service.ts","src/database/seeds/relational/role/role-seed.module.ts","src/database/seeds/relational/role/role-seed.service.ts","src/database/seeds/relational/run-seed.ts","src/database/seeds/relational/seed.module.ts","src/database/seeds/relational/status/status-seed.module.ts","src/database/seeds/relational/status/status-seed.service.ts","src/database/seeds/relational/user/user-seed.module.ts","src/database/seeds/relational/user/user-seed.service.ts","src/config/app-config.type.ts","src/config/app.config.ts","src/config/config.type.ts"]},{"name":"Email Service","slug":"email-service","files":["src/mail/config/mail-config.type.ts","src/mail/config/mail.config.ts","src/mail/interfaces/mail-data.interface.ts","src/mail/mail-templates/activation.hbs","src/mail/mail-templates/confirm-new-email.hbs","src/mail/mail-templates/reset-password.hbs","src/mail/mail.module.ts","src/mail/mail.service.ts","src/mailer/mailer.module.ts","src/mailer/mailer.service.ts"]},{"name":"Symphony 
Workflow","slug":"symphony-workflow","files":["src/symphony/application/orchestrator.service.ts","src/symphony/application/persistence.service.ts","src/symphony/application/vcs-mirror.service.ts","src/symphony/application/workflow.service.spec.ts","src/symphony/application/workflow.service.ts","src/symphony/domain/entities/issue.entity.ts","src/symphony/domain/entities/session.entity.ts","src/symphony/domain/entities/workflow-config.entity.ts","src/symphony/domain/entities/workflow-definition.entity.ts","src/symphony/domain/ports/agent.port.ts","src/symphony/domain/ports/tracker.port.ts","src/symphony/domain/ports/vcs.port.ts","src/symphony/domain/ports/workspace.port.ts","src/symphony/infrastructure/adapters/agent/jules.adapter.ts","src/symphony/infrastructure/adapters/tracker/plane.adapter.ts","src/symphony/infrastructure/adapters/vcs/github.adapter.ts","src/symphony/infrastructure/adapters/vcs/gitlab.adapter.ts","src/symphony/infrastructure/adapters/workspace/local-workspace.adapter.ts","src/symphony/infrastructure/clients/jules.client.ts","src/symphony/infrastructure/clients/plane.client.ts","src/symphony/symphony.module.ts"]},{"name":"Status 
Domain","slug":"status-domain","files":["src/statuses/domain/status.ts","src/statuses/dto/status.dto.ts","src/statuses/infrastructure/persistence/document/entities/status.schema.ts","src/statuses/infrastructure/persistence/relational/entities/status.entity.ts","src/statuses/statuses.enum.ts"]},{"name":"Internationalization","slug":"internationalization","files":["src/i18n/ar/common.json","src/i18n/ar/confirm-email.json","src/i18n/ar/confirm-new-email.json","src/i18n/ar/reset-password.json","src/i18n/en/common.json","src/i18n/en/confirm-email.json","src/i18n/en/confirm-new-email.json","src/i18n/en/reset-password.json","src/i18n/es/common.json","src/i18n/es/confirm-email.json","src/i18n/es/confirm-new-email.json","src/i18n/es/reset-password.json","src/i18n/fr/common.json","src/i18n/fr/confirm-email.json","src/i18n/fr/confirm-new-email.json","src/i18n/fr/reset-password.json","src/i18n/hi/common.json","src/i18n/hi/confirm-email.json","src/i18n/hi/confirm-new-email.json","src/i18n/hi/reset-password.json","src/i18n/uk/common.json","src/i18n/uk/confirm-email.json","src/i18n/uk/confirm-new-email.json","src/i18n/uk/reset-password.json","src/i18n/zh/common.json","src/i18n/zh/confirm-email.json","src/i18n/zh/confirm-new-email.json","src/i18n/zh/reset-password.json"]},{"name":"Utilities","slug":"utilities","files":["src/utils/deep-resolver.ts","src/utils/document-entity-helper.ts","src/utils/dto/infinity-pagination-response.dto.ts","src/utils/infinity-pagination.ts","src/utils/relational-entity-helper.ts","src/utils/serializer.interceptor.ts","src/utils/transformers/lower-case.transformer.ts","src/utils/types/deep-partial.type.ts","src/utils/types/maybe.type.ts","src/utils/types/nullable.type.ts","src/utils/types/or-never.type.ts","src/utils/types/pagination-options.ts","src/utils/validate-config.ts","src/utils/validation-options.ts"]},{"name":"Application 
Core","slug":"application-core","files":["src/app.module.ts","src/main.ts","src/home/home.controller.ts","src/home/home.module.ts","src/home/home.service.ts"]},{"name":"Other","slug":"other","files":[],"children":[{"name":"Other — CLAUDE.md","slug":"other-claude-md","files":["CLAUDE.md"]},{"name":"Other — CONTEXT.md","slug":"other-context-md","files":["CONTEXT.md"]},{"name":"Other — Dockerfile","slug":"other-dockerfile","files":["Dockerfile"]},{"name":"Other — GEMINI.md","slug":"other-gemini-md","files":["GEMINI.md"]},{"name":"Other — Procfile","slug":"other-procfile","files":["Procfile"]},{"name":"Other — README.md","slug":"other-readme-md","files":["README.md"]},{"name":"Other — commitlint.config.js","slug":"other-commitlint-config-js","files":["commitlint.config.js"]},{"name":"Other — coolify-docker-compose.yaml","slug":"other-coolify-docker-compose-yaml","files":["coolify-docker-compose.yaml"]},{"name":"Other — docker-compose.document.ci.yaml","slug":"other-docker-compose-document-ci-yaml","files":["docker-compose.document.ci.yaml"]},{"name":"Other — docker-compose.document.test.yaml","slug":"other-docker-compose-document-test-yaml","files":["docker-compose.document.test.yaml"]},{"name":"Other — docker-compose.document.yaml","slug":"other-docker-compose-document-yaml","files":["docker-compose.document.yaml"]},{"name":"Other — docker-compose.relational.ci.yaml","slug":"other-docker-compose-relational-ci-yaml","files":["docker-compose.relational.ci.yaml"]},{"name":"Other — docker-compose.relational.test.yaml","slug":"other-docker-compose-relational-test-yaml","files":["docker-compose.relational.test.yaml"]},{"name":"Other — docker-compose.yaml","slug":"other-docker-compose-yaml","files":["docker-compose.yaml"]},{"name":"Other — 
docs","slug":"other-docs","files":["docs/INSTALL_VERIFY.md","docs/SPEC.md","docs/architecture.md","docs/auth.md","docs/automatic-update-dependencies.md","docs/benchmarking.md","docs/cli.md","docs/database.md","docs/design-review-1-opus.md","docs/design-review-2-opus.md","docs/file-uploading.md","docs/installing-and-running.md","docs/introduction.md","docs/production-harness-gap-analysis.md","docs/readme.md","docs/serialization.md","docs/tests.md","docs/translations.md"]},{"name":"Other — agents","slug":"other-agents","files":["docs/agents/domain.md","docs/agents/issue-tracker.md","docs/agents/triage-labels.md"]},{"name":"Other — superpowers","slug":"other-superpowers","files":["docs/superpowers/plans/2026-05-02-symphony-adaptation.md","docs/superpowers/specs/2026-05-02-symphony-adaptation-design.md"]},{"name":"Other — document.Dockerfile","slug":"other-document-dockerfile","files":["document.Dockerfile"]},{"name":"Other — document.e2e.Dockerfile","slug":"other-document-e2e-dockerfile","files":["document.e2e.Dockerfile"]},{"name":"Other — document.test.Dockerfile","slug":"other-document-test-dockerfile","files":["document.test.Dockerfile"]},{"name":"Other — env-example-document","slug":"other-env-example-document","files":["env-example-document"]},{"name":"Other — env-example-relational","slug":"other-env-example-relational","files":["env-example-relational"]},{"name":"Other — eslint.config.mjs","slug":"other-eslint-config-mjs","files":["eslint.config.mjs"]},{"name":"Other — graphify-out","slug":"other-graphify-out","files":["graphify-out/GRAPH_REPORT.md","graphify-out/cost.json","graphify-out/graph.html","graphify-out/graph.json","graphify-out/manifest.json"]},{"name":"Other — maildev.Dockerfile","slug":"other-maildev-dockerfile","files":["maildev.Dockerfile"]},{"name":"Other — nest-cli.json","slug":"other-nest-cli-json","files":["nest-cli.json"]},{"name":"Other — package.json","slug":"other-package-json","files":["package.json"]},{"name":"Other — 
relational.e2e.Dockerfile","slug":"other-relational-e2e-dockerfile","files":["relational.e2e.Dockerfile"]},{"name":"Other — relational.test.Dockerfile","slug":"other-relational-test-dockerfile","files":["relational.test.Dockerfile"]},{"name":"Other — renovate.json","slug":"other-renovate-json","files":["renovate.json"]},{"name":"Other — skills-lock.json","slug":"other-skills-lock-json","files":["skills-lock.json"]},{"name":"Other — startup.document.ci.sh","slug":"other-startup-document-ci-sh","files":["startup.document.ci.sh"]},{"name":"Other — startup.document.dev.sh","slug":"other-startup-document-dev-sh","files":["startup.document.dev.sh"]},{"name":"Other — startup.document.test.sh","slug":"other-startup-document-test-sh","files":["startup.document.test.sh"]},{"name":"Other — startup.relational.ci.sh","slug":"other-startup-relational-ci-sh","files":["startup.relational.ci.sh"]},{"name":"Other — startup.relational.dev.sh","slug":"other-startup-relational-dev-sh","files":["startup.relational.dev.sh"]},{"name":"Other — startup.relational.test.sh","slug":"other-startup-relational-test-sh","files":["startup.relational.test.sh"]},{"name":"Other — admin","slug":"other-admin","files":["test/admin/auth.e2e-spec.ts","test/admin/users.e2e-spec.ts"]},{"name":"Other — test","slug":"other-test","files":["test/jest-e2e.json"]},{"name":"Other — user","slug":"other-user","files":["test/user/auth.e2e-spec.ts"]},{"name":"Other — utils","slug":"other-utils","files":["test/utils/constants.ts"]},{"name":"Other — tsconfig.build.json","slug":"other-tsconfig-build-json","files":["tsconfig.build.json"]},{"name":"Other — tsconfig.json","slug":"other-tsconfig-json","files":["tsconfig.json"]},{"name":"Other — wait-for-it.sh","slug":"other-wait-for-it-sh","files":["wait-for-it.sh"]}]}]; | |
| var META = {"fromCommit":"145ecd9ca71651341537c9143585078bcfedbc65","generatedAt":"2026-05-04T22:37:18.922Z","model":"gemini-3-flash-preview","moduleFiles":{"Authentication":["src/auth/auth.controller.ts","src/auth/auth.module.ts","src/auth/auth.service.ts","src/auth/auth-providers.enum.ts","src/auth/config/auth-config.type.ts","src/auth/config/auth.config.ts","src/auth/dto/auth-confirm-email.dto.ts","src/auth/dto/auth-email-login.dto.ts","src/auth/dto/auth-forgot-password.dto.ts","src/auth/dto/auth-register-login.dto.ts","src/auth/dto/auth-reset-password.dto.ts","src/auth/dto/auth-update.dto.ts","src/auth/dto/login-response.dto.ts","src/auth/dto/refresh-response.dto.ts","src/auth/strategies/anonymous.strategy.ts","src/auth/strategies/jwt-refresh.strategy.ts","src/auth/strategies/jwt.strategy.ts","src/auth/strategies/types/jwt-payload.type.ts","src/auth/strategies/types/jwt-refresh-payload.type.ts","src/auth-apple/auth-apple.controller.ts","src/auth-apple/auth-apple.module.ts","src/auth-apple/auth-apple.service.ts","src/auth-apple/config/apple-config.type.ts","src/auth-apple/config/apple.config.ts","src/auth-apple/dto/auth-apple-login.dto.ts","src/auth-facebook/auth-facebook.controller.ts","src/auth-facebook/auth-facebook.module.ts","src/auth-facebook/auth-facebook.service.ts","src/auth-facebook/config/facebook-config.type.ts","src/auth-facebook/config/facebook.config.ts","src/auth-facebook/dto/auth-facebook-login.dto.ts","src/auth-facebook/interfaces/facebook.interface.ts","src/auth-google/auth-google.controller.ts","src/auth-google/auth-google.module.ts","src/auth-google/auth-google.service.ts","src/auth-google/config/google-config.type.ts","src/auth-google/config/google.config.ts","src/auth-google/dto/auth-google-login.dto.ts","src/session/domain/session.ts","src/session/infrastructure/persistence/document/document-persistence.module.ts","src/session/infrastructure/persistence/document/entities/session.schema.ts","src/session/infrastructure/persistence/document
/mappers/session.mapper.ts","src/session/infrastructure/persistence/document/repositories/session.repository.ts","src/session/infrastructure/persistence/relational/entities/session.entity.ts","src/session/infrastructure/persistence/relational/mappers/session.mapper.ts","src/session/infrastructure/persistence/relational/relational-persistence.module.ts","src/session/infrastructure/persistence/relational/repositories/session.repository.ts","src/session/infrastructure/persistence/session.repository.ts","src/session/session.module.ts","src/session/session.service.ts","src/roles/domain/role.ts","src/roles/dto/role.dto.ts","src/roles/infrastructure/persistence/document/entities/role.schema.ts","src/roles/infrastructure/persistence/relational/entities/role.entity.ts","src/roles/roles.decorator.ts","src/roles/roles.enum.ts","src/roles/roles.guard.ts","src/social/interfaces/social.interface.ts","src/social/tokens.ts"],"User Management":["src/users/domain/user.ts","src/users/dto/create-user.dto.ts","src/users/dto/query-user.dto.ts","src/users/dto/update-user.dto.ts","src/users/dto/user.dto.ts","src/users/infrastructure/persistence/document/document-persistence.module.ts","src/users/infrastructure/persistence/document/entities/user.schema.ts","src/users/infrastructure/persistence/document/mappers/user.mapper.ts","src/users/infrastructure/persistence/document/repositories/user.repository.ts","src/users/infrastructure/persistence/relational/entities/user.entity.ts","src/users/infrastructure/persistence/relational/mappers/user.mapper.ts","src/users/infrastructure/persistence/relational/relational-persistence.module.ts","src/users/infrastructure/persistence/relational/repositories/user.repository.ts","src/users/infrastructure/persistence/user.repository.ts","src/users/users.controller.ts","src/users/users.module.ts","src/users/users.service.ts"],"File 
Management":["src/files/config/file-config.type.ts","src/files/config/file.config.ts","src/files/domain/file.ts","src/files/dto/file.dto.ts","src/files/files.module.ts","src/files/files.service.ts","src/files/infrastructure/persistence/document/document-persistence.module.ts","src/files/infrastructure/persistence/document/entities/file.schema.ts","src/files/infrastructure/persistence/document/mappers/file.mapper.ts","src/files/infrastructure/persistence/document/repositories/file.repository.ts","src/files/infrastructure/persistence/file.repository.ts","src/files/infrastructure/persistence/relational/entities/file.entity.ts","src/files/infrastructure/persistence/relational/mappers/file.mapper.ts","src/files/infrastructure/persistence/relational/relational-persistence.module.ts","src/files/infrastructure/persistence/relational/repositories/file.repository.ts","src/files/infrastructure/uploader/local/dto/file-response.dto.ts","src/files/infrastructure/uploader/local/files.controller.ts","src/files/infrastructure/uploader/local/files.module.ts","src/files/infrastructure/uploader/local/files.service.ts","src/files/infrastructure/uploader/s3-presigned/dto/file-response.dto.ts","src/files/infrastructure/uploader/s3-presigned/dto/file.dto.ts","src/files/infrastructure/uploader/s3-presigned/files.controller.ts","src/files/infrastructure/uploader/s3-presigned/files.module.ts","src/files/infrastructure/uploader/s3-presigned/files.service.ts","src/files/infrastructure/uploader/s3/dto/file-response.dto.ts","src/files/infrastructure/uploader/s3/files.controller.ts","src/files/infrastructure/uploader/s3/files.module.ts","src/files/infrastructure/uploader/s3/files.service.ts"],"Database 
Layer":["src/database/config/database-config.type.ts","src/database/config/database.config.ts","src/database/data-source.ts","src/database/migrations/1715028537217-CreateUser.ts","src/database/mongoose-config.service.ts","src/database/typeorm-config.service.ts","src/database/seeds/document/run-seed.ts","src/database/seeds/document/seed.module.ts","src/database/seeds/document/user/user-seed.module.ts","src/database/seeds/document/user/user-seed.service.ts","src/database/seeds/relational/role/role-seed.module.ts","src/database/seeds/relational/role/role-seed.service.ts","src/database/seeds/relational/run-seed.ts","src/database/seeds/relational/seed.module.ts","src/database/seeds/relational/status/status-seed.module.ts","src/database/seeds/relational/status/status-seed.service.ts","src/database/seeds/relational/user/user-seed.module.ts","src/database/seeds/relational/user/user-seed.service.ts","src/config/app-config.type.ts","src/config/app.config.ts","src/config/config.type.ts"],"Email Service":["src/mail/config/mail-config.type.ts","src/mail/config/mail.config.ts","src/mail/interfaces/mail-data.interface.ts","src/mail/mail-templates/activation.hbs","src/mail/mail-templates/confirm-new-email.hbs","src/mail/mail-templates/reset-password.hbs","src/mail/mail.module.ts","src/mail/mail.service.ts","src/mailer/mailer.module.ts","src/mailer/mailer.service.ts"],"Symphony 
Workflow":["src/symphony/application/orchestrator.service.ts","src/symphony/application/persistence.service.ts","src/symphony/application/vcs-mirror.service.ts","src/symphony/application/workflow.service.spec.ts","src/symphony/application/workflow.service.ts","src/symphony/domain/entities/issue.entity.ts","src/symphony/domain/entities/session.entity.ts","src/symphony/domain/entities/workflow-config.entity.ts","src/symphony/domain/entities/workflow-definition.entity.ts","src/symphony/domain/ports/agent.port.ts","src/symphony/domain/ports/tracker.port.ts","src/symphony/domain/ports/vcs.port.ts","src/symphony/domain/ports/workspace.port.ts","src/symphony/infrastructure/adapters/agent/jules.adapter.ts","src/symphony/infrastructure/adapters/tracker/plane.adapter.ts","src/symphony/infrastructure/adapters/vcs/github.adapter.ts","src/symphony/infrastructure/adapters/vcs/gitlab.adapter.ts","src/symphony/infrastructure/adapters/workspace/local-workspace.adapter.ts","src/symphony/infrastructure/clients/jules.client.ts","src/symphony/infrastructure/clients/plane.client.ts","src/symphony/symphony.module.ts"],"Status 
Domain":["src/statuses/domain/status.ts","src/statuses/dto/status.dto.ts","src/statuses/infrastructure/persistence/document/entities/status.schema.ts","src/statuses/infrastructure/persistence/relational/entities/status.entity.ts","src/statuses/statuses.enum.ts"],"Internationalization":["src/i18n/ar/common.json","src/i18n/ar/confirm-email.json","src/i18n/ar/confirm-new-email.json","src/i18n/ar/reset-password.json","src/i18n/en/common.json","src/i18n/en/confirm-email.json","src/i18n/en/confirm-new-email.json","src/i18n/en/reset-password.json","src/i18n/es/common.json","src/i18n/es/confirm-email.json","src/i18n/es/confirm-new-email.json","src/i18n/es/reset-password.json","src/i18n/fr/common.json","src/i18n/fr/confirm-email.json","src/i18n/fr/confirm-new-email.json","src/i18n/fr/reset-password.json","src/i18n/hi/common.json","src/i18n/hi/confirm-email.json","src/i18n/hi/confirm-new-email.json","src/i18n/hi/reset-password.json","src/i18n/uk/common.json","src/i18n/uk/confirm-email.json","src/i18n/uk/confirm-new-email.json","src/i18n/uk/reset-password.json","src/i18n/zh/common.json","src/i18n/zh/confirm-email.json","src/i18n/zh/confirm-new-email.json","src/i18n/zh/reset-password.json"],"Utilities":["src/utils/deep-resolver.ts","src/utils/document-entity-helper.ts","src/utils/dto/infinity-pagination-response.dto.ts","src/utils/infinity-pagination.ts","src/utils/relational-entity-helper.ts","src/utils/serializer.interceptor.ts","src/utils/transformers/lower-case.transformer.ts","src/utils/types/deep-partial.type.ts","src/utils/types/maybe.type.ts","src/utils/types/nullable.type.ts","src/utils/types/or-never.type.ts","src/utils/types/pagination-options.ts","src/utils/validate-config.ts","src/utils/validation-options.ts"],"Application 
Core":["src/app.module.ts","src/main.ts","src/home/home.controller.ts","src/home/home.module.ts","src/home/home.service.ts"],"Other":["CLAUDE.md","CONTEXT.md","Dockerfile","GEMINI.md","Procfile","README.md","commitlint.config.js","coolify-docker-compose.yaml","docker-compose.document.ci.yaml","docker-compose.document.test.yaml","docker-compose.document.yaml","docker-compose.relational.ci.yaml","docker-compose.relational.test.yaml","docker-compose.yaml","docs/INSTALL_VERIFY.md","docs/SPEC.md","docs/architecture.md","docs/auth.md","docs/automatic-update-dependencies.md","docs/benchmarking.md","docs/cli.md","docs/database.md","docs/design-review-1-opus.md","docs/design-review-2-opus.md","docs/file-uploading.md","docs/installing-and-running.md","docs/introduction.md","docs/production-harness-gap-analysis.md","docs/readme.md","docs/serialization.md","docs/tests.md","docs/translations.md","docs/agents/domain.md","docs/agents/issue-tracker.md","docs/agents/triage-labels.md","docs/superpowers/plans/2026-05-02-symphony-adaptation.md","docs/superpowers/specs/2026-05-02-symphony-adaptation-design.md","document.Dockerfile","document.e2e.Dockerfile","document.test.Dockerfile","env-example-document","env-example-relational","eslint.config.mjs","graphify-out/GRAPH_REPORT.md","graphify-out/cost.json","graphify-out/graph.html","graphify-out/graph.json","graphify-out/manifest.json","maildev.Dockerfile","nest-cli.json","package.json","relational.e2e.Dockerfile","relational.test.Dockerfile","renovate.json","skills-lock.json","startup.document.ci.sh","startup.document.dev.sh","startup.document.test.sh","startup.relational.ci.sh","startup.relational.dev.sh","startup.relational.test.sh","test/admin/auth.e2e-spec.ts","test/admin/users.e2e-spec.ts","test/jest-e2e.json","test/user/auth.e2e-spec.ts","test/utils/constants.ts","tsconfig.build.json","tsconfig.json","wait-for-it.sh"],"Other — CLAUDE.md":["CLAUDE.md"],"Other — CONTEXT.md":["CONTEXT.md"],"Other — Dockerfile":["Dockerfile"],"Other 
— GEMINI.md":["GEMINI.md"],"Other — Procfile":["Procfile"],"Other — README.md":["README.md"],"Other — commitlint.config.js":["commitlint.config.js"],"Other — coolify-docker-compose.yaml":["coolify-docker-compose.yaml"],"Other — docker-compose.document.ci.yaml":["docker-compose.document.ci.yaml"],"Other — docker-compose.document.test.yaml":["docker-compose.document.test.yaml"],"Other — docker-compose.document.yaml":["docker-compose.document.yaml"],"Other — docker-compose.relational.ci.yaml":["docker-compose.relational.ci.yaml"],"Other — docker-compose.relational.test.yaml":["docker-compose.relational.test.yaml"],"Other — docker-compose.yaml":["docker-compose.yaml"],"Other — docs":["docs/INSTALL_VERIFY.md","docs/SPEC.md","docs/architecture.md","docs/auth.md","docs/automatic-update-dependencies.md","docs/benchmarking.md","docs/cli.md","docs/database.md","docs/design-review-1-opus.md","docs/design-review-2-opus.md","docs/file-uploading.md","docs/installing-and-running.md","docs/introduction.md","docs/production-harness-gap-analysis.md","docs/readme.md","docs/serialization.md","docs/tests.md","docs/translations.md"],"Other — agents":["docs/agents/domain.md","docs/agents/issue-tracker.md","docs/agents/triage-labels.md"],"Other — superpowers":["docs/superpowers/plans/2026-05-02-symphony-adaptation.md","docs/superpowers/specs/2026-05-02-symphony-adaptation-design.md"],"Other — document.Dockerfile":["document.Dockerfile"],"Other — document.e2e.Dockerfile":["document.e2e.Dockerfile"],"Other — document.test.Dockerfile":["document.test.Dockerfile"],"Other — env-example-document":["env-example-document"],"Other — env-example-relational":["env-example-relational"],"Other — eslint.config.mjs":["eslint.config.mjs"],"Other — graphify-out":["graphify-out/GRAPH_REPORT.md","graphify-out/cost.json","graphify-out/graph.html","graphify-out/graph.json","graphify-out/manifest.json"],"Other — maildev.Dockerfile":["maildev.Dockerfile"],"Other — nest-cli.json":["nest-cli.json"],"Other — 
package.json":["package.json"],"Other — relational.e2e.Dockerfile":["relational.e2e.Dockerfile"],"Other — relational.test.Dockerfile":["relational.test.Dockerfile"],"Other — renovate.json":["renovate.json"],"Other — skills-lock.json":["skills-lock.json"],"Other — startup.document.ci.sh":["startup.document.ci.sh"],"Other — startup.document.dev.sh":["startup.document.dev.sh"],"Other — startup.document.test.sh":["startup.document.test.sh"],"Other — startup.relational.ci.sh":["startup.relational.ci.sh"],"Other — startup.relational.dev.sh":["startup.relational.dev.sh"],"Other — startup.relational.test.sh":["startup.relational.test.sh"],"Other — admin":["test/admin/auth.e2e-spec.ts","test/admin/users.e2e-spec.ts"],"Other — test":["test/jest-e2e.json"],"Other — user":["test/user/auth.e2e-spec.ts"],"Other — utils":["test/utils/constants.ts"],"Other — tsconfig.build.json":["tsconfig.build.json"],"Other — tsconfig.json":["tsconfig.json"],"Other — wait-for-it.sh":["wait-for-it.sh"]},"moduleTree":[{"name":"Authentication","slug":"authentication","files":["src/auth/auth.controller.ts","src/auth/auth.module.ts","src/auth/auth.service.ts","src/auth/auth-providers.enum.ts","src/auth/config/auth-config.type.ts","src/auth/config/auth.config.ts","src/auth/dto/auth-confirm-email.dto.ts","src/auth/dto/auth-email-login.dto.ts","src/auth/dto/auth-forgot-password.dto.ts","src/auth/dto/auth-register-login.dto.ts","src/auth/dto/auth-reset-password.dto.ts","src/auth/dto/auth-update.dto.ts","src/auth/dto/login-response.dto.ts","src/auth/dto/refresh-response.dto.ts","src/auth/strategies/anonymous.strategy.ts","src/auth/strategies/jwt-refresh.strategy.ts","src/auth/strategies/jwt.strategy.ts","src/auth/strategies/types/jwt-payload.type.ts","src/auth/strategies/types/jwt-refresh-payload.type.ts","src/auth-apple/auth-apple.controller.ts","src/auth-apple/auth-apple.module.ts","src/auth-apple/auth-apple.service.ts","src/auth-apple/config/apple-config.type.ts","src/auth-apple/config/apple.config.ts",
"src/auth-apple/dto/auth-apple-login.dto.ts","src/auth-facebook/auth-facebook.controller.ts","src/auth-facebook/auth-facebook.module.ts","src/auth-facebook/auth-facebook.service.ts","src/auth-facebook/config/facebook-config.type.ts","src/auth-facebook/config/facebook.config.ts","src/auth-facebook/dto/auth-facebook-login.dto.ts","src/auth-facebook/interfaces/facebook.interface.ts","src/auth-google/auth-google.controller.ts","src/auth-google/auth-google.module.ts","src/auth-google/auth-google.service.ts","src/auth-google/config/google-config.type.ts","src/auth-google/config/google.config.ts","src/auth-google/dto/auth-google-login.dto.ts","src/session/domain/session.ts","src/session/infrastructure/persistence/document/document-persistence.module.ts","src/session/infrastructure/persistence/document/entities/session.schema.ts","src/session/infrastructure/persistence/document/mappers/session.mapper.ts","src/session/infrastructure/persistence/document/repositories/session.repository.ts","src/session/infrastructure/persistence/relational/entities/session.entity.ts","src/session/infrastructure/persistence/relational/mappers/session.mapper.ts","src/session/infrastructure/persistence/relational/relational-persistence.module.ts","src/session/infrastructure/persistence/relational/repositories/session.repository.ts","src/session/infrastructure/persistence/session.repository.ts","src/session/session.module.ts","src/session/session.service.ts","src/roles/domain/role.ts","src/roles/dto/role.dto.ts","src/roles/infrastructure/persistence/document/entities/role.schema.ts","src/roles/infrastructure/persistence/relational/entities/role.entity.ts","src/roles/roles.decorator.ts","src/roles/roles.enum.ts","src/roles/roles.guard.ts","src/social/interfaces/social.interface.ts","src/social/tokens.ts"]},{"name":"User 
Management","slug":"user-management","files":["src/users/domain/user.ts","src/users/dto/create-user.dto.ts","src/users/dto/query-user.dto.ts","src/users/dto/update-user.dto.ts","src/users/dto/user.dto.ts","src/users/infrastructure/persistence/document/document-persistence.module.ts","src/users/infrastructure/persistence/document/entities/user.schema.ts","src/users/infrastructure/persistence/document/mappers/user.mapper.ts","src/users/infrastructure/persistence/document/repositories/user.repository.ts","src/users/infrastructure/persistence/relational/entities/user.entity.ts","src/users/infrastructure/persistence/relational/mappers/user.mapper.ts","src/users/infrastructure/persistence/relational/relational-persistence.module.ts","src/users/infrastructure/persistence/relational/repositories/user.repository.ts","src/users/infrastructure/persistence/user.repository.ts","src/users/users.controller.ts","src/users/users.module.ts","src/users/users.service.ts"]},{"name":"File Management","slug":"file-management","files":["src/files/config/file-config.type.ts","src/files/config/file.config.ts","src/files/domain/file.ts","src/files/dto/file.dto.ts","src/files/files.module.ts","src/files/files.service.ts","src/files/infrastructure/persistence/document/document-persistence.module.ts","src/files/infrastructure/persistence/document/entities/file.schema.ts","src/files/infrastructure/persistence/document/mappers/file.mapper.ts","src/files/infrastructure/persistence/document/repositories/file.repository.ts","src/files/infrastructure/persistence/file.repository.ts","src/files/infrastructure/persistence/relational/entities/file.entity.ts","src/files/infrastructure/persistence/relational/mappers/file.mapper.ts","src/files/infrastructure/persistence/relational/relational-persistence.module.ts","src/files/infrastructure/persistence/relational/repositories/file.repository.ts","src/files/infrastructure/uploader/local/dto/file-response.dto.ts","src/files/infrastructure/uploader/local/files.c
ontroller.ts","src/files/infrastructure/uploader/local/files.module.ts","src/files/infrastructure/uploader/local/files.service.ts","src/files/infrastructure/uploader/s3-presigned/dto/file-response.dto.ts","src/files/infrastructure/uploader/s3-presigned/dto/file.dto.ts","src/files/infrastructure/uploader/s3-presigned/files.controller.ts","src/files/infrastructure/uploader/s3-presigned/files.module.ts","src/files/infrastructure/uploader/s3-presigned/files.service.ts","src/files/infrastructure/uploader/s3/dto/file-response.dto.ts","src/files/infrastructure/uploader/s3/files.controller.ts","src/files/infrastructure/uploader/s3/files.module.ts","src/files/infrastructure/uploader/s3/files.service.ts"]},{"name":"Database Layer","slug":"database-layer","files":["src/database/config/database-config.type.ts","src/database/config/database.config.ts","src/database/data-source.ts","src/database/migrations/1715028537217-CreateUser.ts","src/database/mongoose-config.service.ts","src/database/typeorm-config.service.ts","src/database/seeds/document/run-seed.ts","src/database/seeds/document/seed.module.ts","src/database/seeds/document/user/user-seed.module.ts","src/database/seeds/document/user/user-seed.service.ts","src/database/seeds/relational/role/role-seed.module.ts","src/database/seeds/relational/role/role-seed.service.ts","src/database/seeds/relational/run-seed.ts","src/database/seeds/relational/seed.module.ts","src/database/seeds/relational/status/status-seed.module.ts","src/database/seeds/relational/status/status-seed.service.ts","src/database/seeds/relational/user/user-seed.module.ts","src/database/seeds/relational/user/user-seed.service.ts","src/config/app-config.type.ts","src/config/app.config.ts","src/config/config.type.ts"]},{"name":"Email 
Service","slug":"email-service","files":["src/mail/config/mail-config.type.ts","src/mail/config/mail.config.ts","src/mail/interfaces/mail-data.interface.ts","src/mail/mail-templates/activation.hbs","src/mail/mail-templates/confirm-new-email.hbs","src/mail/mail-templates/reset-password.hbs","src/mail/mail.module.ts","src/mail/mail.service.ts","src/mailer/mailer.module.ts","src/mailer/mailer.service.ts"]},{"name":"Symphony Workflow","slug":"symphony-workflow","files":["src/symphony/application/orchestrator.service.ts","src/symphony/application/persistence.service.ts","src/symphony/application/vcs-mirror.service.ts","src/symphony/application/workflow.service.spec.ts","src/symphony/application/workflow.service.ts","src/symphony/domain/entities/issue.entity.ts","src/symphony/domain/entities/session.entity.ts","src/symphony/domain/entities/workflow-config.entity.ts","src/symphony/domain/entities/workflow-definition.entity.ts","src/symphony/domain/ports/agent.port.ts","src/symphony/domain/ports/tracker.port.ts","src/symphony/domain/ports/vcs.port.ts","src/symphony/domain/ports/workspace.port.ts","src/symphony/infrastructure/adapters/agent/jules.adapter.ts","src/symphony/infrastructure/adapters/tracker/plane.adapter.ts","src/symphony/infrastructure/adapters/vcs/github.adapter.ts","src/symphony/infrastructure/adapters/vcs/gitlab.adapter.ts","src/symphony/infrastructure/adapters/workspace/local-workspace.adapter.ts","src/symphony/infrastructure/clients/jules.client.ts","src/symphony/infrastructure/clients/plane.client.ts","src/symphony/symphony.module.ts"]},{"name":"Status 
Domain","slug":"status-domain","files":["src/statuses/domain/status.ts","src/statuses/dto/status.dto.ts","src/statuses/infrastructure/persistence/document/entities/status.schema.ts","src/statuses/infrastructure/persistence/relational/entities/status.entity.ts","src/statuses/statuses.enum.ts"]},{"name":"Internationalization","slug":"internationalization","files":["src/i18n/ar/common.json","src/i18n/ar/confirm-email.json","src/i18n/ar/confirm-new-email.json","src/i18n/ar/reset-password.json","src/i18n/en/common.json","src/i18n/en/confirm-email.json","src/i18n/en/confirm-new-email.json","src/i18n/en/reset-password.json","src/i18n/es/common.json","src/i18n/es/confirm-email.json","src/i18n/es/confirm-new-email.json","src/i18n/es/reset-password.json","src/i18n/fr/common.json","src/i18n/fr/confirm-email.json","src/i18n/fr/confirm-new-email.json","src/i18n/fr/reset-password.json","src/i18n/hi/common.json","src/i18n/hi/confirm-email.json","src/i18n/hi/confirm-new-email.json","src/i18n/hi/reset-password.json","src/i18n/uk/common.json","src/i18n/uk/confirm-email.json","src/i18n/uk/confirm-new-email.json","src/i18n/uk/reset-password.json","src/i18n/zh/common.json","src/i18n/zh/confirm-email.json","src/i18n/zh/confirm-new-email.json","src/i18n/zh/reset-password.json"]},{"name":"Utilities","slug":"utilities","files":["src/utils/deep-resolver.ts","src/utils/document-entity-helper.ts","src/utils/dto/infinity-pagination-response.dto.ts","src/utils/infinity-pagination.ts","src/utils/relational-entity-helper.ts","src/utils/serializer.interceptor.ts","src/utils/transformers/lower-case.transformer.ts","src/utils/types/deep-partial.type.ts","src/utils/types/maybe.type.ts","src/utils/types/nullable.type.ts","src/utils/types/or-never.type.ts","src/utils/types/pagination-options.ts","src/utils/validate-config.ts","src/utils/validation-options.ts"]},{"name":"Application 
Core","slug":"application-core","files":["src/app.module.ts","src/main.ts","src/home/home.controller.ts","src/home/home.module.ts","src/home/home.service.ts"]},{"name":"Other","slug":"other","files":[],"children":[{"name":"Other — CLAUDE.md","slug":"other-claude-md","files":["CLAUDE.md"]},{"name":"Other — CONTEXT.md","slug":"other-context-md","files":["CONTEXT.md"]},{"name":"Other — Dockerfile","slug":"other-dockerfile","files":["Dockerfile"]},{"name":"Other — GEMINI.md","slug":"other-gemini-md","files":["GEMINI.md"]},{"name":"Other — Procfile","slug":"other-procfile","files":["Procfile"]},{"name":"Other — README.md","slug":"other-readme-md","files":["README.md"]},{"name":"Other — commitlint.config.js","slug":"other-commitlint-config-js","files":["commitlint.config.js"]},{"name":"Other — coolify-docker-compose.yaml","slug":"other-coolify-docker-compose-yaml","files":["coolify-docker-compose.yaml"]},{"name":"Other — docker-compose.document.ci.yaml","slug":"other-docker-compose-document-ci-yaml","files":["docker-compose.document.ci.yaml"]},{"name":"Other — docker-compose.document.test.yaml","slug":"other-docker-compose-document-test-yaml","files":["docker-compose.document.test.yaml"]},{"name":"Other — docker-compose.document.yaml","slug":"other-docker-compose-document-yaml","files":["docker-compose.document.yaml"]},{"name":"Other — docker-compose.relational.ci.yaml","slug":"other-docker-compose-relational-ci-yaml","files":["docker-compose.relational.ci.yaml"]},{"name":"Other — docker-compose.relational.test.yaml","slug":"other-docker-compose-relational-test-yaml","files":["docker-compose.relational.test.yaml"]},{"name":"Other — docker-compose.yaml","slug":"other-docker-compose-yaml","files":["docker-compose.yaml"]},{"name":"Other — 
docs","slug":"other-docs","files":["docs/INSTALL_VERIFY.md","docs/SPEC.md","docs/architecture.md","docs/auth.md","docs/automatic-update-dependencies.md","docs/benchmarking.md","docs/cli.md","docs/database.md","docs/design-review-1-opus.md","docs/design-review-2-opus.md","docs/file-uploading.md","docs/installing-and-running.md","docs/introduction.md","docs/production-harness-gap-analysis.md","docs/readme.md","docs/serialization.md","docs/tests.md","docs/translations.md"]},{"name":"Other — agents","slug":"other-agents","files":["docs/agents/domain.md","docs/agents/issue-tracker.md","docs/agents/triage-labels.md"]},{"name":"Other — superpowers","slug":"other-superpowers","files":["docs/superpowers/plans/2026-05-02-symphony-adaptation.md","docs/superpowers/specs/2026-05-02-symphony-adaptation-design.md"]},{"name":"Other — document.Dockerfile","slug":"other-document-dockerfile","files":["document.Dockerfile"]},{"name":"Other — document.e2e.Dockerfile","slug":"other-document-e2e-dockerfile","files":["document.e2e.Dockerfile"]},{"name":"Other — document.test.Dockerfile","slug":"other-document-test-dockerfile","files":["document.test.Dockerfile"]},{"name":"Other — env-example-document","slug":"other-env-example-document","files":["env-example-document"]},{"name":"Other — env-example-relational","slug":"other-env-example-relational","files":["env-example-relational"]},{"name":"Other — eslint.config.mjs","slug":"other-eslint-config-mjs","files":["eslint.config.mjs"]},{"name":"Other — graphify-out","slug":"other-graphify-out","files":["graphify-out/GRAPH_REPORT.md","graphify-out/cost.json","graphify-out/graph.html","graphify-out/graph.json","graphify-out/manifest.json"]},{"name":"Other — maildev.Dockerfile","slug":"other-maildev-dockerfile","files":["maildev.Dockerfile"]},{"name":"Other — nest-cli.json","slug":"other-nest-cli-json","files":["nest-cli.json"]},{"name":"Other — package.json","slug":"other-package-json","files":["package.json"]},{"name":"Other — 
relational.e2e.Dockerfile","slug":"other-relational-e2e-dockerfile","files":["relational.e2e.Dockerfile"]},{"name":"Other — relational.test.Dockerfile","slug":"other-relational-test-dockerfile","files":["relational.test.Dockerfile"]},{"name":"Other — renovate.json","slug":"other-renovate-json","files":["renovate.json"]},{"name":"Other — skills-lock.json","slug":"other-skills-lock-json","files":["skills-lock.json"]},{"name":"Other — startup.document.ci.sh","slug":"other-startup-document-ci-sh","files":["startup.document.ci.sh"]},{"name":"Other — startup.document.dev.sh","slug":"other-startup-document-dev-sh","files":["startup.document.dev.sh"]},{"name":"Other — startup.document.test.sh","slug":"other-startup-document-test-sh","files":["startup.document.test.sh"]},{"name":"Other — startup.relational.ci.sh","slug":"other-startup-relational-ci-sh","files":["startup.relational.ci.sh"]},{"name":"Other — startup.relational.dev.sh","slug":"other-startup-relational-dev-sh","files":["startup.relational.dev.sh"]},{"name":"Other — startup.relational.test.sh","slug":"other-startup-relational-test-sh","files":["startup.relational.test.sh"]},{"name":"Other — admin","slug":"other-admin","files":["test/admin/auth.e2e-spec.ts","test/admin/users.e2e-spec.ts"]},{"name":"Other — test","slug":"other-test","files":["test/jest-e2e.json"]},{"name":"Other — user","slug":"other-user","files":["test/user/auth.e2e-spec.ts"]},{"name":"Other — utils","slug":"other-utils","files":["test/utils/constants.ts"]},{"name":"Other — tsconfig.build.json","slug":"other-tsconfig-build-json","files":["tsconfig.build.json"]},{"name":"Other — tsconfig.json","slug":"other-tsconfig-json","files":["tsconfig.json"]},{"name":"Other — wait-for-it.sh","slug":"other-wait-for-it-sh","files":["wait-for-it.sh"]}]}]}; | |
| (function() { | |
| var activePage = 'overview'; | |
| document.addEventListener('DOMContentLoaded', function() { | |
| mermaid.initialize({ startOnLoad: false, theme: 'neutral', securityLevel: 'loose' }); | |
| renderMeta(); | |
| renderNav(); | |
| document.getElementById('menu-toggle').addEventListener('click', function() { | |
| document.getElementById('sidebar').classList.toggle('open'); | |
| }); | |
| if (location.hash && location.hash.length > 1) { | |
| activePage = decodeURIComponent(location.hash.slice(1)); | |
| } | |
| navigateTo(activePage); | |
| }); | |
| function renderMeta() { | |
| if (!META) return; | |
| var el = document.getElementById('meta-info'); | |
| var parts = []; | |
| if (META.generatedAt) { | |
| parts.push(new Date(META.generatedAt).toLocaleDateString()); | |
| } | |
| if (META.model) parts.push(META.model); | |
| if (META.fromCommit) parts.push(META.fromCommit.slice(0, 8)); | |
| el.textContent = parts.join(' \u00b7 '); | |
| } | |
| function renderNav() { | |
| var container = document.getElementById('nav-tree'); | |
| var html = '<div class="nav-section">'; | |
| html += '<a class="nav-item overview" data-page="overview" href="#overview">Overview</a>'; | |
| html += '</div>'; | |
| if (TREE.length > 0) { | |
| html += '<div class="nav-group-label">Modules</div>'; | |
| html += buildNavTree(TREE); | |
| } | |
| container.innerHTML = html; | |
| container.addEventListener('click', function(e) { | |
| var target = e.target; | |
| while (target && !target.dataset.page) { target = target.parentElement; } | |
| if (target && target.dataset.page) { | |
| e.preventDefault(); | |
| navigateTo(target.dataset.page); | |
| } | |
| }); | |
| } | |
| function buildNavTree(nodes) { | |
| var html = ''; | |
| for (var i = 0; i < nodes.length; i++) { | |
| var node = nodes[i]; | |
| html += '<div class="nav-section">'; | |
| html += '<a class="nav-item" data-page="' + escH(node.slug) + '" href="#' + encodeURIComponent(node.slug) + '">' + escH(node.name) + '</a>'; | |
| if (node.children && node.children.length > 0) { | |
| html += '<div class="nav-children">' + buildNavTree(node.children) + '</div>'; | |
| } | |
| html += '</div>'; | |
| } | |
| return html; | |
| } | |
| function escH(s) { | |
| var d = document.createElement('div'); | |
| d.textContent = s; | |
| return d.innerHTML; | |
| } | |
| function navigateTo(page) { | |
| activePage = page; | |
| location.hash = encodeURIComponent(page); | |
| var items = document.querySelectorAll('.nav-item'); | |
| for (var i = 0; i < items.length; i++) { | |
| if (items[i].dataset.page === page) { | |
| items[i].classList.add('active'); | |
| } else { | |
| items[i].classList.remove('active'); | |
| } | |
| } | |
| var contentEl = document.getElementById('content'); | |
| var md = PAGES[page]; | |
| if (!md) { | |
| contentEl.innerHTML = '<div class="empty-state"><h2>Page not found</h2><p>' + escH(page) + '.md does not exist.</p></div>'; | |
| return; | |
| } | |
| contentEl.innerHTML = marked.parse(md); | |
| // Rewrite .md links to hash navigation | |
| var links = contentEl.querySelectorAll('a[href]'); | |
| for (var i = 0; i < links.length; i++) { | |
| var href = links[i].getAttribute('href'); | |
| if (href && href.endsWith('.md') && href.indexOf('://') === -1) { | |
| var slug = href.replace(/\.md$/, ''); | |
| links[i].setAttribute('href', '#' + encodeURIComponent(slug)); | |
| (function(s) { | |
| links[i].addEventListener('click', function(e) { | |
| e.preventDefault(); | |
| navigateTo(s); | |
| }); | |
| })(slug); | |
| } | |
| } | |
| // Convert mermaid code blocks into mermaid divs | |
| var mermaidBlocks = contentEl.querySelectorAll('pre code.language-mermaid'); | |
| for (var i = 0; i < mermaidBlocks.length; i++) { | |
| var pre = mermaidBlocks[i].parentElement; | |
| var div = document.createElement('div'); | |
| div.className = 'mermaid'; | |
| div.textContent = mermaidBlocks[i].textContent; | |
| pre.parentNode.replaceChild(div, pre); | |
| } | |
| try { mermaid.run({ querySelector: '.mermaid' }); } catch(e) {} | |
| window.scrollTo(0, 0); | |
| document.getElementById('sidebar').classList.remove('open'); | |
| } | |
| })(); | |
| </script> | |
| </body> | |
| </html> |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment