Created
May 14, 2026 00:44
-
-
Save Th0rgal/2f8e9bc63887bcca4e14f45eef04cfd1 to your computer and use it in GitHub Desktop.
Unlink Security Audit Fixes - Single Commit Restore (1dc2ffb, 2026-05-14)
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| From 1dc2ffbbc53973b83f317e900674455eda74d3a2 Mon Sep 17 00:00:00 2001 | |
| From: "Thomas Marchand (agent)" <agent@thomas.md> | |
| Date: Thu, 14 May 2026 02:33:25 +0200 | |
| Subject: [PATCH] fix(security): restore all audit security remediations (from | |
| stash) | |
| --- | |
| .env.example | 3 +- | |
| .github/workflows/ci.yml | 21 - | |
| .github/workflows/contracts.yml | 2 + | |
| .github/workflows/zk.yml | 40 +- | |
| CLAUDE.md | 1 - | |
| .../projects/[projectSlug]/api-keys/page.tsx | 2 +- | |
| .../projects/[projectSlug]/layout.tsx | 2 +- | |
| .../projects/[projectSlug]/overview/page.tsx | 4 +- | |
| .../[projectSlug]/settings/general/page.tsx | 6 +- | |
| .../[projectSlug]/transactions/page.tsx | 2 +- | |
| .../projects/[projectSlug]/users/page.tsx | 2 +- | |
| frontend/dashboard/lib/api/actions.ts | 21 +- | |
| frontend/dashboard/lib/api/dashboard.d.ts | 1248 +++++++++-------- | |
| frontend/dashboard/lib/api/ratelimit.ts | 23 + | |
| frontend/dashboard/lib/api/resolvers.ts | 8 +- | |
| frontend/dashboard/package.json | 8 +- | |
| infra/fly/entrypoint.sh | 4 +- | |
| infra/local/devstack.yml | 8 +- | |
| infra/local/docker-compose.yml | 6 +- | |
| infra/local/promtail.yml | 3 +- | |
| justfile | 3 +- | |
| package.json | 3 + | |
| pnpm-lock.yaml | 935 +++--------- | |
| .../crates/api/src/auth/cached_verifier.rs | 1 + | |
| .../backend/crates/api/src/auth/extractor.rs | 3 + | |
| .../backend/crates/api/src/auth/middleware.rs | 1 + | |
| .../backend/crates/api/src/auth/verifier.rs | 18 +- | |
| .../crates/api/src/dashboard/handlers.rs | 22 +- | |
| .../crates/api/src/dashboard/middleware.rs | 27 +- | |
| .../backend/crates/api/src/dashboard/types.rs | 16 + | |
| .../backend/crates/api/src/test_support.rs | 3 + | |
| .../crates/api/src/transactions/handlers.rs | 24 +- | |
| protocol/backend/crates/core/src/burner.rs | 9 + | |
| protocol/backend/crates/core/src/models.rs | 2 - | |
| .../crates/core/src/services/test_support.rs | 7 +- | |
| .../crates/core/src/services/transaction.rs | 125 +- | |
| protocol/backend/crates/core/src/traits.rs | 11 +- | |
| .../backend/crates/ingester/src/goldsky.rs | 15 +- | |
| .../crates/storage/src/burner_store.rs | 10 + | |
| .../crates/storage/src/transaction_store.rs | 21 +- | |
| protocol/backend/crates/workers/src/lib.rs | 4 +- | |
| .../crates/workers/src/prove/worker.rs | 9 +- | |
| .../crates/workers/src/relay/broadcast.rs | 82 +- | |
| .../backend/crates/workers/src/relay/db.rs | 52 +- | |
| .../backend/crates/workers/src/relay/mod.rs | 5 +- | |
| .../workers/src/relay/tests/broadcast.rs | 16 +- | |
| .../crates/workers/src/relay/tests/burner.rs | 7 +- | |
| .../crates/workers/src/relay/tests/db.rs | 20 +- | |
| .../crates/workers/src/relay/tests/health.rs | 12 +- | |
| .../crates/workers/src/relay/tests/mod.rs | 22 +- | |
| .../crates/workers/src/relay/tests/outcome.rs | 9 +- | |
| .../workers/src/relay/tests/recovery.rs | 15 +- | |
| .../crates/workers/src/relay/tests/watch.rs | 25 +- | |
| .../crates/workers/src/relay/traits.rs | 46 +- | |
| .../crates/workers/src/relay/worker.rs | 22 +- | |
| .../backend/tests/tests/burner_gas_funding.rs | 50 +- | |
| .../backend/tests/tests/full_engine/mod.rs | 15 +- | |
| .../backend/tests/tests/relayer_worker.rs | 20 +- | |
| protocol/backend/tests/tests/support/mod.rs | 20 +- | |
| protocol/cli/scripts/stress.ts | 6 +- | |
| protocol/cli/src/commands/balance.ts | 8 + | |
| protocol/cli/src/signer.ts | 6 + | |
| protocol/cli/src/wallet.ts | 33 +- | |
| protocol/openapi/dashboard.yaml | 5 +- | |
| protocol/sdk/src/__tests__/burner.test.ts | 96 +- | |
| protocol/sdk/src/__tests__/transfer.test.ts | 139 +- | |
| protocol/sdk/src/__tests__/unlink.test.ts | 65 +- | |
| protocol/sdk/src/__tests__/withdraw.test.ts | 135 +- | |
| protocol/sdk/src/burner.ts | 17 + | |
| protocol/sdk/src/transactions/transfer.ts | 22 + | |
| protocol/sdk/src/transactions/withdraw.ts | 29 + | |
| protocol/zk/circuits/spend.circom | 10 + | |
| protocol/zk/package.json | 5 +- | |
| protocol/zk/scripts/download-artifacts.sh | 168 ++- | |
| protocol/zk/scripts/harden_verifier.py | 56 +- | |
| protocol/zk/scripts/preflight-test-zk.sh | 54 +- | |
| protocol/zk/scripts/test-witness.sh | 40 +- | |
| protocol/zk/scripts/upload-artifacts.sh | 48 +- | |
| 78 files changed, 2358 insertions(+), 1705 deletions(-) | |
| create mode 100644 frontend/dashboard/lib/api/ratelimit.ts | |
| diff --git a/.env.example b/.env.example | |
| index 5d91cda..b225f96 100644 | |
| --- a/.env.example | |
| +++ b/.env.example | |
| @@ -18,7 +18,8 @@ RPC_URL=http://127.0.0.1:8545 | |
| # RPC_ACTIVE_TRANSPORTS=2 # Top-N transports kept active; rest are warm standby (default: min(2, RPC_URLS count)) | |
| # RPC_URL_READ= # Optional: separate RPC for read-only queries (falls back to RPC_URL[S][0]) | |
| CONTRACT_ADDRESS= | |
| -RELAYER_PRIVATE_KEYS=0x59c6995e998f97a5a0044966f0945389dc9e86dae88c7a8412f4603b6b78690d | |
| +# RELAYER_PRIVATE_KEYS= # Set via: op run --env-file=.env -- just devstack | |
| +RELAYER_PRIVATE_KEYS= | |
| # RELAYER_PRIVATE_KEYS=0xkey0,0xkey1,0xkey2,0xkey3 # Multi-relayer pool (comma-separated) | |
| CHAIN_ID=31337 | |
| PERMIT2_ADDRESS= | |
| diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml | |
| index da7c9cf..a6deccc 100644 | |
| --- a/.github/workflows/ci.yml | |
| +++ b/.github/workflows/ci.yml | |
| @@ -81,27 +81,6 @@ jobs: | |
| - run: pnpm format:check | |
| - landing-check: | |
| - name: Landing app check | |
| - runs-on: ubuntu-latest | |
| - timeout-minutes: 10 | |
| - steps: | |
| - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 | |
| - | |
| - - uses: ./.github/actions/setup-toolchain | |
| - with: | |
| - node: "true" | |
| - pnpm-install: "true" | |
| - | |
| - - name: Typecheck landing | |
| - run: pnpm --filter "@unlink/landing" typecheck | |
| - | |
| - - name: Lint landing | |
| - run: pnpm --filter "@unlink/landing" lint | |
| - | |
| - - name: Build landing | |
| - run: pnpm --filter "@unlink/landing" build | |
| - | |
| migration-guard: | |
| name: Migration integrity check | |
| if: github.event_name == 'pull_request' && !contains(github.event.pull_request.labels.*.name, 'migration-reset') | |
| diff --git a/.github/workflows/contracts.yml b/.github/workflows/contracts.yml | |
| index 95955da..136fb31 100644 | |
| --- a/.github/workflows/contracts.yml | |
| +++ b/.github/workflows/contracts.yml | |
| @@ -6,12 +6,14 @@ on: | |
| - ".github/workflows/contracts.yml" | |
| - ".github/actions/setup-toolchain/**" | |
| - "protocol/contracts/**" | |
| + - "protocol/tests/vectors/**" | |
| push: | |
| branches: [main] | |
| paths: | |
| - ".github/workflows/contracts.yml" | |
| - ".github/actions/setup-toolchain/**" | |
| - "protocol/contracts/**" | |
| + - "protocol/tests/vectors/**" | |
| workflow_dispatch: | |
| concurrency: | |
| diff --git a/.github/workflows/zk.yml b/.github/workflows/zk.yml | |
| index 623e22e..a663b5a 100644 | |
| --- a/.github/workflows/zk.yml | |
| +++ b/.github/workflows/zk.yml | |
| @@ -4,16 +4,34 @@ on: | |
| pull_request: | |
| paths: | |
| - ".github/workflows/zk.yml" | |
| + - ".github/workflows/zk-proof-nightly.yml" | |
| - ".github/actions/setup-toolchain/**" | |
| - "protocol/zk/circuits/**" | |
| + - "protocol/zk/scripts/**" | |
| + - "protocol/zk/tests/**" | |
| + - "protocol/zk/circomkit.json" | |
| - "protocol/zk/package.json" | |
| + - "protocol/zk/vitest.config.*" | |
| + - "protocol/zk/patches/**" | |
| + - "protocol/zk/Dockerfile.witness-test" | |
| + - "patches/**" | |
| + - "pnpm-lock.yaml" | |
| push: | |
| branches: [main] | |
| paths: | |
| - ".github/workflows/zk.yml" | |
| + - ".github/workflows/zk-proof-nightly.yml" | |
| - ".github/actions/setup-toolchain/**" | |
| - "protocol/zk/circuits/**" | |
| + - "protocol/zk/scripts/**" | |
| + - "protocol/zk/tests/**" | |
| + - "protocol/zk/circomkit.json" | |
| - "protocol/zk/package.json" | |
| + - "protocol/zk/vitest.config.*" | |
| + - "protocol/zk/patches/**" | |
| + - "protocol/zk/Dockerfile.witness-test" | |
| + - "patches/**" | |
| + - "pnpm-lock.yaml" | |
| workflow_dispatch: | |
| concurrency: | |
| @@ -91,18 +109,38 @@ jobs: | |
| if-no-files-found: error | |
| retention-days: 30 | |
| - - name: Gate (fail on error-level findings) | |
| + - name: Gate (fail on error-level findings, warn on warning drift) | |
| run: | | |
| # No `|| fallback`: a malformed SARIF should fail the gate (errexit | |
| # + pipefail are GitHub bash defaults), not silently pass. | |
| ERRORS=$(jq '[.runs[].results[] | select(.level == "error")] | length' circomspect.sarif) | |
| + WARNINGS=$(jq '[.runs[].results[] | select(.level == "warning")] | length' circomspect.sarif) | |
| echo "Error-level findings: ${ERRORS}" | |
| + echo "Warning-level findings: ${WARNINGS}" | |
| if [ "${ERRORS}" -gt 0 ]; then | |
| echo "::error::Circomspect produced ${ERRORS} error-level finding(s). See step summary and download the circomspect-sarif artifact for details." | |
| jq '.runs[].results[] | select(.level == "error")' circomspect.sarif | |
| exit 1 | |
| fi | |
| + # Warning drift detection: fail if warning count changes from baseline. | |
| + # Known baseline from audit (2026-05-01): 3 warnings (2 signal-assignment | |
| + # in spend.circom, 1 Num2Bits non-strict in merkle_proof.circom). | |
| + # This prevents silent introduction of new warning-class issues. | |
| + KNOWN_WARNING_BASELINE=3 | |
| + if [ "${WARNINGS}" -gt 0 ]; then | |
| + if [ "${WARNINGS}" -ne "${KNOWN_WARNING_BASELINE}" ]; then | |
| + echo "::error::Circomspect warning count (${WARNINGS}) differs from baseline (${KNOWN_WARNING_BASELINE})." | |
| + echo "Known warnings:" | |
| + jq '.runs[].results[] | select(.level == "warning") | {rule: .rule.id, file: .locations[0].physicalLocation.artifactLocation.uri, line: .locations[0].physicalLocation.region.startLine}' circomspect.sarif | |
| + echo "If this is an expected change, update KNOWN_WARNING_BASELINE in .github/workflows/zk.yml" | |
| + exit 1 | |
| + else | |
| + echo "Warning count matches baseline (${KNOWN_WARNING_BASELINE}). Known acceptable findings:" | |
| + jq '.runs[].results[] | select(.level == "warning") | {rule: .rule.id, file: .locations[0].physicalLocation.artifactLocation.uri}' circomspect.sarif | |
| + fi | |
| + fi | |
| + | |
| # Structural regression guard for ENG-469's `Num2Bits_strict` swap. The test | |
| # auto-compiles `spend_10x4_v1` via circomkit and asserts every slot in the SHA | |
| # preimage loop instantiates AliasCheck. Cheap (~30s once cached) and runs | |
| diff --git a/CLAUDE.md b/CLAUDE.md | |
| index fc0fd1b..b2d93d9 100644 | |
| --- a/CLAUDE.md | |
| +++ b/CLAUDE.md | |
| @@ -55,7 +55,6 @@ Do not read or search — generated/low-value: | |
| - Never commit secrets, .env files, or API keys | |
| - Never modify an existing migration file after it has been merged. Always create a new migration (e.g., 0002\_\*.sql) instead. | |
| - When a PR touches code in an area with a README or `docs/` page, verify the README/doc still matches reality and update it in the same PR. Prefer linking to `docs/public/` (customer) or `docs/internal/` (team) over duplicating content in component READMEs. | |
| -- **Frontend / UI work**: before completing changes under `frontend/`, run the `web-design-guidelines` skill on touched files (accessibility, focus states, typography, responsive behavior). Install if missing: `npx skills add https://github.com/vercel-labs/agent-skills --skill web-design-guidelines`. | |
| ## GitHub | |
| diff --git a/frontend/dashboard/app/orgs/[orgSlug]/projects/[projectSlug]/api-keys/page.tsx b/frontend/dashboard/app/orgs/[orgSlug]/projects/[projectSlug]/api-keys/page.tsx | |
| index d126309..5f7c64a 100644 | |
| --- a/frontend/dashboard/app/orgs/[orgSlug]/projects/[projectSlug]/api-keys/page.tsx | |
| +++ b/frontend/dashboard/app/orgs/[orgSlug]/projects/[projectSlug]/api-keys/page.tsx | |
| @@ -21,7 +21,7 @@ interface Props { | |
| export default async function ApiKeysPage({ params }: Props) { | |
| const { orgSlug, projectSlug } = await params; | |
| - const project = await resolveProjectBySlug(projectSlug); | |
| + const project = await resolveProjectBySlug(projectSlug, orgSlug); | |
| if (!project) notFound(); | |
| const keys = await listApiKeys(project.id); | |
| diff --git a/frontend/dashboard/app/orgs/[orgSlug]/projects/[projectSlug]/layout.tsx b/frontend/dashboard/app/orgs/[orgSlug]/projects/[projectSlug]/layout.tsx | |
| index e977bb1..be72510 100644 | |
| --- a/frontend/dashboard/app/orgs/[orgSlug]/projects/[projectSlug]/layout.tsx | |
| +++ b/frontend/dashboard/app/orgs/[orgSlug]/projects/[projectSlug]/layout.tsx | |
| @@ -11,7 +11,7 @@ interface Props { | |
| export default async function ProjectLayout({ children, params }: Props) { | |
| const { orgSlug, projectSlug } = await params; | |
| - const project = await resolveProjectBySlug(projectSlug); | |
| + const project = await resolveProjectBySlug(projectSlug, orgSlug); | |
| if (!project) notFound(); | |
| const projects = await listProjects(); | |
| diff --git a/frontend/dashboard/app/orgs/[orgSlug]/projects/[projectSlug]/overview/page.tsx b/frontend/dashboard/app/orgs/[orgSlug]/projects/[projectSlug]/overview/page.tsx | |
| index ebce1e2..8830d2f 100644 | |
| --- a/frontend/dashboard/app/orgs/[orgSlug]/projects/[projectSlug]/overview/page.tsx | |
| +++ b/frontend/dashboard/app/orgs/[orgSlug]/projects/[projectSlug]/overview/page.tsx | |
| @@ -10,8 +10,8 @@ interface Props { | |
| } | |
| export default async function OverviewPage({ params }: Props) { | |
| - const { projectSlug } = await params; | |
| - const project = await resolveProjectBySlug(projectSlug); | |
| + const { orgSlug, projectSlug } = await params; | |
| + const project = await resolveProjectBySlug(projectSlug, orgSlug); | |
| if (!project) notFound(); | |
| const overview = await getProjectOverview(project.id); | |
| diff --git a/frontend/dashboard/app/orgs/[orgSlug]/projects/[projectSlug]/settings/general/page.tsx b/frontend/dashboard/app/orgs/[orgSlug]/projects/[projectSlug]/settings/general/page.tsx | |
| index fadd8f0..d35a45c 100644 | |
| --- a/frontend/dashboard/app/orgs/[orgSlug]/projects/[projectSlug]/settings/general/page.tsx | |
| +++ b/frontend/dashboard/app/orgs/[orgSlug]/projects/[projectSlug]/settings/general/page.tsx | |
| @@ -4,12 +4,12 @@ import { resolveProjectBySlug } from "@/lib/api/resolvers"; | |
| import { notFound } from "next/navigation"; | |
| interface Props { | |
| - params: Promise<{ projectSlug: string }>; | |
| + params: Promise<{ orgSlug: string; projectSlug: string }>; | |
| } | |
| export default async function ProjectSettingsGeneralPage({ params }: Props) { | |
| - const { projectSlug } = await params; | |
| - const project = await resolveProjectBySlug(projectSlug); | |
| + const { orgSlug, projectSlug } = await params; | |
| + const project = await resolveProjectBySlug(projectSlug, orgSlug); | |
| if (!project) notFound(); | |
| return ( | |
| diff --git a/frontend/dashboard/app/orgs/[orgSlug]/projects/[projectSlug]/transactions/page.tsx b/frontend/dashboard/app/orgs/[orgSlug]/projects/[projectSlug]/transactions/page.tsx | |
| index 046c9d4..edb5444 100644 | |
| --- a/frontend/dashboard/app/orgs/[orgSlug]/projects/[projectSlug]/transactions/page.tsx | |
| +++ b/frontend/dashboard/app/orgs/[orgSlug]/projects/[projectSlug]/transactions/page.tsx | |
| @@ -31,7 +31,7 @@ export default async function TransactionsPage({ | |
| const { orgSlug, projectSlug } = await params; | |
| const { cursor } = await searchParams; | |
| - const project = await resolveProjectBySlug(projectSlug); | |
| + const project = await resolveProjectBySlug(projectSlug, orgSlug); | |
| if (!project) notFound(); | |
| const page = await listProjectTransactions(project.id, cursor); | |
| diff --git a/frontend/dashboard/app/orgs/[orgSlug]/projects/[projectSlug]/users/page.tsx b/frontend/dashboard/app/orgs/[orgSlug]/projects/[projectSlug]/users/page.tsx | |
| index b940e5b..aabe606 100644 | |
| --- a/frontend/dashboard/app/orgs/[orgSlug]/projects/[projectSlug]/users/page.tsx | |
| +++ b/frontend/dashboard/app/orgs/[orgSlug]/projects/[projectSlug]/users/page.tsx | |
| @@ -22,7 +22,7 @@ export default async function UsersPage({ params, searchParams }: Props) { | |
| const { orgSlug, projectSlug } = await params; | |
| const { cursor } = await searchParams; | |
| - const project = await resolveProjectBySlug(projectSlug); | |
| + const project = await resolveProjectBySlug(projectSlug, orgSlug); | |
| if (!project) notFound(); | |
| const page = await listProjectUsers(project.id, cursor); | |
| diff --git a/frontend/dashboard/lib/api/actions.ts b/frontend/dashboard/lib/api/actions.ts | |
| index f104b11..8a23076 100644 | |
| --- a/frontend/dashboard/lib/api/actions.ts | |
| +++ b/frontend/dashboard/lib/api/actions.ts | |
| @@ -4,6 +4,11 @@ import { revalidatePath } from "next/cache"; | |
| import { redirect } from "next/navigation"; | |
| import { api, extractErrorMessage } from "./client"; | |
| +import { | |
| + apiKeyCreationLimiter, | |
| + apiKeyDisableLimiter, | |
| + projectCreationLimiter, | |
| +} from "./ratelimit"; | |
| export interface CreateProjectFormState { | |
| error: string | null; | |
| @@ -18,6 +23,11 @@ export async function createProjectAction( | |
| _prev: CreateProjectFormState, | |
| formData: FormData, | |
| ): Promise<CreateProjectFormState> { | |
| + const { success, remaining } = await projectCreationLimiter.limit(orgSlug); | |
| + if (!success) { | |
| + return { error: "Too many requests. Please try again in a moment." }; | |
| + } | |
| + | |
| const name = String(formData.get("name") ?? "").trim(); | |
| const description = String(formData.get("description") ?? "").trim(); | |
| @@ -61,6 +71,11 @@ export async function createApiKeyAction( | |
| _prev: CreateApiKeyState, | |
| formData: FormData, | |
| ): Promise<CreateApiKeyState> { | |
| + const { success, remaining } = await apiKeyCreationLimiter.limit(projectId); | |
| + if (!success) { | |
| + return { status: "error", error: "Too many requests. Please try again in a moment." }; | |
| + } | |
| + | |
| const label = String(formData.get("label") ?? "").trim(); | |
| if (label.length === 0) { | |
| return { status: "error", error: "Label is required." }; | |
| @@ -104,9 +119,13 @@ export async function disableApiKeyAction( | |
| projectId: string, | |
| keyId: string, | |
| revalidationPath: string, | |
| - // eslint-disable-next-line @typescript-eslint/no-unused-vars | |
| _prev: DisableApiKeyState, | |
| ): Promise<DisableApiKeyState> { | |
| + const { success, remaining } = await apiKeyDisableLimiter.limit(`${projectId}:${keyId}`); | |
| + if (!success) { | |
| + return { error: "Too many requests. Please try again in a moment." }; | |
| + } | |
| + | |
| const { error } = await api.POST( | |
| "/dashboard/projects/{projectId}/api-keys/{keyId}/disable", | |
| { params: { path: { projectId, keyId } } }, | |
| diff --git a/frontend/dashboard/lib/api/dashboard.d.ts b/frontend/dashboard/lib/api/dashboard.d.ts | |
| index 6223700..58b7cdc 100644 | |
| --- a/frontend/dashboard/lib/api/dashboard.d.ts | |
| +++ b/frontend/dashboard/lib/api/dashboard.d.ts | |
| @@ -4,653 +4,655 @@ | |
| */ | |
| export interface paths { | |
| - "/health": { | |
| - parameters: { | |
| - query?: never; | |
| - header?: never; | |
| - path?: never; | |
| - cookie?: never; | |
| - }; | |
| - /** Health check */ | |
| - get: operations["health"]; | |
| - put?: never; | |
| - post?: never; | |
| - delete?: never; | |
| - options?: never; | |
| - head?: never; | |
| - patch?: never; | |
| - trace?: never; | |
| - }; | |
| - "/dashboard/bootstrap": { | |
| - parameters: { | |
| - query?: never; | |
| - header?: never; | |
| - path?: never; | |
| - cookie?: never; | |
| - }; | |
| - get?: never; | |
| - put?: never; | |
| - /** | |
| - * Bootstrap a tenant | |
| - * @description Creates or updates the tenant for the authenticated Clerk org. | |
| - * Does not create a project. | |
| - */ | |
| - post: operations["bootstrapTenant"]; | |
| - delete?: never; | |
| - options?: never; | |
| - head?: never; | |
| - patch?: never; | |
| - trace?: never; | |
| - }; | |
| - "/dashboard/projects": { | |
| - parameters: { | |
| - query?: never; | |
| - header?: never; | |
| - path?: never; | |
| - cookie?: never; | |
| - }; | |
| - /** List projects for the tenant */ | |
| - get: operations["listProjects"]; | |
| - put?: never; | |
| - /** Create a project */ | |
| - post: operations["createProject"]; | |
| - delete?: never; | |
| - options?: never; | |
| - head?: never; | |
| - patch?: never; | |
| - trace?: never; | |
| - }; | |
| - "/dashboard/projects/{projectId}/overview": { | |
| - parameters: { | |
| - query?: never; | |
| - header?: never; | |
| - path?: never; | |
| - cookie?: never; | |
| - }; | |
| - /** Get project overview with KPIs and pipeline */ | |
| - get: operations["getProjectOverview"]; | |
| - put?: never; | |
| - post?: never; | |
| - delete?: never; | |
| - options?: never; | |
| - head?: never; | |
| - patch?: never; | |
| - trace?: never; | |
| - }; | |
| - "/dashboard/projects/{projectId}/users": { | |
| - parameters: { | |
| - query?: never; | |
| - header?: never; | |
| - path?: never; | |
| - cookie?: never; | |
| - }; | |
| - /** List project users */ | |
| - get: operations["listProjectUsers"]; | |
| - put?: never; | |
| - post?: never; | |
| - delete?: never; | |
| - options?: never; | |
| - head?: never; | |
| - patch?: never; | |
| - trace?: never; | |
| - }; | |
| - "/dashboard/projects/{projectId}/transactions": { | |
| - parameters: { | |
| - query?: never; | |
| - header?: never; | |
| - path?: never; | |
| - cookie?: never; | |
| - }; | |
| - /** List project transactions */ | |
| - get: operations["listProjectTransactions"]; | |
| - put?: never; | |
| - post?: never; | |
| - delete?: never; | |
| - options?: never; | |
| - head?: never; | |
| - patch?: never; | |
| - trace?: never; | |
| - }; | |
| - "/dashboard/projects/{projectId}/api-keys": { | |
| - parameters: { | |
| - query?: never; | |
| - header?: never; | |
| - path?: never; | |
| - cookie?: never; | |
| - }; | |
| - /** List API keys for a project */ | |
| - get: operations["listApiKeys"]; | |
| - put?: never; | |
| - /** | |
| - * Create a new API key | |
| - * @description Creates a new Unkey API key scoped to the project. | |
| - * The plaintext key is returned exactly once in the response. | |
| - */ | |
| - post: operations["createApiKey"]; | |
| - delete?: never; | |
| - options?: never; | |
| - head?: never; | |
| - patch?: never; | |
| - trace?: never; | |
| - }; | |
| - "/dashboard/projects/{projectId}/api-keys/{keyId}/disable": { | |
| - parameters: { | |
| - query?: never; | |
| - header?: never; | |
| - path?: never; | |
| - cookie?: never; | |
| - }; | |
| - get?: never; | |
| - put?: never; | |
| - /** | |
| - * Disable an API key | |
| - * @description Disables the key in Unkey (enabled: false). This is not a hard delete. | |
| - * The key can no longer be used for authentication after disabling. | |
| - */ | |
| - post: operations["disableApiKey"]; | |
| - delete?: never; | |
| - options?: never; | |
| - head?: never; | |
| - patch?: never; | |
| - trace?: never; | |
| - }; | |
| - "/dashboard/projects/{projectId}/users/{userAddress}/balances": { | |
| - parameters: { | |
| - query?: never; | |
| - header?: never; | |
| - path?: never; | |
| - cookie?: never; | |
| + "/health": { | |
| + parameters: { | |
| + query?: never; | |
| + header?: never; | |
| + path?: never; | |
| + cookie?: never; | |
| + }; | |
| + /** Health check */ | |
| + get: operations["health"]; | |
| + put?: never; | |
| + post?: never; | |
| + delete?: never; | |
| + options?: never; | |
| + head?: never; | |
| + patch?: never; | |
| + trace?: never; | |
| + }; | |
| + "/dashboard/bootstrap": { | |
| + parameters: { | |
| + query?: never; | |
| + header?: never; | |
| + path?: never; | |
| + cookie?: never; | |
| + }; | |
| + get?: never; | |
| + put?: never; | |
| + /** | |
| + * Bootstrap a tenant | |
| + * @description Creates or updates the tenant for the authenticated Clerk org. | |
| + * Does not create a project. | |
| + */ | |
| + post: operations["bootstrapTenant"]; | |
| + delete?: never; | |
| + options?: never; | |
| + head?: never; | |
| + patch?: never; | |
| + trace?: never; | |
| + }; | |
| + "/dashboard/projects": { | |
| + parameters: { | |
| + query?: never; | |
| + header?: never; | |
| + path?: never; | |
| + cookie?: never; | |
| + }; | |
| + /** List projects for the tenant */ | |
| + get: operations["listProjects"]; | |
| + put?: never; | |
| + /** Create a project */ | |
| + post: operations["createProject"]; | |
| + delete?: never; | |
| + options?: never; | |
| + head?: never; | |
| + patch?: never; | |
| + trace?: never; | |
| + }; | |
| + "/dashboard/projects/{projectId}/overview": { | |
| + parameters: { | |
| + query?: never; | |
| + header?: never; | |
| + path?: never; | |
| + cookie?: never; | |
| + }; | |
| + /** Get project overview with KPIs and pipeline */ | |
| + get: operations["getProjectOverview"]; | |
| + put?: never; | |
| + post?: never; | |
| + delete?: never; | |
| + options?: never; | |
| + head?: never; | |
| + patch?: never; | |
| + trace?: never; | |
| + }; | |
| + "/dashboard/projects/{projectId}/users": { | |
| + parameters: { | |
| + query?: never; | |
| + header?: never; | |
| + path?: never; | |
| + cookie?: never; | |
| + }; | |
| + /** List project users */ | |
| + get: operations["listProjectUsers"]; | |
| + put?: never; | |
| + post?: never; | |
| + delete?: never; | |
| + options?: never; | |
| + head?: never; | |
| + patch?: never; | |
| + trace?: never; | |
| + }; | |
| + "/dashboard/projects/{projectId}/transactions": { | |
| + parameters: { | |
| + query?: never; | |
| + header?: never; | |
| + path?: never; | |
| + cookie?: never; | |
| + }; | |
| + /** List project transactions */ | |
| + get: operations["listProjectTransactions"]; | |
| + put?: never; | |
| + post?: never; | |
| + delete?: never; | |
| + options?: never; | |
| + head?: never; | |
| + patch?: never; | |
| + trace?: never; | |
| + }; | |
| + "/dashboard/projects/{projectId}/api-keys": { | |
| + parameters: { | |
| + query?: never; | |
| + header?: never; | |
| + path?: never; | |
| + cookie?: never; | |
| + }; | |
| + /** List API keys for a project */ | |
| + get: operations["listApiKeys"]; | |
| + put?: never; | |
| + /** | |
| + * Create a new API key | |
| + * @description Creates a new Unkey API key scoped to the project. | |
| + * The plaintext key is returned exactly once in the response. | |
| + */ | |
| + post: operations["createApiKey"]; | |
| + delete?: never; | |
| + options?: never; | |
| + head?: never; | |
| + patch?: never; | |
| + trace?: never; | |
| + }; | |
| + "/dashboard/projects/{projectId}/api-keys/{keyId}/disable": { | |
| + parameters: { | |
| + query?: never; | |
| + header?: never; | |
| + path?: never; | |
| + cookie?: never; | |
| + }; | |
| + get?: never; | |
| + put?: never; | |
| + /** | |
| + * Disable an API key | |
| + * @description Disables the key in Unkey (enabled: false). This is not a hard delete. | |
| + * The key can no longer be used for authentication after disabling. | |
| + */ | |
| + post: operations["disableApiKey"]; | |
| + delete?: never; | |
| + options?: never; | |
| + head?: never; | |
| + patch?: never; | |
| + trace?: never; | |
| + }; | |
| + "/dashboard/projects/{projectId}/users/{userAddress}/balances": { | |
| + parameters: { | |
| + query?: never; | |
| + header?: never; | |
| + path?: never; | |
| + cookie?: never; | |
| + }; | |
| + /** | |
| + * Get balances for a project user | |
| + * @description Returns per-token balances for the given user within the project context. | |
| + * Reuses the same balance aggregation as the product API. | |
| + */ | |
| + get: operations["getUserBalances"]; | |
| + put?: never; | |
| + post?: never; | |
| + delete?: never; | |
| + options?: never; | |
| + head?: never; | |
| + patch?: never; | |
| + trace?: never; | |
| }; | |
| - /** | |
| - * Get balances for a project user | |
| - * @description Returns per-token balances for the given user within the project context. | |
| - * Reuses the same balance aggregation as the product API. | |
| - */ | |
| - get: operations["getUserBalances"]; | |
| - put?: never; | |
| - post?: never; | |
| - delete?: never; | |
| - options?: never; | |
| - head?: never; | |
| - patch?: never; | |
| - trace?: never; | |
| - }; | |
| } | |
| export type webhooks = Record<string, never>; | |
| export interface components { | |
| - schemas: { | |
| - HealthData: { | |
| - /** @enum {string} */ | |
| - status: "ok" | "degraded"; | |
| - }; | |
| - BootstrapTenantRequest: { | |
| - org_name?: string; | |
| - org_slug?: string; | |
| - actor_email?: string; | |
| - }; | |
| - BootstrapTenantData: { | |
| - tenant: components["schemas"]["TenantResponse"]; | |
| - actor: components["schemas"]["ActorContext"]; | |
| - }; | |
| - TenantResponse: { | |
| - /** Format: uuid */ | |
| - id: string; | |
| - clerk_org_id: string; | |
| - name: string; | |
| - slug?: string; | |
| - }; | |
| - ActorContext: { | |
| - clerk_user_id: string; | |
| - role?: string; | |
| - }; | |
| - CreateProjectRequest: { | |
| - name: string; | |
| - description?: string; | |
| - }; | |
| - ProjectResponse: { | |
| - /** Format: uuid */ | |
| - id: string; | |
| - /** Format: uuid */ | |
| - tenant_id: string; | |
| - name: string; | |
| - slug: string; | |
| - description?: string; | |
| - /** Format: date-time */ | |
| - created_at: string; | |
| - }; | |
| - ProjectListData: { | |
| - items: components["schemas"]["ProjectResponse"][]; | |
| - }; | |
| - ProjectOverview: { | |
| - kpis: components["schemas"]["OverviewKpis"]; | |
| - pipeline: components["schemas"]["PipelineStage"][]; | |
| - recent_transactions: components["schemas"]["ProjectTransaction"][]; | |
| - }; | |
| - OverviewKpis: { | |
| - /** Format: int64 */ | |
| - total_transactions_24h: number; | |
| - /** Format: int64 */ | |
| - active_users_7d: number; | |
| - volume_24h: string; | |
| - }; | |
| - PipelineStage: { | |
| - status: components["schemas"]["DashboardStatus"]; | |
| - /** Format: int64 */ | |
| - count: number; | |
| - }; | |
| - ProjectUser: { | |
| - address: string; | |
| - public_key?: string; | |
| - /** Format: int64 */ | |
| - transaction_count: number; | |
| - primary_balance?: string; | |
| - /** Format: date-time */ | |
| - created_at: string; | |
| - /** Format: date-time */ | |
| - last_active_at?: string; | |
| - }; | |
| - ProjectUserListData: { | |
| - items: components["schemas"]["ProjectUser"][]; | |
| - next_cursor?: string; | |
| - }; | |
| - ProjectTransaction: { | |
| - id: string; | |
| - type: string; | |
| - status: components["schemas"]["DashboardStatus"]; | |
| - chain: string; | |
| - from_address?: string; | |
| - to_address?: string; | |
| - token_symbol?: string; | |
| - amount: string; | |
| - tx_hash?: string; | |
| - /** Format: date-time */ | |
| - created_at: string; | |
| - }; | |
| - ProjectTransactionListData: { | |
| - items: components["schemas"]["ProjectTransaction"][]; | |
| - next_cursor?: string; | |
| - }; | |
| - /** | |
| - * @description Collapsed dashboard status mapped from internal transaction states: | |
| - * accepted → submitted, prepared/proving/proved/broadcasting → processing, | |
| - * relayed/processed → complete, failed → failed. | |
| - * @enum {string} | |
| - */ | |
| - DashboardStatus: "submitted" | "processing" | "complete" | "failed"; | |
| - CreateApiKeyRequest: { | |
| - /** | |
| - * @description Human-readable label for the key | |
| - * @example Production API Key | |
| - */ | |
| - label?: string; | |
| - }; | |
| - ApiKeyResponse: { | |
| - /** @description Unkey key ID */ | |
| - id: string; | |
| - label: string; | |
| - /** @description Safe-to-display prefix of the key (first 8 chars) */ | |
| - prefix?: string; | |
| - enabled: boolean; | |
| - /** @description Clerk user ID of the key creator */ | |
| - created_by: string; | |
| - /** Format: date-time */ | |
| - created_at: string; | |
| - }; | |
| - /** @description Returned on create — includes the plaintext key shown once. */ | |
| - ApiKeyCreatedResponse: components["schemas"]["ApiKeyResponse"] & { | |
| - /** @description The full API key, shown exactly once */ | |
| - plaintext_key: string; | |
| - }; | |
| - ApiKeyListData: { | |
| - items: components["schemas"]["ApiKeyResponse"][]; | |
| - }; | |
| - DashboardUserBalancesData: { | |
| - balances: { | |
| - /** @description ERC-20 token contract address */ | |
| - token: string; | |
| - /** @description Aggregated token balance (numeric string) */ | |
| - amount: string; | |
| - }[]; | |
| - }; | |
| - ErrorResponse: { | |
| - error: { | |
| - code: string; | |
| - message: string; | |
| - }; | |
| - }; | |
| - }; | |
| - responses: { | |
| - /** @description Resource not found */ | |
| - NotFound: { | |
| - headers: { | |
| - [name: string]: unknown; | |
| - }; | |
| - content: { | |
| - "application/json": components["schemas"]["ErrorResponse"]; | |
| - }; | |
| - }; | |
| - /** @description Conflict — e.g., key already disabled or project name taken */ | |
| - Conflict: { | |
| - headers: { | |
| - [name: string]: unknown; | |
| - }; | |
| - content: { | |
| - "application/json": components["schemas"]["ErrorResponse"]; | |
| - }; | |
| - }; | |
| - }; | |
| - parameters: { | |
| - ProjectId: string; | |
| - UserAddress: string; | |
| - KeyId: string; | |
| - /** @description Opaque pagination cursor from a previous response */ | |
| - Cursor: string; | |
| - /** @description Maximum number of items to return */ | |
| - Limit: number; | |
| - }; | |
| - requestBodies: never; | |
| - headers: never; | |
| - pathItems: never; | |
| -} | |
| -export type $defs = Record<string, never>; | |
| -export interface operations { | |
| - health: { | |
| - parameters: { | |
| - query?: never; | |
| - header?: never; | |
| - path?: never; | |
| - cookie?: never; | |
| - }; | |
| - requestBody?: never; | |
| - responses: { | |
| - /** @description System healthy */ | |
| - 200: { | |
| - headers: { | |
| - [name: string]: unknown; | |
| - }; | |
| - content: { | |
| - "application/json": { | |
| - data: components["schemas"]["HealthData"]; | |
| - }; | |
| - }; | |
| - }; | |
| - /** @description System degraded */ | |
| - 503: { | |
| - headers: { | |
| - [name: string]: unknown; | |
| - }; | |
| - content: { | |
| - "application/json": { | |
| - data: components["schemas"]["HealthData"]; | |
| - }; | |
| - }; | |
| - }; | |
| - }; | |
| - }; | |
| - bootstrapTenant: { | |
| - parameters: { | |
| - query?: never; | |
| - header?: never; | |
| - path?: never; | |
| - cookie?: never; | |
| - }; | |
| - requestBody?: { | |
| - content: { | |
| - "application/json": components["schemas"]["BootstrapTenantRequest"]; | |
| - }; | |
| - }; | |
| - responses: { | |
| - /** @description Tenant bootstrapped */ | |
| - 200: { | |
| - headers: { | |
| - [name: string]: unknown; | |
| + schemas: { | |
| + HealthData: { | |
| + /** @enum {string} */ | |
| + status: "ok" | "degraded"; | |
| }; | |
| - content: { | |
| - "application/json": { | |
| - data: components["schemas"]["BootstrapTenantData"]; | |
| - }; | |
| + BootstrapTenantRequest: { | |
| + org_name?: string; | |
| + org_slug?: string; | |
| + actor_email?: string; | |
| }; | |
| - }; | |
| - }; | |
| - }; | |
| - listProjects: { | |
| - parameters: { | |
| - query?: never; | |
| - header?: never; | |
| - path?: never; | |
| - cookie?: never; | |
| - }; | |
| - requestBody?: never; | |
| - responses: { | |
| - /** @description Project list */ | |
| - 200: { | |
| - headers: { | |
| - [name: string]: unknown; | |
| + BootstrapTenantData: { | |
| + tenant: components["schemas"]["TenantResponse"]; | |
| + actor: components["schemas"]["ActorContext"]; | |
| }; | |
| - content: { | |
| - "application/json": { | |
| - data: components["schemas"]["ProjectListData"]; | |
| - }; | |
| + TenantResponse: { | |
| + /** Format: uuid */ | |
| + id: string; | |
| + clerk_org_id: string; | |
| + name: string; | |
| + slug?: string; | |
| + }; | |
| + ActorContext: { | |
| + clerk_user_id: string; | |
| + role?: string; | |
| + }; | |
| + CreateProjectRequest: { | |
| + name: string; | |
| + description?: string; | |
| + }; | |
| + ProjectResponse: { | |
| + /** Format: uuid */ | |
| + id: string; | |
| + /** Format: uuid */ | |
| + tenant_id: string; | |
| + /** @description Tenant slug for tenant-scoped project filtering */ | |
| + tenant_slug: string; | |
| + name: string; | |
| + slug: string; | |
| + description?: string; | |
| + /** Format: date-time */ | |
| + created_at: string; | |
| + }; | |
| + ProjectListData: { | |
| + items: components["schemas"]["ProjectResponse"][]; | |
| + }; | |
| + ProjectOverview: { | |
| + kpis: components["schemas"]["OverviewKpis"]; | |
| + pipeline: components["schemas"]["PipelineStage"][]; | |
| + recent_transactions: components["schemas"]["ProjectTransaction"][]; | |
| + }; | |
| + OverviewKpis: { | |
| + /** Format: int64 */ | |
| + total_transactions_24h: number; | |
| + /** Format: int64 */ | |
| + active_users_7d: number; | |
| + volume_24h: string; | |
| + }; | |
| + PipelineStage: { | |
| + status: components["schemas"]["DashboardStatus"]; | |
| + /** Format: int64 */ | |
| + count: number; | |
| + }; | |
| + ProjectUser: { | |
| + address: string; | |
| + public_key?: string; | |
| + /** Format: int64 */ | |
| + transaction_count: number; | |
| + primary_balance?: string; | |
| + /** Format: date-time */ | |
| + created_at: string; | |
| + /** Format: date-time */ | |
| + last_active_at?: string; | |
| + }; | |
| + ProjectUserListData: { | |
| + items: components["schemas"]["ProjectUser"][]; | |
| + next_cursor?: string; | |
| + }; | |
| + ProjectTransaction: { | |
| + id: string; | |
| + type: string; | |
| + status: components["schemas"]["DashboardStatus"]; | |
| + chain: string; | |
| + from_address?: string; | |
| + to_address?: string; | |
| + token_symbol?: string; | |
| + amount: string; | |
| + tx_hash?: string; | |
| + /** Format: date-time */ | |
| + created_at: string; | |
| + }; | |
| + ProjectTransactionListData: { | |
| + items: components["schemas"]["ProjectTransaction"][]; | |
| + next_cursor?: string; | |
| + }; | |
| + /** | |
| + * @description Collapsed dashboard status mapped from internal transaction states: | |
| + * accepted → submitted, prepared/proving/proved/broadcasting → processing, | |
| + * relayed/processed → complete, failed → failed. | |
| + * @enum {string} | |
| + */ | |
| + DashboardStatus: "submitted" | "processing" | "complete" | "failed"; | |
| + CreateApiKeyRequest: { | |
| + /** | |
| + * @description Human-readable label for the key | |
| + * @example Production API Key | |
| + */ | |
| + label?: string; | |
| + }; | |
| + ApiKeyResponse: { | |
| + /** @description Unkey key ID */ | |
| + id: string; | |
| + label: string; | |
| + /** @description Safe-to-display prefix of the key (first 8 chars) */ | |
| + prefix?: string; | |
| + enabled: boolean; | |
| + /** @description Clerk user ID of the key creator */ | |
| + created_by: string; | |
| + /** Format: date-time */ | |
| + created_at: string; | |
| + }; | |
| + /** @description Returned on create — includes the plaintext key shown once. */ | |
| + ApiKeyCreatedResponse: components["schemas"]["ApiKeyResponse"] & { | |
| + /** @description The full API key, shown exactly once */ | |
| + plaintext_key: string; | |
| + }; | |
| + ApiKeyListData: { | |
| + items: components["schemas"]["ApiKeyResponse"][]; | |
| + }; | |
| + DashboardUserBalancesData: { | |
| + balances: { | |
| + /** @description ERC-20 token contract address */ | |
| + token: string; | |
| + /** @description Aggregated token balance (numeric string) */ | |
| + amount: string; | |
| + }[]; | |
| + }; | |
| + ErrorResponse: { | |
| + error: { | |
| + code: string; | |
| + message: string; | |
| + }; | |
| }; | |
| - }; | |
| - }; | |
| - }; | |
| - createProject: { | |
| - parameters: { | |
| - query?: never; | |
| - header?: never; | |
| - path?: never; | |
| - cookie?: never; | |
| - }; | |
| - requestBody: { | |
| - content: { | |
| - "application/json": components["schemas"]["CreateProjectRequest"]; | |
| - }; | |
| - }; | |
| - responses: { | |
| - /** @description Project created */ | |
| - 201: { | |
| - headers: { | |
| - [name: string]: unknown; | |
| - }; | |
| - content: { | |
| - "application/json": { | |
| - data: components["schemas"]["ProjectResponse"]; | |
| - }; | |
| - }; | |
| - }; | |
| - 409: components["responses"]["Conflict"]; | |
| - }; | |
| - }; | |
| - getProjectOverview: { | |
| - parameters: { | |
| - query?: never; | |
| - header?: never; | |
| - path: { | |
| - projectId: components["parameters"]["ProjectId"]; | |
| - }; | |
| - cookie?: never; | |
| }; | |
| - requestBody?: never; | |
| responses: { | |
| - /** @description Project overview */ | |
| - 200: { | |
| - headers: { | |
| - [name: string]: unknown; | |
| - }; | |
| - content: { | |
| - "application/json": { | |
| - data: components["schemas"]["ProjectOverview"]; | |
| - }; | |
| - }; | |
| - }; | |
| - 404: components["responses"]["NotFound"]; | |
| + /** @description Resource not found */ | |
| + NotFound: { | |
| + headers: { | |
| + [name: string]: unknown; | |
| + }; | |
| + content: { | |
| + "application/json": components["schemas"]["ErrorResponse"]; | |
| + }; | |
| + }; | |
| + /** @description Conflict — e.g., key already disabled or project name taken */ | |
| + Conflict: { | |
| + headers: { | |
| + [name: string]: unknown; | |
| + }; | |
| + content: { | |
| + "application/json": components["schemas"]["ErrorResponse"]; | |
| + }; | |
| + }; | |
| }; | |
| - }; | |
| - listProjectUsers: { | |
| parameters: { | |
| - query?: { | |
| + ProjectId: string; | |
| + UserAddress: string; | |
| + KeyId: string; | |
| /** @description Opaque pagination cursor from a previous response */ | |
| - cursor?: components["parameters"]["Cursor"]; | |
| + Cursor: string; | |
| /** @description Maximum number of items to return */ | |
| - limit?: components["parameters"]["Limit"]; | |
| - }; | |
| - header?: never; | |
| - path: { | |
| - projectId: components["parameters"]["ProjectId"]; | |
| - }; | |
| - cookie?: never; | |
| - }; | |
| - requestBody?: never; | |
| - responses: { | |
| - /** @description Paginated user list */ | |
| - 200: { | |
| - headers: { | |
| - [name: string]: unknown; | |
| - }; | |
| - content: { | |
| - "application/json": { | |
| - data: components["schemas"]["ProjectUserListData"]; | |
| - }; | |
| - }; | |
| - }; | |
| - 404: components["responses"]["NotFound"]; | |
| + Limit: number; | |
| }; | |
| - }; | |
| - listProjectTransactions: { | |
| - parameters: { | |
| - query?: { | |
| - /** @description Opaque pagination cursor from a previous response */ | |
| - cursor?: components["parameters"]["Cursor"]; | |
| - /** @description Maximum number of items to return */ | |
| - limit?: components["parameters"]["Limit"]; | |
| - }; | |
| - header?: never; | |
| - path: { | |
| - projectId: components["parameters"]["ProjectId"]; | |
| - }; | |
| - cookie?: never; | |
| + requestBodies: never; | |
| + headers: never; | |
| + pathItems: never; | |
| +} | |
| +export type $defs = Record<string, never>; | |
| +export interface operations { | |
| + health: { | |
| + parameters: { | |
| + query?: never; | |
| + header?: never; | |
| + path?: never; | |
| + cookie?: never; | |
| + }; | |
| + requestBody?: never; | |
| + responses: { | |
| + /** @description System healthy */ | |
| + 200: { | |
| + headers: { | |
| + [name: string]: unknown; | |
| + }; | |
| + content: { | |
| + "application/json": { | |
| + data: components["schemas"]["HealthData"]; | |
| + }; | |
| + }; | |
| + }; | |
| + /** @description System degraded */ | |
| + 503: { | |
| + headers: { | |
| + [name: string]: unknown; | |
| + }; | |
| + content: { | |
| + "application/json": { | |
| + data: components["schemas"]["HealthData"]; | |
| + }; | |
| + }; | |
| + }; | |
| + }; | |
| }; | |
| - requestBody?: never; | |
| - responses: { | |
| - /** @description Paginated transaction list */ | |
| - 200: { | |
| - headers: { | |
| - [name: string]: unknown; | |
| - }; | |
| - content: { | |
| - "application/json": { | |
| - data: components["schemas"]["ProjectTransactionListData"]; | |
| - }; | |
| - }; | |
| - }; | |
| - 404: components["responses"]["NotFound"]; | |
| + bootstrapTenant: { | |
| + parameters: { | |
| + query?: never; | |
| + header?: never; | |
| + path?: never; | |
| + cookie?: never; | |
| + }; | |
| + requestBody?: { | |
| + content: { | |
| + "application/json": components["schemas"]["BootstrapTenantRequest"]; | |
| + }; | |
| + }; | |
| + responses: { | |
| + /** @description Tenant bootstrapped */ | |
| + 200: { | |
| + headers: { | |
| + [name: string]: unknown; | |
| + }; | |
| + content: { | |
| + "application/json": { | |
| + data: components["schemas"]["BootstrapTenantData"]; | |
| + }; | |
| + }; | |
| + }; | |
| + }; | |
| }; | |
| - }; | |
| - listApiKeys: { | |
| - parameters: { | |
| - query?: never; | |
| - header?: never; | |
| - path: { | |
| - projectId: components["parameters"]["ProjectId"]; | |
| - }; | |
| - cookie?: never; | |
| + listProjects: { | |
| + parameters: { | |
| + query?: never; | |
| + header?: never; | |
| + path?: never; | |
| + cookie?: never; | |
| + }; | |
| + requestBody?: never; | |
| + responses: { | |
| + /** @description Project list */ | |
| + 200: { | |
| + headers: { | |
| + [name: string]: unknown; | |
| + }; | |
| + content: { | |
| + "application/json": { | |
| + data: components["schemas"]["ProjectListData"]; | |
| + }; | |
| + }; | |
| + }; | |
| + }; | |
| }; | |
| - requestBody?: never; | |
| - responses: { | |
| - /** @description List of API keys */ | |
| - 200: { | |
| - headers: { | |
| - [name: string]: unknown; | |
| - }; | |
| - content: { | |
| - "application/json": { | |
| - data: components["schemas"]["ApiKeyListData"]; | |
| - }; | |
| - }; | |
| - }; | |
| - 404: components["responses"]["NotFound"]; | |
| + createProject: { | |
| + parameters: { | |
| + query?: never; | |
| + header?: never; | |
| + path?: never; | |
| + cookie?: never; | |
| + }; | |
| + requestBody: { | |
| + content: { | |
| + "application/json": components["schemas"]["CreateProjectRequest"]; | |
| + }; | |
| + }; | |
| + responses: { | |
| + /** @description Project created */ | |
| + 201: { | |
| + headers: { | |
| + [name: string]: unknown; | |
| + }; | |
| + content: { | |
| + "application/json": { | |
| + data: components["schemas"]["ProjectResponse"]; | |
| + }; | |
| + }; | |
| + }; | |
| + 409: components["responses"]["Conflict"]; | |
| + }; | |
| }; | |
| - }; | |
| - createApiKey: { | |
| - parameters: { | |
| - query?: never; | |
| - header?: never; | |
| - path: { | |
| - projectId: components["parameters"]["ProjectId"]; | |
| - }; | |
| - cookie?: never; | |
| + getProjectOverview: { | |
| + parameters: { | |
| + query?: never; | |
| + header?: never; | |
| + path: { | |
| + projectId: components["parameters"]["ProjectId"]; | |
| + }; | |
| + cookie?: never; | |
| + }; | |
| + requestBody?: never; | |
| + responses: { | |
| + /** @description Project overview */ | |
| + 200: { | |
| + headers: { | |
| + [name: string]: unknown; | |
| + }; | |
| + content: { | |
| + "application/json": { | |
| + data: components["schemas"]["ProjectOverview"]; | |
| + }; | |
| + }; | |
| + }; | |
| + 404: components["responses"]["NotFound"]; | |
| + }; | |
| }; | |
| - requestBody?: { | |
| - content: { | |
| - "application/json": components["schemas"]["CreateApiKeyRequest"]; | |
| - }; | |
| + listProjectUsers: { | |
| + parameters: { | |
| + query?: { | |
| + /** @description Opaque pagination cursor from a previous response */ | |
| + cursor?: components["parameters"]["Cursor"]; | |
| + /** @description Maximum number of items to return */ | |
| + limit?: components["parameters"]["Limit"]; | |
| + }; | |
| + header?: never; | |
| + path: { | |
| + projectId: components["parameters"]["ProjectId"]; | |
| + }; | |
| + cookie?: never; | |
| + }; | |
| + requestBody?: never; | |
| + responses: { | |
| + /** @description Paginated user list */ | |
| + 200: { | |
| + headers: { | |
| + [name: string]: unknown; | |
| + }; | |
| + content: { | |
| + "application/json": { | |
| + data: components["schemas"]["ProjectUserListData"]; | |
| + }; | |
| + }; | |
| + }; | |
| + 404: components["responses"]["NotFound"]; | |
| + }; | |
| }; | |
| - responses: { | |
| - /** @description API key created */ | |
| - 201: { | |
| - headers: { | |
| - [name: string]: unknown; | |
| - }; | |
| - content: { | |
| - "application/json": { | |
| - data: components["schemas"]["ApiKeyCreatedResponse"]; | |
| - }; | |
| - }; | |
| - }; | |
| - 404: components["responses"]["NotFound"]; | |
| + listProjectTransactions: { | |
| + parameters: { | |
| + query?: { | |
| + /** @description Opaque pagination cursor from a previous response */ | |
| + cursor?: components["parameters"]["Cursor"]; | |
| + /** @description Maximum number of items to return */ | |
| + limit?: components["parameters"]["Limit"]; | |
| + }; | |
| + header?: never; | |
| + path: { | |
| + projectId: components["parameters"]["ProjectId"]; | |
| + }; | |
| + cookie?: never; | |
| + }; | |
| + requestBody?: never; | |
| + responses: { | |
| + /** @description Paginated transaction list */ | |
| + 200: { | |
| + headers: { | |
| + [name: string]: unknown; | |
| + }; | |
| + content: { | |
| + "application/json": { | |
| + data: components["schemas"]["ProjectTransactionListData"]; | |
| + }; | |
| + }; | |
| + }; | |
| + 404: components["responses"]["NotFound"]; | |
| + }; | |
| }; | |
| - }; | |
| - disableApiKey: { | |
| - parameters: { | |
| - query?: never; | |
| - header?: never; | |
| - path: { | |
| - projectId: components["parameters"]["ProjectId"]; | |
| - keyId: components["parameters"]["KeyId"]; | |
| - }; | |
| - cookie?: never; | |
| + listApiKeys: { | |
| + parameters: { | |
| + query?: never; | |
| + header?: never; | |
| + path: { | |
| + projectId: components["parameters"]["ProjectId"]; | |
| + }; | |
| + cookie?: never; | |
| + }; | |
| + requestBody?: never; | |
| + responses: { | |
| + /** @description List of API keys */ | |
| + 200: { | |
| + headers: { | |
| + [name: string]: unknown; | |
| + }; | |
| + content: { | |
| + "application/json": { | |
| + data: components["schemas"]["ApiKeyListData"]; | |
| + }; | |
| + }; | |
| + }; | |
| + 404: components["responses"]["NotFound"]; | |
| + }; | |
| }; | |
| - requestBody?: never; | |
| - responses: { | |
| - /** @description Key disabled */ | |
| - 204: { | |
| - headers: { | |
| - [name: string]: unknown; | |
| - }; | |
| - content?: never; | |
| - }; | |
| - 404: components["responses"]["NotFound"]; | |
| - 409: components["responses"]["Conflict"]; | |
| + createApiKey: { | |
| + parameters: { | |
| + query?: never; | |
| + header?: never; | |
| + path: { | |
| + projectId: components["parameters"]["ProjectId"]; | |
| + }; | |
| + cookie?: never; | |
| + }; | |
| + requestBody?: { | |
| + content: { | |
| + "application/json": components["schemas"]["CreateApiKeyRequest"]; | |
| + }; | |
| + }; | |
| + responses: { | |
| + /** @description API key created */ | |
| + 201: { | |
| + headers: { | |
| + [name: string]: unknown; | |
| + }; | |
| + content: { | |
| + "application/json": { | |
| + data: components["schemas"]["ApiKeyCreatedResponse"]; | |
| + }; | |
| + }; | |
| + }; | |
| + 404: components["responses"]["NotFound"]; | |
| + }; | |
| }; | |
| - }; | |
| - getUserBalances: { | |
| - parameters: { | |
| - query?: { | |
| - /** @description Optional ERC-20 token address to filter by */ | |
| - token?: string; | |
| - }; | |
| - header?: never; | |
| - path: { | |
| - projectId: components["parameters"]["ProjectId"]; | |
| - userAddress: components["parameters"]["UserAddress"]; | |
| - }; | |
| - cookie?: never; | |
| + disableApiKey: { | |
| + parameters: { | |
| + query?: never; | |
| + header?: never; | |
| + path: { | |
| + projectId: components["parameters"]["ProjectId"]; | |
| + keyId: components["parameters"]["KeyId"]; | |
| + }; | |
| + cookie?: never; | |
| + }; | |
| + requestBody?: never; | |
| + responses: { | |
| + /** @description Key disabled */ | |
| + 204: { | |
| + headers: { | |
| + [name: string]: unknown; | |
| + }; | |
| + content?: never; | |
| + }; | |
| + 404: components["responses"]["NotFound"]; | |
| + 409: components["responses"]["Conflict"]; | |
| + }; | |
| }; | |
| - requestBody?: never; | |
| - responses: { | |
| - /** @description User balances */ | |
| - 200: { | |
| - headers: { | |
| - [name: string]: unknown; | |
| - }; | |
| - content: { | |
| - "application/json": { | |
| - data: components["schemas"]["DashboardUserBalancesData"]; | |
| - }; | |
| - }; | |
| - }; | |
| - 404: components["responses"]["NotFound"]; | |
| + getUserBalances: { | |
| + parameters: { | |
| + query?: { | |
| + /** @description Optional ERC-20 token address to filter by */ | |
| + token?: string; | |
| + }; | |
| + header?: never; | |
| + path: { | |
| + projectId: components["parameters"]["ProjectId"]; | |
| + userAddress: components["parameters"]["UserAddress"]; | |
| + }; | |
| + cookie?: never; | |
| + }; | |
| + requestBody?: never; | |
| + responses: { | |
| + /** @description User balances */ | |
| + 200: { | |
| + headers: { | |
| + [name: string]: unknown; | |
| + }; | |
| + content: { | |
| + "application/json": { | |
| + data: components["schemas"]["DashboardUserBalancesData"]; | |
| + }; | |
| + }; | |
| + }; | |
| + 404: components["responses"]["NotFound"]; | |
| + }; | |
| }; | |
| - }; | |
| } | |
| diff --git a/frontend/dashboard/lib/api/ratelimit.ts b/frontend/dashboard/lib/api/ratelimit.ts | |
| new file mode 100644 | |
| index 0000000..c94de72 | |
| --- /dev/null | |
| +++ b/frontend/dashboard/lib/api/ratelimit.ts | |
| @@ -0,0 +1,23 @@ | |
| +import { Ratelimit } from "@upstash/ratelimit"; | |
| +import { Redis } from "@upstash/redis"; | |
| + | |
| +export const projectCreationLimiter = new Ratelimit({ | |
| + redis: Redis.fromEnv(), | |
| + limiter: Ratelimit.slidingWindow(10, "60s"), | |
| + analytics: true, | |
| + prefix: "ratelimit:project_create", | |
| +}); | |
| + | |
| +export const apiKeyCreationLimiter = new Ratelimit({ | |
| + redis: Redis.fromEnv(), | |
| + limiter: Ratelimit.slidingWindow(20, "60s"), | |
| + analytics: true, | |
| + prefix: "ratelimit:api_key_create", | |
| +}); | |
| + | |
| +export const apiKeyDisableLimiter = new Ratelimit({ | |
| + redis: Redis.fromEnv(), | |
| + limiter: Ratelimit.slidingWindow(20, "60s"), | |
| + analytics: true, | |
| + prefix: "ratelimit:api_key_disable", | |
| +}); | |
| diff --git a/frontend/dashboard/lib/api/resolvers.ts b/frontend/dashboard/lib/api/resolvers.ts | |
| index 7b47421..978d1a4 100644 | |
| --- a/frontend/dashboard/lib/api/resolvers.ts | |
| +++ b/frontend/dashboard/lib/api/resolvers.ts | |
| @@ -47,11 +47,15 @@ export const listProjects = cache(async (): Promise<Project[]> => { | |
| /** | |
| * Resolve a project by slug for the current tenant. Returns null when | |
| * there is no match — callers decide whether to 404 or redirect. | |
| + * FE-01 fix: filters by both slug AND tenant_slug to prevent cross-tenant project access. | |
| */ | |
| export const resolveProjectBySlug = cache( | |
| - async (slug: string): Promise<Project | null> => { | |
| + async (slug: string, tenantSlug: string): Promise<Project | null> => { | |
| const projects = await listProjects(); | |
| - return projects.find((p) => p.slug === slug) ?? null; | |
| + return ( | |
| + projects.find((p) => p.slug === slug && p.tenant_slug === tenantSlug) ?? | |
| + null | |
| + ); | |
| }, | |
| ); | |
| diff --git a/frontend/dashboard/package.json b/frontend/dashboard/package.json | |
| index 9785c2d..1021b06 100644 | |
| --- a/frontend/dashboard/package.json | |
| +++ b/frontend/dashboard/package.json | |
| @@ -12,12 +12,14 @@ | |
| "gen:dashboard": "openapi-typescript ../../protocol/openapi/dashboard.yaml -o lib/api/dashboard.d.ts" | |
| }, | |
| "dependencies": { | |
| - "@clerk/nextjs": "^6.17.0", | |
| + "@clerk/nextjs": "^6.39.3", | |
| "@clerk/ui": "^1.6.1", | |
| + "@upstash/ratelimit": "^2.0.5", | |
| + "@upstash/redis": "^1.34.5", | |
| "class-variance-authority": "^0.7.1", | |
| "clsx": "^2.1.1", | |
| "lucide-react": "^0.471.0", | |
| - "next": "15.5.4", | |
| + "next": "15.5.16", | |
| "openapi-fetch": "^0.13.8", | |
| "radix-ui": "^1.4.3", | |
| "react": "19.2.3", | |
| @@ -33,7 +35,7 @@ | |
| "@types/react": "^19.2.0", | |
| "@types/react-dom": "^19.2.0", | |
| "eslint": "^9.19.0", | |
| - "eslint-config-next": "15.5.4", | |
| + "eslint-config-next": "15.5.16", | |
| "openapi-typescript": "^7.6.1", | |
| "postcss": "^8.5.1", | |
| "tailwindcss": "^4.1.8", | |
| diff --git a/infra/fly/entrypoint.sh b/infra/fly/entrypoint.sh | |
| index 7775841..ffc2680 100755 | |
| --- a/infra/fly/entrypoint.sh | |
| +++ b/infra/fly/entrypoint.sh | |
| @@ -46,7 +46,7 @@ if [ -n "${FLY_APP_NAME:-}" ] && [ "$migrate_only" -eq 0 ]; then | |
| # ── Grafana Alloy ──────────────────────────────────────────────────────────── | |
| # Start Alloy in the background when Grafana Cloud credentials are present. | |
| # Alloy scrapes :9091 (metrics) and tails /var/log/unlink/engine.log (logs). | |
| - if [ -n "${GRAFANA_CLOUD_API_KEY:-}" ]; then | |
| + if [ -n "${GRAFANA_CLOUD_API_KEY:-}" ] && [ "${GRAFANA_CLOUD_API_KEY}" != "" ]; then | |
| : "${ENVIRONMENT_NAME:?ENVIRONMENT_NAME must be set when Grafana Cloud credentials are present}" | |
| # Bash as PID 1 does not forward signals by default. Trap SIGTERM/INT and | |
| # relay to the whole process group (engine + tee + alloy) so graceful | |
| @@ -70,7 +70,7 @@ if [ -n "${FLY_APP_NAME:-}" ] && [ "$migrate_only" -eq 0 ]; then | |
| # Shell stays as PID 1 (no exec) to keep Alloy alive for the engine's lifetime. | |
| # If Alloy crashes independently the engine continues running but goes dark on | |
| # telemetry — acceptable pre-GTM; restart policy handles recovery. | |
| - if [ -n "${GRAFANA_CLOUD_API_KEY:-}" ]; then | |
| + if [ -n "${GRAFANA_CLOUD_API_KEY:-}" ] && [ "${GRAFANA_CLOUD_API_KEY}" != "" ]; then | |
| gosu unlink /usr/local/bin/engine "$@" 2>&1 | tee /var/log/unlink/engine.log | |
| exit "${PIPESTATUS[0]}" | |
| fi | |
| diff --git a/infra/local/devstack.yml b/infra/local/devstack.yml | |
| index e238a83..1a4c31d 100644 | |
| --- a/infra/local/devstack.yml | |
| +++ b/infra/local/devstack.yml | |
| @@ -15,7 +15,7 @@ services: | |
| POSTGRES_PASSWORD: postgres | |
| POSTGRES_DB: unlink_dev | |
| ports: | |
| - - "${DEVSTACK_PG_PORT:-5432}:5432" | |
| + - "127.0.0.1:${DEVSTACK_PG_PORT:-5432}:5432" | |
| volumes: | |
| - devstack-pgdata:/var/lib/postgresql/data | |
| healthcheck: | |
| @@ -27,9 +27,9 @@ services: | |
| anvil: | |
| image: ghcr.io/foundry-rs/foundry:latest | |
| container_name: unlink-anvil | |
| - entrypoint: ["anvil", "--host", "0.0.0.0", "--port", "8545", "--silent"] | |
| + entrypoint: ["anvil", "--host", "127.0.0.1", "--port", "8545", "--silent"] | |
| ports: | |
| - - "8545:8545" | |
| + - "127.0.0.1:8545:8545" | |
| healthcheck: | |
| test: | |
| ["CMD-SHELL", "cast chain-id --rpc-url http://127.0.0.1:8545 || exit 1"] | |
| @@ -48,7 +48,7 @@ services: | |
| anvil: | |
| condition: service_healthy | |
| ports: | |
| - - "3030:3030" | |
| + - "127.0.0.1:3030:3030" | |
| profiles: ["docker-engine"] | |
| env_file: ../../.env | |
| environment: | |
| diff --git a/infra/local/docker-compose.yml b/infra/local/docker-compose.yml | |
| index 73e50ab..b00818f 100644 | |
| --- a/infra/local/docker-compose.yml | |
| +++ b/infra/local/docker-compose.yml | |
| @@ -2,7 +2,7 @@ services: | |
| prometheus: | |
| image: prom/prometheus:latest | |
| ports: | |
| - - "9090:9090" | |
| + - "127.0.0.1:9090:9090" | |
| volumes: | |
| - ./prometheus.yml:/etc/prometheus/prometheus.yml:ro | |
| - prometheus-data:/prometheus | |
| @@ -12,7 +12,7 @@ services: | |
| grafana: | |
| image: grafana/grafana:latest | |
| ports: | |
| - - "3001:3000" | |
| + - "127.0.0.1:3001:3000" | |
| environment: | |
| GF_AUTH_ANONYMOUS_ENABLED: "true" | |
| GF_AUTH_ANONYMOUS_ORG_ROLE: Admin | |
| @@ -27,7 +27,7 @@ services: | |
| loki: | |
| image: grafana/loki:latest | |
| ports: | |
| - - "3100:3100" | |
| + - "127.0.0.1:3100:3100" | |
| volumes: | |
| - loki-data:/loki | |
| diff --git a/infra/local/promtail.yml b/infra/local/promtail.yml | |
| index f2c9103..1c6d2e5 100644 | |
| --- a/infra/local/promtail.yml | |
| +++ b/infra/local/promtail.yml | |
| @@ -1,9 +1,10 @@ | |
| server: | |
| http_listen_port: 9080 | |
| + http_listen_address: 127.0.0.1 | |
| grpc_listen_port: 0 | |
| positions: | |
| - filename: /tmp/positions.yaml | |
| + filename: /var/lib/promtail/positions.yaml | |
| clients: | |
| - url: http://loki:3100/loki/api/v1/push | |
| diff --git a/justfile b/justfile | |
| index cd9df9c..74727a6 100644 | |
| --- a/justfile | |
| +++ b/justfile | |
| @@ -315,8 +315,9 @@ setup-zk *args: | |
| export-verifiers *args: | |
| bash protocol/zk/scripts/codegen.sh export-verifiers {{args}} | |
| -# Run ZK circuit tests | |
| +# Run ZK circuit tests (preflight checks verify artifacts first) | |
| test-zk: | |
| + bash protocol/zk/scripts/preflight-test-zk.sh | |
| pnpm --filter "@unlink/zk" test | |
| # Check downloaded ZK artifacts against the current circuit source. | |
| diff --git a/package.json b/package.json | |
| index b1bcdaf..1bed3b2 100644 | |
| --- a/package.json | |
| +++ b/package.json | |
| @@ -17,6 +17,9 @@ | |
| ], | |
| "patchedDependencies": { | |
| "@zk-kit/eddsa-poseidon@1.1.0": "patches/@zk-kit__eddsa-poseidon@1.1.0.patch" | |
| + }, | |
| + "overrides": { | |
| + "snarkjs": "^0.7.6" | |
| } | |
| }, | |
| "devDependencies": { | |
| diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml | |
| index 3c9c97c..0a123c5 100644 | |
| --- a/pnpm-lock.yaml | |
| +++ b/pnpm-lock.yaml | |
| @@ -4,6 +4,9 @@ settings: | |
| autoInstallPeers: true | |
| excludeLinksFromLockfile: false | |
| +overrides: | |
| + snarkjs: ^0.7.6 | |
| + | |
| patchedDependencies: | |
| '@zk-kit/eddsa-poseidon@1.1.0': | |
| hash: da03402d91a1d46b3566e739c80afe46ec934553887209991698a886268dbe10 | |
| @@ -44,11 +47,17 @@ importers: | |
| frontend/dashboard: | |
| dependencies: | |
| '@clerk/nextjs': | |
| - specifier: ^6.17.0 | |
| - version: 6.39.2(next@15.5.4(@babel/core@7.29.0)(react-dom@19.2.3(react@19.2.3))(react@19.2.3))(react-dom@19.2.3(react@19.2.3))(react@19.2.3) | |
| + specifier: ^6.39.3 | |
| + version: 6.39.3(next@15.5.16(@babel/core@7.29.0)(react-dom@19.2.3(react@19.2.3))(react@19.2.3))(react-dom@19.2.3(react@19.2.3))(react@19.2.3) | |
| '@clerk/ui': | |
| specifier: ^1.6.1 | |
| version: 1.6.1(@solana/web3.js@1.98.4(bufferutil@4.1.0)(typescript@5.9.3)(utf-8-validate@6.0.6))(@types/react@19.2.14)(bs58@6.0.0)(react-dom@19.2.3(react@19.2.3))(react-native@0.85.1(@babel/core@7.29.0)(@types/react@19.2.14)(bufferutil@4.1.0)(react@19.2.3)(utf-8-validate@6.0.6))(react@19.2.3)(typescript@5.9.3) | |
| + '@upstash/ratelimit': | |
| + specifier: ^2.0.5 | |
| + version: 2.0.8(@upstash/redis@1.38.0) | |
| + '@upstash/redis': | |
| + specifier: ^1.34.5 | |
| + version: 1.38.0 | |
| class-variance-authority: | |
| specifier: ^0.7.1 | |
| version: 0.7.1 | |
| @@ -59,8 +68,8 @@ importers: | |
| specifier: ^0.471.0 | |
| version: 0.471.2(react@19.2.3) | |
| next: | |
| - specifier: 15.5.4 | |
| - version: 15.5.4(@babel/core@7.29.0)(react-dom@19.2.3(react@19.2.3))(react@19.2.3) | |
| + specifier: 15.5.16 | |
| + version: 15.5.16(@babel/core@7.29.0)(react-dom@19.2.3(react@19.2.3))(react@19.2.3) | |
| openapi-fetch: | |
| specifier: ^0.13.8 | |
| version: 0.13.8 | |
| @@ -102,8 +111,8 @@ importers: | |
| specifier: ^9.19.0 | |
| version: 9.39.4(jiti@2.6.1) | |
| eslint-config-next: | |
| - specifier: 15.5.4 | |
| - version: 15.5.4(eslint@9.39.4(jiti@2.6.1))(typescript@5.9.3) | |
| + specifier: 15.5.16 | |
| + version: 15.5.16(eslint@9.39.4(jiti@2.6.1))(typescript@5.9.3) | |
| openapi-typescript: | |
| specifier: ^7.6.1 | |
| version: 7.13.0(typescript@5.9.3) | |
| @@ -120,52 +129,6 @@ importers: | |
| specifier: ^5.9.2 | |
| version: 5.9.3 | |
| - frontend/landing: | |
| - dependencies: | |
| - clsx: | |
| - specifier: ^2.1.1 | |
| - version: 2.1.1 | |
| - lucide-react: | |
| - specifier: ^1.14.0 | |
| - version: 1.14.0(react@19.2.6) | |
| - next: | |
| - specifier: 16.2.6 | |
| - version: 16.2.6(@babel/core@7.29.0)(react-dom@19.2.6(react@19.2.6))(react@19.2.6) | |
| - react: | |
| - specifier: 19.2.6 | |
| - version: 19.2.6 | |
| - react-dom: | |
| - specifier: 19.2.6 | |
| - version: 19.2.6(react@19.2.6) | |
| - devDependencies: | |
| - '@tailwindcss/postcss': | |
| - specifier: ^4.3.0 | |
| - version: 4.3.0 | |
| - '@types/node': | |
| - specifier: ^25.7.0 | |
| - version: 25.7.0 | |
| - '@types/react': | |
| - specifier: ^19.2.14 | |
| - version: 19.2.14 | |
| - '@types/react-dom': | |
| - specifier: ^19.2.3 | |
| - version: 19.2.3(@types/react@19.2.14) | |
| - eslint: | |
| - specifier: ^9.39.4 | |
| - version: 9.39.4(jiti@2.6.1) | |
| - eslint-config-next: | |
| - specifier: 16.2.6 | |
| - version: 16.2.6(@typescript-eslint/parser@8.57.0(eslint@9.39.4(jiti@2.6.1))(typescript@5.9.3))(eslint@9.39.4(jiti@2.6.1))(typescript@5.9.3) | |
| - postcss: | |
| - specifier: ^8.5.14 | |
| - version: 8.5.14 | |
| - tailwindcss: | |
| - specifier: ^4.3.0 | |
| - version: 4.3.0 | |
| - typescript: | |
| - specifier: ^5.9.3 | |
| - version: 5.9.3 | |
| - | |
| protocol/cli: | |
| dependencies: | |
| '@unlink-xyz/sdk': | |
| @@ -180,7 +143,7 @@ importers: | |
| devDependencies: | |
| tsup: | |
| specifier: ^8.0.0 | |
| - version: 8.5.1(jiti@2.6.1)(postcss@8.5.14)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3) | |
| + version: 8.5.1(jiti@2.6.1)(postcss@8.5.8)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3) | |
| tsx: | |
| specifier: ^4.0.0 | |
| version: 4.21.0 | |
| @@ -213,7 +176,7 @@ importers: | |
| devDependencies: | |
| '@vitest/coverage-v8': | |
| specifier: ^3.2.4 | |
| - version: 3.2.4(vitest@3.2.4(@types/node@25.7.0)(jiti@2.6.1)(jsdom@26.1.0(bufferutil@4.1.0)(utf-8-validate@6.0.6))(lightningcss@1.32.0)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3)) | |
| + version: 3.2.4(vitest@3.2.4(@types/node@25.5.0)(jiti@2.6.1)(jsdom@26.1.0(bufferutil@4.1.0)(utf-8-validate@6.0.6))(lightningcss@1.32.0)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3)) | |
| '@zk-kit/eddsa-poseidon': | |
| specifier: ^1.1.0 | |
| version: 1.1.0(patch_hash=da03402d91a1d46b3566e739c80afe46ec934553887209991698a886268dbe10) | |
| @@ -225,10 +188,10 @@ importers: | |
| version: 7.13.0(typescript@5.9.3) | |
| tsup: | |
| specifier: ^8.0.0 | |
| - version: 8.5.1(jiti@2.6.1)(postcss@8.5.14)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3) | |
| + version: 8.5.1(jiti@2.6.1)(postcss@8.5.8)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3) | |
| vitest: | |
| specifier: ^3.0.0 | |
| - version: 3.2.4(@types/node@25.7.0)(jiti@2.6.1)(jsdom@26.1.0(bufferutil@4.1.0)(utf-8-validate@6.0.6))(lightningcss@1.32.0)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3) | |
| + version: 3.2.4(@types/node@25.5.0)(jiti@2.6.1)(jsdom@26.1.0(bufferutil@4.1.0)(utf-8-validate@6.0.6))(lightningcss@1.32.0)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3) | |
| protocol/zk: | |
| dependencies: | |
| @@ -239,7 +202,7 @@ importers: | |
| specifier: ^2.0.5 | |
| version: 2.0.5 | |
| snarkjs: | |
| - specifier: ^0.7.5 | |
| + specifier: ^0.7.6 | |
| version: 0.7.6 | |
| devDependencies: | |
| '@zk-kit/eddsa-poseidon': | |
| @@ -253,7 +216,7 @@ importers: | |
| version: 4.21.0 | |
| vitest: | |
| specifier: ^4.0.1 | |
| - version: 4.1.0(@types/node@25.7.0)(jsdom@26.1.0(bufferutil@4.1.0)(utf-8-validate@6.0.6))(vite@8.0.0(@types/node@25.7.0)(esbuild@0.27.4)(jiti@2.6.1)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3)) | |
| + version: 4.1.0(@types/node@25.5.0)(jsdom@26.1.0(bufferutil@4.1.0)(utf-8-validate@6.0.6))(vite@8.0.0(@types/node@25.5.0)(esbuild@0.27.4)(jiti@2.6.1)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3)) | |
| packages: | |
| @@ -349,12 +312,12 @@ packages: | |
| resolution: {integrity: sha512-6zABk/ECA/QYSCQ1NGiVwwbQerUCZ+TQbp64Q3AgmfNvurHH0j8TtXa1qbShXA6qqkpAj4V5W8pP6mLe1mcMqA==} | |
| engines: {node: '>=18'} | |
| - '@clerk/backend@2.33.2': | |
| - resolution: {integrity: sha512-5nNPTdSLCTt7yVvMdd5CoEYZXVQhA9i0C50PxmAOjApYDIEfASedP9KXRb+YARiDrOSHQg0qFJhWUnujaG3hpw==} | |
| + '@clerk/backend@2.33.3': | |
| + resolution: {integrity: sha512-cgkFVEYFG2nZn4QDuYBhiAwPtMdo8Yj7DAtq/SBQ5C/ainh3uxNRDgUj4bFn52qJkWLiCkraYJIw1b8dEUbUBg==} | |
| engines: {node: '>=18.17.0'} | |
| - '@clerk/clerk-react@5.61.5': | |
| - resolution: {integrity: sha512-MKVEsvRR47WlizFki5BPjLIm1TPbJju4m2CNJGzrRqhEMide0Yjm4DGYfh/r2k/uFjOGMWfSJ7EToM1y2AQ5rg==} | |
| + '@clerk/clerk-react@5.61.6': | |
| + resolution: {integrity: sha512-OiyBlrnkRr9IhZtPd7EwlzhYScBpvNKJ8lgg7Uw6JElzJYz854IeQaez5mAfpiib3LcW/Dn53E2PQhagcuLJ3Q==} | |
| engines: {node: '>=18.17.0'} | |
| peerDependencies: | |
| react: ^18.0.0 || ~19.0.3 || ~19.1.4 || ~19.2.3 || ~19.3.0-0 | |
| @@ -364,16 +327,16 @@ packages: | |
| resolution: {integrity: sha512-S97xj8Q3RreP+xwyF0QszEMmp2hHBOnuRz18uD3H3c2fZq+9bky579oVmAmnA/ox9sR/CGt6m7VMfzIcAuwnzA==} | |
| engines: {node: '>=20.9.0'} | |
| - '@clerk/nextjs@6.39.2': | |
| - resolution: {integrity: sha512-NTAgvhpntCdQD4KR+4f/KFs8cqd6oyzoE73AoO9w0xKoJbTB8IIIPG+CtdIw+mx7z4JqbQATKWZbMPGeZbZYCw==} | |
| + '@clerk/nextjs@6.39.3': | |
| + resolution: {integrity: sha512-a64lJ1IlV1uA7eEe8DOx+v2bkNOhnTsNlB5THP/xkHvynHqZhc74Yt05sm1vTniWwhJpJspAZ95pCWUX/RVZ2Q==} | |
| engines: {node: '>=18.17.0'} | |
| peerDependencies: | |
| next: ^13.5.7 || ^14.2.25 || ^15.2.3 || ^16 | |
| react: ^18.0.0 || ~19.0.3 || ~19.1.4 || ~19.2.3 || ~19.3.0-0 | |
| react-dom: ^18.0.0 || ~19.0.3 || ~19.1.4 || ~19.2.3 || ~19.3.0-0 | |
| - '@clerk/shared@3.47.4': | |
| - resolution: {integrity: sha512-0O5/zgB5SO26PKarAIw7uj4j+4JsnT2/uiJ7SPI3LQMb62sM+AjDlVadcXuYc+4sY6w1szrAIVepI5Bkv57hnQ==} | |
| + '@clerk/shared@3.47.5': | |
| + resolution: {integrity: sha512-rDVe73/VN2NZXhtrLRHshkUpQDrevAqDRxeXUl2M0IBEBkcl+VMHlV7fep53cVWo0b3gIqLk82pmmi+WoyF/xg==} | |
| engines: {node: '>=18.17.0'} | |
| peerDependencies: | |
| react: ^18.0.0 || ~19.0.3 || ~19.1.4 || ~19.2.3 || ~19.3.0-0 | |
| @@ -396,8 +359,8 @@ packages: | |
| react-dom: | |
| optional: true | |
| - '@clerk/types@4.101.22': | |
| - resolution: {integrity: sha512-74hV9MMw9MzOOSuJNJMFP95XZ2jDfPS1v3pfALS3rSQa+h/lNREU+fLGArzYckEpqNtuF6xy0odg9YqF5BLNhA==} | |
| + '@clerk/types@4.101.23': | |
| + resolution: {integrity: sha512-t5ypYYDkT5TPaNIDjLnYk9GpkJgwNTBiS7h6FuUTjoySQtf7amNDS1A1eOu7NOcVpqiSeKg+0wzGxxcre00kMA==} | |
| engines: {node: '>=18.17.0'} | |
| '@clerk/ui@1.6.1': | |
| @@ -1103,118 +1066,60 @@ packages: | |
| '@napi-rs/wasm-runtime@1.1.1': | |
| resolution: {integrity: sha512-p64ah1M1ld8xjWv3qbvFwHiFVWrq1yFvV4f7w+mzaqiR4IlSgkqhcRdHwsGgomwzBH51sRY4NEowLxnaBjcW/A==} | |
| - '@next/env@15.5.4': | |
| - resolution: {integrity: sha512-27SQhYp5QryzIT5uO8hq99C69eLQ7qkzkDPsk3N+GuS2XgOgoYEeOav7Pf8Tn4drECOVDsDg8oj+/DVy8qQL2A==} | |
| + '@next/env@15.5.16': | |
| + resolution: {integrity: sha512-9QMKolCl+JnJtaRAQSXy4RQrhgfe8W7/G1+Hl3QSB/HZY7zQMzTwPDdTRwwio8BS96ps1MHpHhbS8qxoNV3JIQ==} | |
| - '@next/env@16.2.6': | |
| - resolution: {integrity: sha512-gd8HoHN4ufj73WmR3JmVolrpJR47ILK6LouP5xElPglaVxir6e1a7VzvTvDWkOoPXT9rkkTzyCxBu4yeZfZwcw==} | |
| + '@next/eslint-plugin-next@15.5.16': | |
| + resolution: {integrity: sha512-pXa+4smRrgzea94YeAR8txf2CYg4pc1HkcoLUigrE5a0j70dVdUYMKfsOGCe8ulDSLvqnm2keMoxKss5RxHokg==} | |
| - '@next/eslint-plugin-next@15.5.4': | |
| - resolution: {integrity: sha512-SR1vhXNNg16T4zffhJ4TS7Xn7eq4NfKfcOsRwea7RIAHrjRpI9ALYbamqIJqkAhowLlERffiwk0FMvTLNdnVtw==} | |
| - | |
| - '@next/eslint-plugin-next@16.2.6': | |
| - resolution: {integrity: sha512-Z8l6o4JWKUl755x4R+wogD86KPeU+Ckw4K+SYG4kHeOJtRenDeK+OSbGcqZpDtbwn9DsJVdir2UxmwXuinUbUw==} | |
| - | |
| - '@next/swc-darwin-arm64@15.5.4': | |
| - resolution: {integrity: sha512-nopqz+Ov6uvorej8ndRX6HlxCYWCO3AHLfKK2TYvxoSB2scETOcfm/HSS3piPqc3A+MUgyHoqE6je4wnkjfrOA==} | |
| + '@next/swc-darwin-arm64@15.5.16': | |
| + resolution: {integrity: sha512-wzdER4JZj+31vNkhaZ1Ght3IsNI8DMwj7VqadfIOqJB5sh8FiOqNSopYADQn6mgEPomzDd/DHqBcfo2fmVMYtg==} | |
| engines: {node: '>= 10'} | |
| cpu: [arm64] | |
| os: [darwin] | |
| - '@next/swc-darwin-arm64@16.2.6': | |
| - resolution: {integrity: sha512-ZJGkkcNfYgrrMkqOdZ7zoLa1TOy0qpcMfk/z4Mh/FKUz40gVO+HNQWqmLxf67Z5WB64DRp0dhEbyHfel+6sJUg==} | |
| - engines: {node: '>= 10'} | |
| - cpu: [arm64] | |
| - os: [darwin] | |
| - | |
| - '@next/swc-darwin-x64@15.5.4': | |
| - resolution: {integrity: sha512-QOTCFq8b09ghfjRJKfb68kU9k2K+2wsC4A67psOiMn849K9ZXgCSRQr0oVHfmKnoqCbEmQWG1f2h1T2vtJJ9mA==} | |
| - engines: {node: '>= 10'} | |
| - cpu: [x64] | |
| - os: [darwin] | |
| - | |
| - '@next/swc-darwin-x64@16.2.6': | |
| - resolution: {integrity: sha512-v/YLBHIY132Ced3puBJ7YJKw1lqsCrgcNo2aRJlCEyQrrCeRJlvGlnmxhPxNQI3KE3N1DN5r9TPNPvka3nq5RQ==} | |
| + '@next/swc-darwin-x64@15.5.16': | |
| + resolution: {integrity: sha512-PPTo+cvcanxkuDEuDyZGk28ntmu0WjfkxqlG7hw9Mhsiribs4x1C6h2Culn0cJKqsne1gFjjZRK3ax7WYlSxgg==} | |
| engines: {node: '>= 10'} | |
| cpu: [x64] | |
| os: [darwin] | |
| - '@next/swc-linux-arm64-gnu@15.5.4': | |
| - resolution: {integrity: sha512-eRD5zkts6jS3VfE/J0Kt1VxdFqTnMc3QgO5lFE5GKN3KDI/uUpSyK3CjQHmfEkYR4wCOl0R0XrsjpxfWEA++XA==} | |
| + '@next/swc-linux-arm64-gnu@15.5.16': | |
| + resolution: {integrity: sha512-Jl0IL9P7S8uNl5oI1TqrQmfmLp7OqjWM58000pVnUVIsHrvPP6m9QDW/uNWYUbmd+8IYvc6MTeZKICstBMBpew==} | |
| engines: {node: '>= 10'} | |
| cpu: [arm64] | |
| os: [linux] | |
| libc: [glibc] | |
| - '@next/swc-linux-arm64-gnu@16.2.6': | |
| - resolution: {integrity: sha512-RPOvqlYBbcQjkz9VQQDZ2T2bARIjXZV1KFlt+V2Mr6SW/e4I9fcKsaA0hdyf2FHoTlsV2xnBd5Y912rP/1Ce6w==} | |
| - engines: {node: '>= 10'} | |
| - cpu: [arm64] | |
| - os: [linux] | |
| - libc: [glibc] | |
| - | |
| - '@next/swc-linux-arm64-musl@15.5.4': | |
| - resolution: {integrity: sha512-TOK7iTxmXFc45UrtKqWdZ1shfxuL4tnVAOuuJK4S88rX3oyVV4ZkLjtMT85wQkfBrOOvU55aLty+MV8xmcJR8A==} | |
| - engines: {node: '>= 10'} | |
| - cpu: [arm64] | |
| - os: [linux] | |
| - libc: [musl] | |
| - | |
| - '@next/swc-linux-arm64-musl@16.2.6': | |
| - resolution: {integrity: sha512-URUTu1+dMkxJsPFgm+OeEvq9wf5sujw0EvgYy80TDGHTSLTnIHeqb0Eu8A3sC95IRgjejQL+kC4mw+4yPxiAXA==} | |
| + '@next/swc-linux-arm64-musl@15.5.16': | |
| + resolution: {integrity: sha512-Zf0BIqv/o5uOWfyRkzgGhyV2Tky7HLt0bG+w7XWdaU1JpyX0tltM3TrSfa/Y9c597SJG4CzN47+u2InhgZZ4vg==} | |
| engines: {node: '>= 10'} | |
| cpu: [arm64] | |
| os: [linux] | |
| libc: [musl] | |
| - '@next/swc-linux-x64-gnu@15.5.4': | |
| - resolution: {integrity: sha512-7HKolaj+481FSW/5lL0BcTkA4Ueam9SPYWyN/ib/WGAFZf0DGAN8frNpNZYFHtM4ZstrHZS3LY3vrwlIQfsiMA==} | |
| - engines: {node: '>= 10'} | |
| - cpu: [x64] | |
| - os: [linux] | |
| - libc: [glibc] | |
| - | |
| - '@next/swc-linux-x64-gnu@16.2.6': | |
| - resolution: {integrity: sha512-DOj182mPV8G3UkrayLoREM5YEYI+Dk5wv7Ox9xl1fFibAELEsFD0lDPfHIeILlutMMfdyhlzYPELG3peuKaurw==} | |
| + '@next/swc-linux-x64-gnu@15.5.16': | |
| + resolution: {integrity: sha512-HCDDU1TRLeUDV180QQTWrs5Oa4lIcI7XH9nF0UVUVmYLN/boZ6LqyFtm3814gc1fv+lOVyKaw5B6bVC9BpXTSQ==} | |
| engines: {node: '>= 10'} | |
| cpu: [x64] | |
| os: [linux] | |
| libc: [glibc] | |
| - '@next/swc-linux-x64-musl@15.5.4': | |
| - resolution: {integrity: sha512-nlQQ6nfgN0nCO/KuyEUwwOdwQIGjOs4WNMjEUtpIQJPR2NUfmGpW2wkJln1d4nJ7oUzd1g4GivH5GoEPBgfsdw==} | |
| + '@next/swc-linux-x64-musl@15.5.16': | |
| + resolution: {integrity: sha512-kvXUY1dn5wxKuMkXxQRUbPjEnKxW1PR9uKOm0zpIpj3574+cFfaePhYFmBVtrOuwt+w34OdDzNaJr5Iixf+HBQ==} | |
| engines: {node: '>= 10'} | |
| cpu: [x64] | |
| os: [linux] | |
| libc: [musl] | |
| - '@next/swc-linux-x64-musl@16.2.6': | |
| - resolution: {integrity: sha512-HKQ5SP/V/ub73UvF7n/zeJlxk2kLmtL7Wzrg4WfmkjmNos5onJ2tKu7yZOPdL18A6Svfn3max29ym+ry7NkK4g==} | |
| - engines: {node: '>= 10'} | |
| - cpu: [x64] | |
| - os: [linux] | |
| - libc: [musl] | |
| - | |
| - '@next/swc-win32-arm64-msvc@15.5.4': | |
| - resolution: {integrity: sha512-PcR2bN7FlM32XM6eumklmyWLLbu2vs+D7nJX8OAIoWy69Kef8mfiN4e8TUv2KohprwifdpFKPzIP1njuCjD0YA==} | |
| + '@next/swc-win32-arm64-msvc@15.5.16': | |
| + resolution: {integrity: sha512-zpOQuF+eyENMXRjglp2hZCIrUjTdO37suEBnDn1mX4PXSuetXZDMLpjKOh4dYSw3SiDTnOoOUwBl5i5Elr6nnQ==} | |
| engines: {node: '>= 10'} | |
| cpu: [arm64] | |
| os: [win32] | |
| - '@next/swc-win32-arm64-msvc@16.2.6': | |
| - resolution: {integrity: sha512-LZXpTlPyS5v7HhSmnvsLGP3iIYgYOBnc8r8ArlT55sGHV89bR2HlDdBjWQ+PY6SJMmk8TuVGFuxalnP3k/0Dwg==} | |
| - engines: {node: '>= 10'} | |
| - cpu: [arm64] | |
| - os: [win32] | |
| - | |
| - '@next/swc-win32-x64-msvc@15.5.4': | |
| - resolution: {integrity: sha512-1ur2tSHZj8Px/KMAthmuI9FMp/YFusMMGoRNJaRZMOlSkgvLjzosSdQI0cJAKogdHl3qXUQKL9MGaYvKwA7DXg==} | |
| - engines: {node: '>= 10'} | |
| - cpu: [x64] | |
| - os: [win32] | |
| - | |
| - '@next/swc-win32-x64-msvc@16.2.6': | |
| - resolution: {integrity: sha512-F0+4i0h9J6C4eE3EAPWsoCk7UW/dbzOjyzxY0qnDUOYFu6FFmdZ6l97/XdV3/Nz3VYyO7UWjyEJUXkGqcoXfMA==} | |
| + '@next/swc-win32-x64-msvc@15.5.16': | |
| + resolution: {integrity: sha512-LnwKYpiSmIzXlTq76hMeeIzZoDcFwu848p6H+QBkGFJIbZphgzNUPdHruJcHM/bFnaFeco0l1Frie5I27VKglA==} | |
| engines: {node: '>= 10'} | |
| cpu: [x64] | |
| os: [win32] | |
| @@ -2445,69 +2350,36 @@ packages: | |
| '@tailwindcss/node@4.2.2': | |
| resolution: {integrity: sha512-pXS+wJ2gZpVXqFaUEjojq7jzMpTGf8rU6ipJz5ovJV6PUGmlJ+jvIwGrzdHdQ80Sg+wmQxUFuoW1UAAwHNEdFA==} | |
| - '@tailwindcss/node@4.3.0': | |
| - resolution: {integrity: sha512-aFb4gUhFOgdh9AXo4IzBEOzBkkAxm9VigwDJnMIYv3lcfXCJVesNfbEaBl4BNgVRyid92AmdviqwBUBRKSeY3g==} | |
| - | |
| '@tailwindcss/oxide-android-arm64@4.2.2': | |
| resolution: {integrity: sha512-dXGR1n+P3B6748jZO/SvHZq7qBOqqzQ+yFrXpoOWWALWndF9MoSKAT3Q0fYgAzYzGhxNYOoysRvYlpixRBBoDg==} | |
| engines: {node: '>= 20'} | |
| cpu: [arm64] | |
| os: [android] | |
| - '@tailwindcss/oxide-android-arm64@4.3.0': | |
| - resolution: {integrity: sha512-TJPiq67tKlLuObP6RkwvVGDoxCMBVtDgKkLfa/uyj7/FyxvQwHS+UOnVrXXgbEsfUaMgiVvC4KbJnRr26ho4Ng==} | |
| - engines: {node: '>= 20'} | |
| - cpu: [arm64] | |
| - os: [android] | |
| - | |
| '@tailwindcss/oxide-darwin-arm64@4.2.2': | |
| resolution: {integrity: sha512-iq9Qjr6knfMpZHj55/37ouZeykwbDqF21gPFtfnhCCKGDcPI/21FKC9XdMO/XyBM7qKORx6UIhGgg6jLl7BZlg==} | |
| engines: {node: '>= 20'} | |
| cpu: [arm64] | |
| os: [darwin] | |
| - '@tailwindcss/oxide-darwin-arm64@4.3.0': | |
| - resolution: {integrity: sha512-oMN/WZRb+SO37BmUElEgeEWuU8E/HXRkiODxJxLe1UTHVXLrdVSgfaJV7pSlhRGMSOiXLuxTIjfsF3wYvz8cgQ==} | |
| - engines: {node: '>= 20'} | |
| - cpu: [arm64] | |
| - os: [darwin] | |
| - | |
| '@tailwindcss/oxide-darwin-x64@4.2.2': | |
| resolution: {integrity: sha512-BlR+2c3nzc8f2G639LpL89YY4bdcIdUmiOOkv2GQv4/4M0vJlpXEa0JXNHhCHU7VWOKWT/CjqHdTP8aUuDJkuw==} | |
| engines: {node: '>= 20'} | |
| cpu: [x64] | |
| os: [darwin] | |
| - '@tailwindcss/oxide-darwin-x64@4.3.0': | |
| - resolution: {integrity: sha512-N6CUmu4a6bKVADfw77p+iw6Yd9Q3OBhe0veaDX+QazfuVYlQsHfDgxBrsjQ/IW+zywL8mTrNd0SdJT/zgtvMdA==} | |
| - engines: {node: '>= 20'} | |
| - cpu: [x64] | |
| - os: [darwin] | |
| - | |
| '@tailwindcss/oxide-freebsd-x64@4.2.2': | |
| resolution: {integrity: sha512-YUqUgrGMSu2CDO82hzlQ5qSb5xmx3RUrke/QgnoEx7KvmRJHQuZHZmZTLSuuHwFf0DJPybFMXMYf+WJdxHy/nQ==} | |
| engines: {node: '>= 20'} | |
| cpu: [x64] | |
| os: [freebsd] | |
| - '@tailwindcss/oxide-freebsd-x64@4.3.0': | |
| - resolution: {integrity: sha512-zDL5hBkQdH5C6MpqbK3gQAgP80tsMwSI26vjOzjJtNCMUo0lFgOItzHKBIupOZNQxt3ouPH7RPhvNhiTfCe5CQ==} | |
| - engines: {node: '>= 20'} | |
| - cpu: [x64] | |
| - os: [freebsd] | |
| - | |
| '@tailwindcss/oxide-linux-arm-gnueabihf@4.2.2': | |
| resolution: {integrity: sha512-FPdhvsW6g06T9BWT0qTwiVZYE2WIFo2dY5aCSpjG/S/u1tby+wXoslXS0kl3/KXnULlLr1E3NPRRw0g7t2kgaQ==} | |
| engines: {node: '>= 20'} | |
| cpu: [arm] | |
| os: [linux] | |
| - '@tailwindcss/oxide-linux-arm-gnueabihf@4.3.0': | |
| - resolution: {integrity: sha512-R06HdNi7A7OEoMsf6d4tjZ71RCWnZQPHj2mnotSFURjNLdBC+cIgXQ7l81CqeoiQftjf6OOblxXMInMgN2VzMA==} | |
| - engines: {node: '>= 20'} | |
| - cpu: [arm] | |
| - os: [linux] | |
| - | |
| '@tailwindcss/oxide-linux-arm64-gnu@4.2.2': | |
| resolution: {integrity: sha512-4og1V+ftEPXGttOO7eCmW7VICmzzJWgMx+QXAJRAhjrSjumCwWqMfkDrNu1LXEQzNAwz28NCUpucgQPrR4S2yw==} | |
| engines: {node: '>= 20'} | |
| @@ -2515,13 +2387,6 @@ packages: | |
| os: [linux] | |
| libc: [glibc] | |
| - '@tailwindcss/oxide-linux-arm64-gnu@4.3.0': | |
| - resolution: {integrity: sha512-qTJHELX8jetjhRQHCLilkVLmybpzNQAtaI/gaoVoidn/ufbNDbAo8KlK2J+yPoc8wQxvDxCmh/5lr8nC1+lTbg==} | |
| - engines: {node: '>= 20'} | |
| - cpu: [arm64] | |
| - os: [linux] | |
| - libc: [glibc] | |
| - | |
| '@tailwindcss/oxide-linux-arm64-musl@4.2.2': | |
| resolution: {integrity: sha512-oCfG/mS+/+XRlwNjnsNLVwnMWYH7tn/kYPsNPh+JSOMlnt93mYNCKHYzylRhI51X+TbR+ufNhhKKzm6QkqX8ag==} | |
| engines: {node: '>= 20'} | |
| @@ -2529,13 +2394,6 @@ packages: | |
| os: [linux] | |
| libc: [musl] | |
| - '@tailwindcss/oxide-linux-arm64-musl@4.3.0': | |
| - resolution: {integrity: sha512-Z6sukiQsngnWO+l39X4pPbiWT81IC+PLKF+PHxIlyZbGNb9MODfYlXEVlFvej5BOZInWX01kVyzeLvHsXhfczQ==} | |
| - engines: {node: '>= 20'} | |
| - cpu: [arm64] | |
| - os: [linux] | |
| - libc: [musl] | |
| - | |
| '@tailwindcss/oxide-linux-x64-gnu@4.2.2': | |
| resolution: {integrity: sha512-rTAGAkDgqbXHNp/xW0iugLVmX62wOp2PoE39BTCGKjv3Iocf6AFbRP/wZT/kuCxC9QBh9Pu8XPkv/zCZB2mcMg==} | |
| engines: {node: '>= 20'} | |
| @@ -2543,13 +2401,6 @@ packages: | |
| os: [linux] | |
| libc: [glibc] | |
| - '@tailwindcss/oxide-linux-x64-gnu@4.3.0': | |
| - resolution: {integrity: sha512-DRNdQRpSGzRGfARVuVkxvM8Q12nh19l4BF/G7zGA1oe+9wcC6saFBHTISrpIcKzhiXtSrlSrluCfvMuledoCTQ==} | |
| - engines: {node: '>= 20'} | |
| - cpu: [x64] | |
| - os: [linux] | |
| - libc: [glibc] | |
| - | |
| '@tailwindcss/oxide-linux-x64-musl@4.2.2': | |
| resolution: {integrity: sha512-XW3t3qwbIwiSyRCggeO2zxe3KWaEbM0/kW9e8+0XpBgyKU4ATYzcVSMKteZJ1iukJ3HgHBjbg9P5YPRCVUxlnQ==} | |
| engines: {node: '>= 20'} | |
| @@ -2557,13 +2408,6 @@ packages: | |
| os: [linux] | |
| libc: [musl] | |
| - '@tailwindcss/oxide-linux-x64-musl@4.3.0': | |
| - resolution: {integrity: sha512-Z0IADbDo8bh6I7h2IQMx601AdXBLfFpEdUotft86evd/8ZPflZe9COPO8Q1vw+pfLWIUo9zN/JGZvwuAJqduqg==} | |
| - engines: {node: '>= 20'} | |
| - cpu: [x64] | |
| - os: [linux] | |
| - libc: [musl] | |
| - | |
| '@tailwindcss/oxide-wasm32-wasi@4.2.2': | |
| resolution: {integrity: sha512-eKSztKsmEsn1O5lJ4ZAfyn41NfG7vzCg496YiGtMDV86jz1q/irhms5O0VrY6ZwTUkFy/EKG3RfWgxSI3VbZ8Q==} | |
| engines: {node: '>=14.0.0'} | |
| @@ -2576,56 +2420,25 @@ packages: | |
| - '@emnapi/wasi-threads' | |
| - tslib | |
| - '@tailwindcss/oxide-wasm32-wasi@4.3.0': | |
| - resolution: {integrity: sha512-HNZGOUxEmElksYR7S6sC5jTeNGpobAsy9u7Gu0AskJ8/20FR9GqebUyB+HBcU/ax6BHuiuJi+Oda4B+YX6H1yA==} | |
| - engines: {node: '>=14.0.0'} | |
| - cpu: [wasm32] | |
| - bundledDependencies: | |
| - - '@napi-rs/wasm-runtime' | |
| - - '@emnapi/core' | |
| - - '@emnapi/runtime' | |
| - - '@tybys/wasm-util' | |
| - - '@emnapi/wasi-threads' | |
| - - tslib | |
| - | |
| '@tailwindcss/oxide-win32-arm64-msvc@4.2.2': | |
| resolution: {integrity: sha512-qPmaQM4iKu5mxpsrWZMOZRgZv1tOZpUm+zdhhQP0VhJfyGGO3aUKdbh3gDZc/dPLQwW4eSqWGrrcWNBZWUWaXQ==} | |
| engines: {node: '>= 20'} | |
| cpu: [arm64] | |
| os: [win32] | |
| - '@tailwindcss/oxide-win32-arm64-msvc@4.3.0': | |
| - resolution: {integrity: sha512-Pe+RPVTi1T+qymuuRpcdvwSVZjnll/f7n8gBxMMh3xLTctMDKqpdfGimbMyioqtLhUYZxdJ9wGNhV7MKHvgZsQ==} | |
| - engines: {node: '>= 20'} | |
| - cpu: [arm64] | |
| - os: [win32] | |
| - | |
| '@tailwindcss/oxide-win32-x64-msvc@4.2.2': | |
| resolution: {integrity: sha512-1T/37VvI7WyH66b+vqHj/cLwnCxt7Qt3WFu5Q8hk65aOvlwAhs7rAp1VkulBJw/N4tMirXjVnylTR72uI0HGcA==} | |
| engines: {node: '>= 20'} | |
| cpu: [x64] | |
| os: [win32] | |
| - '@tailwindcss/oxide-win32-x64-msvc@4.3.0': | |
| - resolution: {integrity: sha512-Mvrf2kXW/yeW/OTezZlCGOirXRcUuLIBx/5Y12BaPM7wJoryG6dfS/NJL8aBPqtTEx/Vm4T4vKzFUcKDT+TKUA==} | |
| - engines: {node: '>= 20'} | |
| - cpu: [x64] | |
| - os: [win32] | |
| - | |
| '@tailwindcss/oxide@4.2.2': | |
| resolution: {integrity: sha512-qEUA07+E5kehxYp9BVMpq9E8vnJuBHfJEC0vPC5e7iL/hw7HR61aDKoVoKzrG+QKp56vhNZe4qwkRmMC0zDLvg==} | |
| engines: {node: '>= 20'} | |
| - '@tailwindcss/oxide@4.3.0': | |
| - resolution: {integrity: sha512-F7HZGBeN9I0/AuuJS5PwcD8xayx5ri5GhjYUDBEVYUkexyA/giwbDNjRVrxSezE3T250OU2K/wp/ltWx3UOefg==} | |
| - engines: {node: '>= 20'} | |
| - | |
| '@tailwindcss/postcss@4.2.2': | |
| resolution: {integrity: sha512-n4goKQbW8RVXIbNKRB/45LzyUqN451deQK0nzIeauVEqjlI49slUlgKYJM2QyUzap/PcpnS7kzSUmPb1sCRvYQ==} | |
| - '@tailwindcss/postcss@4.3.0': | |
| - resolution: {integrity: sha512-Jm05Tjx+9yCLGv5qw1c+84Psds8MnyrEQYCB+FFk2lgGiUjlRqdxke4mVTuYrj2xnVZqKim2Apr5ySuQRYAw/w==} | |
| - | |
| '@tanstack/query-core@5.90.16': | |
| resolution: {integrity: sha512-MvtWckSVufs/ja463/K4PyJeqT+HMlJWtw6PrCpywznd2NSgO3m4KwO9RqbFqGg6iDE8vVMFWMeQI4Io3eEYww==} | |
| @@ -2668,9 +2481,6 @@ packages: | |
| '@types/node@25.5.0': | |
| resolution: {integrity: sha512-jp2P3tQMSxWugkCUKLRPVUpGaL5MVFwF8RDuSRztfwgN1wmqJeMSbKlnEtQqU8UrhTmzEmZdu2I6v2dpp7XIxw==} | |
| - '@types/node@25.7.0': | |
| - resolution: {integrity: sha512-z+pdZyxE+RTQE9AcboAZCb4otwcrvgHD+GlBpPgn0emDVt0ohrTMhAwlr2Wd9nZ+nihhYFxO2pThz3C5qSu2Eg==} | |
| - | |
| '@types/parse-json@4.0.2': | |
| resolution: {integrity: sha512-dISoDXWWQwUquiKsyZ4Ng+HX2KsPL7LyHKHQwgGFEA3IaKac4Obd+h2a/a6waisAoepJlBcx9paWqjA8/HVjCw==} | |
| @@ -2862,6 +2672,18 @@ packages: | |
| cpu: [x64] | |
| os: [win32] | |
| + '@upstash/core-analytics@0.0.10': | |
| + resolution: {integrity: sha512-7qJHGxpQgQr9/vmeS1PktEwvNAF7TI4iJDi8Pu2CFZ9YUGHZH4fOP5TfYlZ4aVxfopnELiE4BS4FBjyK7V1/xQ==} | |
| + engines: {node: '>=16.0.0'} | |
| + | |
| + '@upstash/ratelimit@2.0.8': | |
| + resolution: {integrity: sha512-YSTMBJ1YIxsoPkUMX/P4DDks/xV5YYCswWMamU8ZIfK9ly6ppjRnVOyBhMDXBmzjODm4UQKcxsJPvaeFAijp5w==} | |
| + peerDependencies: | |
| + '@upstash/redis': ^1.34.3 | |
| + | |
| + '@upstash/redis@1.38.0': | |
| + resolution: {integrity: sha512-wu+dZBptlLy0+MCUEoHmzrY/TnmgDey3+c7EbIGwrLqAvkP8yi5MWZHYGIFtAygmL4Bkz2TdFu+eU0vFPncIcg==} | |
| + | |
| '@vitest/coverage-v8@3.2.4': | |
| resolution: {integrity: sha512-EyF9SXU6kS5Ku/U82E259WSnvg6c8KTjppUncuNdm5QHpe17mwREHnjDzozC8x9MZ0xfBUFSaLkRv4TMA75ALQ==} | |
| peerDependencies: | |
| @@ -3114,14 +2936,6 @@ packages: | |
| resolution: {integrity: sha512-qIj0G9wZbMGNLjLmg1PT6v2mE9AH2zlnADJD/2tC6E00hgmhUOfEB6greHPAfLRSufHqROIUTkw6E+M3lH0PTQ==} | |
| engines: {node: '>= 0.4'} | |
| - b4a@1.8.0: | |
| - resolution: {integrity: sha512-qRuSmNSkGQaHwNbM7J78Wwy+ghLEYF1zNrSeMxj4Kgw6y33O3mXcQ6Ie9fRvfU/YnxWkOchPXbaLb73TkIsfdg==} | |
| - peerDependencies: | |
| - react-native-b4a: '*' | |
| - peerDependenciesMeta: | |
| - react-native-b4a: | |
| - optional: true | |
| - | |
| babel-plugin-macros@3.1.0: | |
| resolution: {integrity: sha512-Cg7TFGpIr01vOQNODXOOaGz2NpCU5gl8x1qJFbb6hbZxR7XrcE2vtbAsTAbJ7/xwJtUuJEw8K8Zr/AE0LHlesg==} | |
| engines: {node: '>=10', npm: '>=6'} | |
| @@ -3154,9 +2968,6 @@ packages: | |
| resolution: {integrity: sha512-I6MMLkn+anzNdCUp9hMRyui1HaNEUCco50lxbvNS4+EyXg8lN3nJ48PjPWtbH8UVS9CuMoaKE9U2V3l29DaRQw==} | |
| engines: {node: '>= 8.0.0'} | |
| - blake2b-wasm@2.4.0: | |
| - resolution: {integrity: sha512-S1kwmW2ZhZFFFOghcx73+ZajEfKBqhP82JMssxtLVMxlaPea1p9uoLiUZ5WYyHn0KddwbLc+0vh4wR0KBNoT5w==} | |
| - | |
| blakejs@1.2.1: | |
| resolution: {integrity: sha512-QXUSXI3QVc/gJME0dBpXrag1kbzOqCjCX8/b54ntNyW6sjtoqxqRk3LTmXzaJoh71zMsDCjM+47jS7XiwN/+fQ==} | |
| @@ -3303,10 +3114,6 @@ packages: | |
| resolution: {integrity: sha512-NIxF55hv4nSqQswkAeiOi1r83xy8JldOFDTWiug55KBu9Jnblncd2U6ViHmYgHf01TPZS77NJBhBMKdWj9HQMQ==} | |
| engines: {node: '>=8'} | |
| - circom_runtime@0.1.21: | |
| - resolution: {integrity: sha512-qTkud630B/GK8y76hnOaaS1aNuF6prfV0dTrkeRsiJKnlP1ryQbP2FWLgDOPqn6aKyaPlam+Z+DTbBhkEzh8dA==} | |
| - hasBin: true | |
| - | |
| circom_runtime@0.1.28: | |
| resolution: {integrity: sha512-ACagpQ7zBRLKDl5xRZ4KpmYIcZDUjOiNRuxvXLqhnnlLSVY1Dbvh73TI853nqoR0oEbihtWmMSjgc5f+pXf/jQ==} | |
| hasBin: true | |
| @@ -3320,7 +3127,7 @@ packages: | |
| hasBin: true | |
| peerDependencies: | |
| '@types/snarkjs': ^0.7.x | |
| - snarkjs: ^0.7.x | |
| + snarkjs: ^0.7.6 | |
| circomlib@2.0.5: | |
| resolution: {integrity: sha512-O7NQ8OS+J4eshBuoy36z/TwQU0YHw8W3zxZcs4hVwpEll3e4hDm3mgkIPqItN8FDeLEKZFK3YeT/+k8TiLF3/A==} | |
| @@ -3551,10 +3358,6 @@ packages: | |
| resolution: {integrity: sha512-Qohcme7V1inbAfvjItgw0EaxVX5q2rdVEZHRBrEQdRZTssLDGsL8Lwrznl8oQ/6kuTJONLaDcGjkNP247XEhcA==} | |
| engines: {node: '>=10.13.0'} | |
| - enhanced-resolve@5.21.3: | |
| - resolution: {integrity: sha512-QyL119InA+XXEkNLNTPCXPugSvOfhwv0JOlGNzvxs0hZaiHLNvXSpudUWsOlsXGWJh8G6ckCScEkVHfX3kw/2Q==} | |
| - engines: {node: '>=10.13.0'} | |
| - | |
| entities@6.0.1: | |
| resolution: {integrity: sha512-aN97NXWF6AWBTahfVOIrB/NShkzi5H7F9r1s9mD3cDj4Ko5f2qhhVoYMibXF7GlLveb/D2ioWay8lxI97Ven3g==} | |
| engines: {node: '>=0.12'} | |
| @@ -3635,8 +3438,8 @@ packages: | |
| engines: {node: '>=6.0'} | |
| hasBin: true | |
| - eslint-config-next@15.5.4: | |
| - resolution: {integrity: sha512-BzgVVuT3kfJes8i2GHenC1SRJ+W3BTML11lAOYFOOPzrk2xp66jBOAGEFRw+3LkYCln5UzvFsLhojrshb5Zfaw==} | |
| + eslint-config-next@15.5.16: | |
| + resolution: {integrity: sha512-9fi/wwYuBQSm2vHDVE8PMPsKIR/xXVOlwMrRp16qra6S/LQVhZ452cjnkPZb6PN/SZ3yJUQp1L4bTYoublvBKw==} | |
| peerDependencies: | |
| eslint: ^7.23.0 || ^8.0.0 || ^9.0.0 | |
| typescript: '>=3.3.1' | |
| @@ -3644,15 +3447,6 @@ packages: | |
| typescript: | |
| optional: true | |
| - eslint-config-next@16.2.6: | |
| - resolution: {integrity: sha512-z2ELYSkyrrJ6cuunTU8vhsT/RpouPkjaSah06nVW6Rg2Hpg0Vs8s497/e5s8G8qtdp4ccsiovz5P1rv+5VSW2Q==} | |
| - peerDependencies: | |
| - eslint: '>=9.0.0' | |
| - typescript: '>=3.3.1' | |
| - peerDependenciesMeta: | |
| - typescript: | |
| - optional: true | |
| - | |
| eslint-import-resolver-node@0.3.10: | |
| resolution: {integrity: sha512-tRrKqFyCaKict5hOd244sL6EQFNycnMQnBe+j8uqGNXYzsImGbGUU4ibtoaBmv5FLwJwcFJNeg1GeVjQfbMrDQ==} | |
| @@ -3712,12 +3506,6 @@ packages: | |
| peerDependencies: | |
| eslint: ^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0-0 || ^9.0.0 | |
| - eslint-plugin-react-hooks@7.1.1: | |
| - resolution: {integrity: sha512-f2I7Gw6JbvCexzIInuSbZpfdQ44D7iqdWX01FKLvrPgqxoE7oMj8clOfto8U6vYiz4yd5oKu39rRSVOe1zRu0g==} | |
| - engines: {node: '>=18'} | |
| - peerDependencies: | |
| - eslint: ^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0-0 || ^9.0.0 || ^10.0.0 | |
| - | |
| eslint-plugin-react@7.37.5: | |
| resolution: {integrity: sha512-Qteup0SqU15kdocexFNAJMvCJEfa2xUKNV4CC1xsVMrIIqEy3SQ/rqyxCWNzfrd3/ldy6HMlD2e0JDVpDg2qIA==} | |
| engines: {node: '>=4'} | |
| @@ -3986,10 +3774,6 @@ packages: | |
| resolution: {integrity: sha512-oahGvuMGQlPw/ivIYBjVSrWAfWLBeku5tpPE2fOPLi+WHffIWbuh2tCjhyQhTBPMf5E9jDEH4FOmTYgYwbKwtQ==} | |
| engines: {node: '>=18'} | |
| - globals@16.4.0: | |
| - resolution: {integrity: sha512-ob/2LcVVaVGCYN+r14cnwnoDPUufjiYgSqRhiFD0Q1iI4Odora5RE8Iv1D24hAz5oMophRGkGz+yuvQmmUMnMw==} | |
| - engines: {node: '>=18'} | |
| - | |
| globalthis@1.0.4: | |
| resolution: {integrity: sha512-DpLKbNU4WylpxJykQujfCcwYWiV/Jhm50Goo0wrVILAv5jOr9d+H+UR3PhSCD2rCCEIg0uc+G+muBTwD54JhDQ==} | |
| engines: {node: '>= 0.4'} | |
| @@ -4031,18 +3815,12 @@ packages: | |
| hermes-compiler@250829098.0.10: | |
| resolution: {integrity: sha512-TcRlZ0/TlyfJqquRFAWoyElVNnkdYRi/sEp4/Qy8/GYxjg8j2cS9D4MjuaQ+qimkmLN7AmO+44IznRf06mAr0w==} | |
| - hermes-estree@0.25.1: | |
| - resolution: {integrity: sha512-0wUoCcLp+5Ev5pDW2OriHC2MJCbwLwuRx+gAqMTOkGKJJiBCLjtrvy4PWUGn6MIVefecRpzoOZ/UV6iGdOr+Cw==} | |
| - | |
| hermes-estree@0.33.3: | |
| resolution: {integrity: sha512-6kzYZHCk8Fy1Uc+t3HGYyJn3OL4aeqKLTyina4UFtWl8I0kSL7OmKThaiX+Uh2f8nGw3mo4Ifxg0M5Zk3/Oeqg==} | |
| hermes-estree@0.35.0: | |
| resolution: {integrity: sha512-xVx5Opwy8Oo1I5yGpVRhCvWL/iV3M+ylksSKVNlxxD90cpDpR/AR1jLYqK8HWihm065a6UI3HeyAmYzwS8NOOg==} | |
| - hermes-parser@0.25.1: | |
| - resolution: {integrity: sha512-6pEjquH3rqaI6cYAXYPcz9MS4rY6R4ngRgrgfDshRptUZIc3lw0MCIJIGDj9++mfySOuPTHB4nrSW99BCvOPIA==} | |
| - | |
| hermes-parser@0.33.3: | |
| resolution: {integrity: sha512-Yg3HgaG4CqgyowtYjX/FsnPAuZdHOqSMtnbpylbptsQ9nwwSKsy6uRWcGO5RK0EqiX12q8HvDWKgeAVajRO5DA==} | |
| @@ -4340,9 +4118,6 @@ packages: | |
| resolution: {integrity: sha512-X2BB11YZtrRqY4EnQcLX5Rh373zbK4alC1FW7D7MBhL2gtcC17cTnr6DmfHZeS0s2rTHjUTMMHfG7gO8SSdw+g==} | |
| engines: {node: '>=0.10.0'} | |
| - js-sha3@0.8.0: | |
| - resolution: {integrity: sha512-gF1cRrHhIzNfToc802P800N8PpXS+evLLXfsVpowqmAFR9uwbi89WvXg2QspOmXL8QL86J4T1EpFu+yUkwJY3Q==} | |
| - | |
| js-tokens@10.0.0: | |
| resolution: {integrity: sha512-lM/UBzQmfJRo9ABXbPWemivdCW8V2G8FHaHdypQaIy523snUjog0W71ayWXTjiR+ixeMyVHN2XcpnTd/liPg/Q==} | |
| @@ -4609,11 +4384,6 @@ packages: | |
| peerDependencies: | |
| react: ^16.5.1 || ^17.0.0 || ^18.0.0 || ^19.0.0 | |
| - lucide-react@1.14.0: | |
| - resolution: {integrity: sha512-+1mdWcfSJVUsaTIjN9zoezmUhfXo5l0vP7ekBMPo3jcS/aIkxHnXqAPsByszMZx/Y8oQBRJxJx5xg+RH3urzxA==} | |
| - peerDependencies: | |
| - react: ^16.5.1 || ^17.0.0 || ^18.0.0 || ^19.0.0 | |
| - | |
| magic-string@0.30.21: | |
| resolution: {integrity: sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==} | |
| @@ -4762,9 +4532,6 @@ packages: | |
| mz@2.7.0: | |
| resolution: {integrity: sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q==} | |
| - nanoassert@2.0.0: | |
| - resolution: {integrity: sha512-7vO7n28+aYO4J+8w96AzhmU8G+Y/xpPDJz/se19ICsqj/momRbb9mh9ZUtkoJ5X3nTnPdhEJyc0qnM6yAsHBaA==} | |
| - | |
| nanoid@3.3.11: | |
| resolution: {integrity: sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==} | |
| engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} | |
| @@ -4782,31 +4549,9 @@ packages: | |
| resolution: {integrity: sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg==} | |
| engines: {node: '>= 0.6'} | |
| - next@15.5.4: | |
| - resolution: {integrity: sha512-xH4Yjhb82sFYQfY3vbkJfgSDgXvBB6a8xPs9i35k6oZJRoQRihZH+4s9Yo2qsWpzBmZ3lPXaJ2KPXLfkvW4LnA==} | |
| + next@15.5.16: | |
| + resolution: {integrity: sha512-aZExBk/V6JCu3NCFc90twdj9L/M3y0+ukeQwUAZbOiqRhAX+h2oMEa0NZFhcpj6HYRYjVS3V2/3xvyOpNnmw7A==} | |
| engines: {node: ^18.18.0 || ^19.8.0 || >= 20.0.0} | |
| - deprecated: This version has a security vulnerability. Please upgrade to a patched version. See https://nextjs.org/blog/CVE-2025-66478 for more details. | |
| - hasBin: true | |
| - peerDependencies: | |
| - '@opentelemetry/api': ^1.1.0 | |
| - '@playwright/test': ^1.51.1 | |
| - babel-plugin-react-compiler: '*' | |
| - react: ^18.2.0 || 19.0.0-rc-de68d2f4-20241204 || ^19.0.0 | |
| - react-dom: ^18.2.0 || 19.0.0-rc-de68d2f4-20241204 || ^19.0.0 | |
| - sass: ^1.3.0 | |
| - peerDependenciesMeta: | |
| - '@opentelemetry/api': | |
| - optional: true | |
| - '@playwright/test': | |
| - optional: true | |
| - babel-plugin-react-compiler: | |
| - optional: true | |
| - sass: | |
| - optional: true | |
| - | |
| - next@16.2.6: | |
| - resolution: {integrity: sha512-qOVgKJg1+At15NpeUP+eJgCHvTCgXsogweq87Ri/Ix7PkqQHg4sdaXmSFqKlgaIXE4kW0g25LE68W87UANlHtw==} | |
| - engines: {node: '>=20.9.0'} | |
| hasBin: true | |
| peerDependencies: | |
| '@opentelemetry/api': ^1.1.0 | |
| @@ -5059,10 +4804,6 @@ packages: | |
| resolution: {integrity: sha512-PS08Iboia9mts/2ygV3eLpY5ghnUcfLV/EXTOW1E2qYxJKGGBUtNjN76FYHnMs36RmARn41bC0AZmn+rR0OVpQ==} | |
| engines: {node: ^10 || ^12 || >=14} | |
| - postcss@8.5.14: | |
| - resolution: {integrity: sha512-SoSL4+OSEtR99LHFZQiJLkT59C5B1amGO1NzTwj7TT1qCUgUO6hxOvzkOYxD+vMrXBM3XJIKzokoERdqQq/Zmg==} | |
| - engines: {node: ^10 || ^12 || >=14} | |
| - | |
| postcss@8.5.8: | |
| resolution: {integrity: sha512-OW/rX8O/jXnm82Ey1k44pObPtdblfiuWnrd8X7GJ7emImCOstunGbXUpp7HdBrFQX6rJzn3sPT397Wp5aCwCHg==} | |
| engines: {node: ^10 || ^12 || >=14} | |
| @@ -5137,11 +4878,6 @@ packages: | |
| peerDependencies: | |
| react: ^19.2.3 | |
| - react-dom@19.2.6: | |
| - resolution: {integrity: sha512-0prMI+hvBbPjsWnxDLxlCGyM8PN6UuWjEUCYmZhO67xIV9Xasa/r/vDnq+Xyq4Lo27g8QSbO5YzARu0D1Sps3g==} | |
| - peerDependencies: | |
| - react: ^19.2.6 | |
| - | |
| react-is@16.13.1: | |
| resolution: {integrity: sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==} | |
| @@ -5200,10 +4936,6 @@ packages: | |
| resolution: {integrity: sha512-Ku/hhYbVjOQnXDZFv2+RibmLFGwFdeeKHFcOTlrt7xplBnya5OGn/hIRDsqDiSUcfORsDC7MPxwork8jBwsIWA==} | |
| engines: {node: '>=0.10.0'} | |
| - react@19.2.6: | |
| - resolution: {integrity: sha512-sfWGGfavi0xr8Pg0sVsyHMAOziVYKgPLNrS7ig+ivMNb3wbCBw3KxtflsGBAwD3gYQlE/AEZsTLgToRrSCjb0Q==} | |
| - engines: {node: '>=0.10.0'} | |
| - | |
| readdirp@4.1.2: | |
| resolution: {integrity: sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg==} | |
| engines: {node: '>= 14.18.0'} | |
| @@ -5380,10 +5112,6 @@ packages: | |
| resolution: {integrity: sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==} | |
| engines: {node: '>=14'} | |
| - snarkjs@0.5.0: | |
| - resolution: {integrity: sha512-KWz8mZ2Y+6wvn6GGkQo6/ZlKwETdAGohd40Lzpwp5TUZCn6N6O4Az1SuX1rw/qREGL6Im+ycb19suCFE8/xaKA==} | |
| - hasBin: true | |
| - | |
| snarkjs@0.7.6: | |
| resolution: {integrity: sha512-4uH1xA5JzVU5jaaWS2fXej3+RC6L5Erhr6INTJtUA27du4Elbh4VXCeeRjB4QiwL6N6y7SNKePw5prTxyEf4Zg==} | |
| hasBin: true | |
| @@ -5564,17 +5292,10 @@ packages: | |
| tailwindcss@4.2.2: | |
| resolution: {integrity: sha512-KWBIxs1Xb6NoLdMVqhbhgwZf2PGBpPEiwOqgI4pFIYbNTfBXiKYyWoTsXgBQ9WFg/OlhnvHaY+AEpW7wSmFo2Q==} | |
| - tailwindcss@4.3.0: | |
| - resolution: {integrity: sha512-y6nxMGB1nMW9R6k96e5gdIFzcfL/gTJRNaqGes1YvkLnPVXzWgbqFF2yLC0T8G774n24cx3Pe8XrKoniCOAH+Q==} | |
| - | |
| tapable@2.3.2: | |
| resolution: {integrity: sha512-1MOpMXuhGzGL5TTCZFItxCc0AARf1EZFQkGqMm7ERKj8+Hgr5oLvJOVFcC+lRmR8hCe2S3jC4T5D7Vg/d7/fhA==} | |
| engines: {node: '>=6'} | |
| - tapable@2.3.3: | |
| - resolution: {integrity: sha512-uxc/zpqFg6x7C8vOE7lh6Lbda8eEL9zmVm/PLeTPBRhh1xCgdWaQ+J1CUieGpIfm2HdtsUpRv+HshiasBMcc6A==} | |
| - engines: {node: '>=6'} | |
| - | |
| terser@5.46.1: | |
| resolution: {integrity: sha512-vzCjQO/rgUuK9sf8VJZvjqiqiHFaZLnOiimmUuOKODxWL8mm/xua7viT7aqX7dgPY60otQjUotzFMmCB4VdmqQ==} | |
| engines: {node: '>=10'} | |
| @@ -5769,6 +5490,9 @@ packages: | |
| resolution: {integrity: sha512-nWJ91DjeOkej/TA8pXQ3myruKpKEYgqvpw9lz4OPHj/NWFNluYrjbz9j01CJ8yKQd2g4jFoOkINCTW2I5LEEyw==} | |
| engines: {node: '>= 0.4'} | |
| + uncrypto@0.1.3: | |
| + resolution: {integrity: sha512-Ql87qFHB3s/De2ClA9e0gsnS6zXG27SkTiSJwjCc9MebbfapQfuPzumMIUMi38ezPZVNFcHI9sUIepeQfw8J8Q==} | |
| + | |
| underscore@1.13.6: | |
| resolution: {integrity: sha512-+A5Sja4HP1M08MaXya7p5LvjuM7K6q/2EaC0+iovj/wOcMsTzMvDFbasi/oSapiwOlt252IqsKqPjCl7huKS0A==} | |
| @@ -5778,9 +5502,6 @@ packages: | |
| undici-types@7.18.2: | |
| resolution: {integrity: sha512-AsuCzffGHJybSaRrmr5eHr81mwJU3kjw6M+uprWvCXiNeN9SOGwQ3Jn8jb8m3Z6izVgknn1R0FTCEAP2QrLY/w==} | |
| - undici-types@7.21.0: | |
| - resolution: {integrity: sha512-w9IMgQrz4O0YN1LtB7K5P63vhlIOvC7opSmouCJ+ZywlPAlO9gIkJ+otk6LvGpAs2wg4econaCz3TvQ9xPoyuQ==} | |
| - | |
| unpipe@1.0.0: | |
| resolution: {integrity: sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==} | |
| engines: {node: '>= 0.8'} | |
| @@ -6225,12 +5946,6 @@ packages: | |
| resolution: {integrity: sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==} | |
| engines: {node: '>=10'} | |
| - zod-validation-error@4.0.2: | |
| - resolution: {integrity: sha512-Q6/nZLe6jxuU80qb/4uJ4t5v2VEZ44lzQjPDhYJNztRQ4wyWc6VF3D3Kb/fAuPetZQnhS3hnajCf9CsWesghLQ==} | |
| - engines: {node: '>=18.0.0'} | |
| - peerDependencies: | |
| - zod: ^3.25.0 || ^4.0.0 | |
| - | |
| zod@3.25.76: | |
| resolution: {integrity: sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ==} | |
| @@ -6360,19 +6075,19 @@ snapshots: | |
| '@bcoe/v8-coverage@1.0.2': {} | |
| - '@clerk/backend@2.33.2(react-dom@19.2.3(react@19.2.3))(react@19.2.3)': | |
| + '@clerk/backend@2.33.3(react-dom@19.2.3(react@19.2.3))(react@19.2.3)': | |
| dependencies: | |
| - '@clerk/shared': 3.47.4(react-dom@19.2.3(react@19.2.3))(react@19.2.3) | |
| - '@clerk/types': 4.101.22(react-dom@19.2.3(react@19.2.3))(react@19.2.3) | |
| + '@clerk/shared': 3.47.5(react-dom@19.2.3(react@19.2.3))(react@19.2.3) | |
| + '@clerk/types': 4.101.23(react-dom@19.2.3(react@19.2.3))(react@19.2.3) | |
| standardwebhooks: 1.0.0 | |
| tslib: 2.8.1 | |
| transitivePeerDependencies: | |
| - react | |
| - react-dom | |
| - '@clerk/clerk-react@5.61.5(react-dom@19.2.3(react@19.2.3))(react@19.2.3)': | |
| + '@clerk/clerk-react@5.61.6(react-dom@19.2.3(react@19.2.3))(react@19.2.3)': | |
| dependencies: | |
| - '@clerk/shared': 3.47.4(react-dom@19.2.3(react@19.2.3))(react@19.2.3) | |
| + '@clerk/shared': 3.47.5(react-dom@19.2.3(react@19.2.3))(react@19.2.3) | |
| react: 19.2.3 | |
| react-dom: 19.2.3(react@19.2.3) | |
| tslib: 2.8.1 | |
| @@ -6384,19 +6099,19 @@ snapshots: | |
| - react | |
| - react-dom | |
| - '@clerk/nextjs@6.39.2(next@15.5.4(@babel/core@7.29.0)(react-dom@19.2.3(react@19.2.3))(react@19.2.3))(react-dom@19.2.3(react@19.2.3))(react@19.2.3)': | |
| + '@clerk/nextjs@6.39.3(next@15.5.16(@babel/core@7.29.0)(react-dom@19.2.3(react@19.2.3))(react@19.2.3))(react-dom@19.2.3(react@19.2.3))(react@19.2.3)': | |
| dependencies: | |
| - '@clerk/backend': 2.33.2(react-dom@19.2.3(react@19.2.3))(react@19.2.3) | |
| - '@clerk/clerk-react': 5.61.5(react-dom@19.2.3(react@19.2.3))(react@19.2.3) | |
| - '@clerk/shared': 3.47.4(react-dom@19.2.3(react@19.2.3))(react@19.2.3) | |
| - '@clerk/types': 4.101.22(react-dom@19.2.3(react@19.2.3))(react@19.2.3) | |
| - next: 15.5.4(@babel/core@7.29.0)(react-dom@19.2.3(react@19.2.3))(react@19.2.3) | |
| + '@clerk/backend': 2.33.3(react-dom@19.2.3(react@19.2.3))(react@19.2.3) | |
| + '@clerk/clerk-react': 5.61.6(react-dom@19.2.3(react@19.2.3))(react@19.2.3) | |
| + '@clerk/shared': 3.47.5(react-dom@19.2.3(react@19.2.3))(react@19.2.3) | |
| + '@clerk/types': 4.101.23(react-dom@19.2.3(react@19.2.3))(react@19.2.3) | |
| + next: 15.5.16(@babel/core@7.29.0)(react-dom@19.2.3(react@19.2.3))(react@19.2.3) | |
| react: 19.2.3 | |
| react-dom: 19.2.3(react@19.2.3) | |
| server-only: 0.0.1 | |
| tslib: 2.8.1 | |
| - '@clerk/shared@3.47.4(react-dom@19.2.3(react@19.2.3))(react@19.2.3)': | |
| + '@clerk/shared@3.47.5(react-dom@19.2.3(react@19.2.3))(react@19.2.3)': | |
| dependencies: | |
| csstype: 3.1.3 | |
| dequal: 2.0.3 | |
| @@ -6419,9 +6134,9 @@ snapshots: | |
| react: 19.2.3 | |
| react-dom: 19.2.3(react@19.2.3) | |
| - '@clerk/types@4.101.22(react-dom@19.2.3(react@19.2.3))(react@19.2.3)': | |
| + '@clerk/types@4.101.23(react-dom@19.2.3(react@19.2.3))(react@19.2.3)': | |
| dependencies: | |
| - '@clerk/shared': 3.47.4(react-dom@19.2.3(react@19.2.3))(react@19.2.3) | |
| + '@clerk/shared': 3.47.5(react-dom@19.2.3(react@19.2.3))(react@19.2.3) | |
| transitivePeerDependencies: | |
| - react | |
| - react-dom | |
| @@ -6944,7 +6659,7 @@ snapshots: | |
| '@jest/schemas': 29.6.3 | |
| '@types/istanbul-lib-coverage': 2.0.6 | |
| '@types/istanbul-reports': 3.0.4 | |
| - '@types/node': 25.7.0 | |
| + '@types/node': 25.5.0 | |
| '@types/yargs': 17.0.35 | |
| chalk: 4.1.2 | |
| @@ -6986,64 +6701,34 @@ snapshots: | |
| '@tybys/wasm-util': 0.10.1 | |
| optional: true | |
| - '@next/env@15.5.4': {} | |
| - | |
| - '@next/env@16.2.6': {} | |
| - | |
| - '@next/eslint-plugin-next@15.5.4': | |
| - dependencies: | |
| - fast-glob: 3.3.1 | |
| + '@next/env@15.5.16': {} | |
| - '@next/eslint-plugin-next@16.2.6': | |
| + '@next/eslint-plugin-next@15.5.16': | |
| dependencies: | |
| fast-glob: 3.3.1 | |
| - '@next/swc-darwin-arm64@15.5.4': | |
| - optional: true | |
| - | |
| - '@next/swc-darwin-arm64@16.2.6': | |
| - optional: true | |
| - | |
| - '@next/swc-darwin-x64@15.5.4': | |
| - optional: true | |
| - | |
| - '@next/swc-darwin-x64@16.2.6': | |
| - optional: true | |
| - | |
| - '@next/swc-linux-arm64-gnu@15.5.4': | |
| - optional: true | |
| - | |
| - '@next/swc-linux-arm64-gnu@16.2.6': | |
| - optional: true | |
| - | |
| - '@next/swc-linux-arm64-musl@15.5.4': | |
| - optional: true | |
| - | |
| - '@next/swc-linux-arm64-musl@16.2.6': | |
| - optional: true | |
| - | |
| - '@next/swc-linux-x64-gnu@15.5.4': | |
| + '@next/swc-darwin-arm64@15.5.16': | |
| optional: true | |
| - '@next/swc-linux-x64-gnu@16.2.6': | |
| + '@next/swc-darwin-x64@15.5.16': | |
| optional: true | |
| - '@next/swc-linux-x64-musl@15.5.4': | |
| + '@next/swc-linux-arm64-gnu@15.5.16': | |
| optional: true | |
| - '@next/swc-linux-x64-musl@16.2.6': | |
| + '@next/swc-linux-arm64-musl@15.5.16': | |
| optional: true | |
| - '@next/swc-win32-arm64-msvc@15.5.4': | |
| + '@next/swc-linux-x64-gnu@15.5.16': | |
| optional: true | |
| - '@next/swc-win32-arm64-msvc@16.2.6': | |
| + '@next/swc-linux-x64-musl@15.5.16': | |
| optional: true | |
| - '@next/swc-win32-x64-msvc@15.5.4': | |
| + '@next/swc-win32-arm64-msvc@15.5.16': | |
| optional: true | |
| - '@next/swc-win32-x64-msvc@16.2.6': | |
| + '@next/swc-win32-x64-msvc@15.5.16': | |
| optional: true | |
| '@noble/ciphers@1.3.0': {} | |
| @@ -8324,88 +8009,42 @@ snapshots: | |
| source-map-js: 1.2.1 | |
| tailwindcss: 4.2.2 | |
| - '@tailwindcss/node@4.3.0': | |
| - dependencies: | |
| - '@jridgewell/remapping': 2.3.5 | |
| - enhanced-resolve: 5.21.3 | |
| - jiti: 2.6.1 | |
| - lightningcss: 1.32.0 | |
| - magic-string: 0.30.21 | |
| - source-map-js: 1.2.1 | |
| - tailwindcss: 4.3.0 | |
| - | |
| '@tailwindcss/oxide-android-arm64@4.2.2': | |
| optional: true | |
| - '@tailwindcss/oxide-android-arm64@4.3.0': | |
| - optional: true | |
| - | |
| '@tailwindcss/oxide-darwin-arm64@4.2.2': | |
| optional: true | |
| - '@tailwindcss/oxide-darwin-arm64@4.3.0': | |
| - optional: true | |
| - | |
| '@tailwindcss/oxide-darwin-x64@4.2.2': | |
| optional: true | |
| - '@tailwindcss/oxide-darwin-x64@4.3.0': | |
| - optional: true | |
| - | |
| '@tailwindcss/oxide-freebsd-x64@4.2.2': | |
| optional: true | |
| - '@tailwindcss/oxide-freebsd-x64@4.3.0': | |
| - optional: true | |
| - | |
| '@tailwindcss/oxide-linux-arm-gnueabihf@4.2.2': | |
| optional: true | |
| - '@tailwindcss/oxide-linux-arm-gnueabihf@4.3.0': | |
| - optional: true | |
| - | |
| '@tailwindcss/oxide-linux-arm64-gnu@4.2.2': | |
| optional: true | |
| - '@tailwindcss/oxide-linux-arm64-gnu@4.3.0': | |
| - optional: true | |
| - | |
| '@tailwindcss/oxide-linux-arm64-musl@4.2.2': | |
| optional: true | |
| - '@tailwindcss/oxide-linux-arm64-musl@4.3.0': | |
| - optional: true | |
| - | |
| '@tailwindcss/oxide-linux-x64-gnu@4.2.2': | |
| optional: true | |
| - '@tailwindcss/oxide-linux-x64-gnu@4.3.0': | |
| - optional: true | |
| - | |
| '@tailwindcss/oxide-linux-x64-musl@4.2.2': | |
| optional: true | |
| - '@tailwindcss/oxide-linux-x64-musl@4.3.0': | |
| - optional: true | |
| - | |
| '@tailwindcss/oxide-wasm32-wasi@4.2.2': | |
| optional: true | |
| - '@tailwindcss/oxide-wasm32-wasi@4.3.0': | |
| - optional: true | |
| - | |
| '@tailwindcss/oxide-win32-arm64-msvc@4.2.2': | |
| optional: true | |
| - '@tailwindcss/oxide-win32-arm64-msvc@4.3.0': | |
| - optional: true | |
| - | |
| '@tailwindcss/oxide-win32-x64-msvc@4.2.2': | |
| optional: true | |
| - '@tailwindcss/oxide-win32-x64-msvc@4.3.0': | |
| - optional: true | |
| - | |
| '@tailwindcss/oxide@4.2.2': | |
| optionalDependencies: | |
| '@tailwindcss/oxide-android-arm64': 4.2.2 | |
| @@ -8421,37 +8060,14 @@ snapshots: | |
| '@tailwindcss/oxide-win32-arm64-msvc': 4.2.2 | |
| '@tailwindcss/oxide-win32-x64-msvc': 4.2.2 | |
| - '@tailwindcss/oxide@4.3.0': | |
| - optionalDependencies: | |
| - '@tailwindcss/oxide-android-arm64': 4.3.0 | |
| - '@tailwindcss/oxide-darwin-arm64': 4.3.0 | |
| - '@tailwindcss/oxide-darwin-x64': 4.3.0 | |
| - '@tailwindcss/oxide-freebsd-x64': 4.3.0 | |
| - '@tailwindcss/oxide-linux-arm-gnueabihf': 4.3.0 | |
| - '@tailwindcss/oxide-linux-arm64-gnu': 4.3.0 | |
| - '@tailwindcss/oxide-linux-arm64-musl': 4.3.0 | |
| - '@tailwindcss/oxide-linux-x64-gnu': 4.3.0 | |
| - '@tailwindcss/oxide-linux-x64-musl': 4.3.0 | |
| - '@tailwindcss/oxide-wasm32-wasi': 4.3.0 | |
| - '@tailwindcss/oxide-win32-arm64-msvc': 4.3.0 | |
| - '@tailwindcss/oxide-win32-x64-msvc': 4.3.0 | |
| - | |
| '@tailwindcss/postcss@4.2.2': | |
| dependencies: | |
| '@alloc/quick-lru': 5.2.0 | |
| '@tailwindcss/node': 4.2.2 | |
| '@tailwindcss/oxide': 4.2.2 | |
| - postcss: 8.5.14 | |
| + postcss: 8.5.8 | |
| tailwindcss: 4.2.2 | |
| - '@tailwindcss/postcss@4.3.0': | |
| - dependencies: | |
| - '@alloc/quick-lru': 5.2.0 | |
| - '@tailwindcss/node': 4.3.0 | |
| - '@tailwindcss/oxide': 4.3.0 | |
| - postcss: 8.5.14 | |
| - tailwindcss: 4.3.0 | |
| - | |
| '@tanstack/query-core@5.90.16': {} | |
| '@tybys/wasm-util@0.10.1': | |
| @@ -8466,7 +8082,7 @@ snapshots: | |
| '@types/connect@3.4.38': | |
| dependencies: | |
| - '@types/node': 25.7.0 | |
| + '@types/node': 25.5.0 | |
| '@types/deep-eql@4.0.2': {} | |
| @@ -8496,10 +8112,6 @@ snapshots: | |
| dependencies: | |
| undici-types: 7.18.2 | |
| - '@types/node@25.7.0': | |
| - dependencies: | |
| - undici-types: 7.21.0 | |
| - | |
| '@types/parse-json@4.0.2': {} | |
| '@types/react-dom@19.2.3(@types/react@19.2.14)': | |
| @@ -8516,11 +8128,11 @@ snapshots: | |
| '@types/ws@7.4.7': | |
| dependencies: | |
| - '@types/node': 25.7.0 | |
| + '@types/node': 25.5.0 | |
| '@types/ws@8.18.1': | |
| dependencies: | |
| - '@types/node': 25.7.0 | |
| + '@types/node': 25.5.0 | |
| '@types/yargs-parser@21.0.3': {} | |
| @@ -8678,7 +8290,20 @@ snapshots: | |
| '@unrs/resolver-binding-win32-x64-msvc@1.11.1': | |
| optional: true | |
| - '@vitest/coverage-v8@3.2.4(vitest@3.2.4(@types/node@25.7.0)(jiti@2.6.1)(jsdom@26.1.0(bufferutil@4.1.0)(utf-8-validate@6.0.6))(lightningcss@1.32.0)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3))': | |
| + '@upstash/core-analytics@0.0.10': | |
| + dependencies: | |
| + '@upstash/redis': 1.38.0 | |
| + | |
| + '@upstash/ratelimit@2.0.8(@upstash/redis@1.38.0)': | |
| + dependencies: | |
| + '@upstash/core-analytics': 0.0.10 | |
| + '@upstash/redis': 1.38.0 | |
| + | |
| + '@upstash/redis@1.38.0': | |
| + dependencies: | |
| + uncrypto: 0.1.3 | |
| + | |
| + '@vitest/coverage-v8@3.2.4(vitest@3.2.4(@types/node@25.5.0)(jiti@2.6.1)(jsdom@26.1.0(bufferutil@4.1.0)(utf-8-validate@6.0.6))(lightningcss@1.32.0)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3))': | |
| dependencies: | |
| '@ampproject/remapping': 2.3.0 | |
| '@bcoe/v8-coverage': 1.0.2 | |
| @@ -8693,7 +8318,7 @@ snapshots: | |
| std-env: 3.10.0 | |
| test-exclude: 7.0.2 | |
| tinyrainbow: 2.0.0 | |
| - vitest: 3.2.4(@types/node@25.7.0)(jiti@2.6.1)(jsdom@26.1.0(bufferutil@4.1.0)(utf-8-validate@6.0.6))(lightningcss@1.32.0)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3) | |
| + vitest: 3.2.4(@types/node@25.5.0)(jiti@2.6.1)(jsdom@26.1.0(bufferutil@4.1.0)(utf-8-validate@6.0.6))(lightningcss@1.32.0)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3) | |
| transitivePeerDependencies: | |
| - supports-color | |
| @@ -8714,13 +8339,13 @@ snapshots: | |
| chai: 6.2.2 | |
| tinyrainbow: 3.1.0 | |
| - '@vitest/mocker@3.2.4(vite@7.3.1(@types/node@25.7.0)(jiti@2.6.1)(lightningcss@1.32.0)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3))': | |
| + '@vitest/mocker@3.2.4(vite@7.3.1(@types/node@25.5.0)(jiti@2.6.1)(lightningcss@1.32.0)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3))': | |
| dependencies: | |
| '@vitest/spy': 3.2.4 | |
| estree-walker: 3.0.3 | |
| magic-string: 0.30.21 | |
| optionalDependencies: | |
| - vite: 7.3.1(@types/node@25.7.0)(jiti@2.6.1)(lightningcss@1.32.0)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3) | |
| + vite: 7.3.1(@types/node@25.5.0)(jiti@2.6.1)(lightningcss@1.32.0)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3) | |
| '@vitest/mocker@4.1.0(vite@8.0.0(@types/node@25.5.0)(esbuild@0.27.4)(jiti@2.6.1)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3))': | |
| dependencies: | |
| @@ -8730,14 +8355,6 @@ snapshots: | |
| optionalDependencies: | |
| vite: 8.0.0(@types/node@25.5.0)(esbuild@0.27.4)(jiti@2.6.1)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3) | |
| - '@vitest/mocker@4.1.0(vite@8.0.0(@types/node@25.7.0)(esbuild@0.27.4)(jiti@2.6.1)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3))': | |
| - dependencies: | |
| - '@vitest/spy': 4.1.0 | |
| - estree-walker: 3.0.3 | |
| - magic-string: 0.30.21 | |
| - optionalDependencies: | |
| - vite: 8.0.0(@types/node@25.7.0)(esbuild@0.27.4)(jiti@2.6.1)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3) | |
| - | |
| '@vitest/pretty-format@3.2.4': | |
| dependencies: | |
| tinyrainbow: 2.0.0 | |
| @@ -8899,7 +8516,7 @@ snapshots: | |
| array-includes@3.1.9: | |
| dependencies: | |
| - call-bind: 1.0.8 | |
| + call-bind: 1.0.9 | |
| call-bound: 1.0.4 | |
| define-properties: 1.2.1 | |
| es-abstract: 1.24.2 | |
| @@ -8910,7 +8527,7 @@ snapshots: | |
| array.prototype.findlast@1.2.5: | |
| dependencies: | |
| - call-bind: 1.0.8 | |
| + call-bind: 1.0.9 | |
| define-properties: 1.2.1 | |
| es-abstract: 1.24.2 | |
| es-errors: 1.3.0 | |
| @@ -8919,7 +8536,7 @@ snapshots: | |
| array.prototype.findlastindex@1.2.6: | |
| dependencies: | |
| - call-bind: 1.0.8 | |
| + call-bind: 1.0.9 | |
| call-bound: 1.0.4 | |
| define-properties: 1.2.1 | |
| es-abstract: 1.24.2 | |
| @@ -8929,21 +8546,21 @@ snapshots: | |
| array.prototype.flat@1.3.3: | |
| dependencies: | |
| - call-bind: 1.0.8 | |
| + call-bind: 1.0.9 | |
| define-properties: 1.2.1 | |
| es-abstract: 1.24.2 | |
| es-shim-unscopables: 1.1.0 | |
| array.prototype.flatmap@1.3.3: | |
| dependencies: | |
| - call-bind: 1.0.8 | |
| + call-bind: 1.0.9 | |
| define-properties: 1.2.1 | |
| es-abstract: 1.24.2 | |
| es-shim-unscopables: 1.1.0 | |
| array.prototype.tosorted@1.1.4: | |
| dependencies: | |
| - call-bind: 1.0.8 | |
| + call-bind: 1.0.9 | |
| define-properties: 1.2.1 | |
| es-abstract: 1.24.2 | |
| es-errors: 1.3.0 | |
| @@ -8952,7 +8569,7 @@ snapshots: | |
| arraybuffer.prototype.slice@1.0.4: | |
| dependencies: | |
| array-buffer-byte-length: 1.0.2 | |
| - call-bind: 1.0.8 | |
| + call-bind: 1.0.9 | |
| define-properties: 1.2.1 | |
| es-abstract: 1.24.2 | |
| es-errors: 1.3.0 | |
| @@ -8985,8 +8602,6 @@ snapshots: | |
| axobject-query@4.1.0: {} | |
| - b4a@1.8.0: {} | |
| - | |
| babel-plugin-macros@3.1.0: | |
| dependencies: | |
| '@babel/runtime': 7.29.2 | |
| @@ -9019,13 +8634,6 @@ snapshots: | |
| jsonpath: 1.3.0 | |
| tryer: 1.0.1 | |
| - blake2b-wasm@2.4.0: | |
| - dependencies: | |
| - b4a: 1.8.0 | |
| - nanoassert: 2.0.0 | |
| - transitivePeerDependencies: | |
| - - react-native-b4a | |
| - | |
| blakejs@1.2.1: {} | |
| bluebird@3.7.2: {} | |
| @@ -9171,7 +8779,7 @@ snapshots: | |
| chrome-launcher@0.15.2: | |
| dependencies: | |
| - '@types/node': 25.7.0 | |
| + '@types/node': 25.5.0 | |
| escape-string-regexp: 4.0.0 | |
| is-wsl: 2.2.0 | |
| lighthouse-logger: 1.4.2 | |
| @@ -9180,7 +8788,7 @@ snapshots: | |
| chromium-edge-launcher@0.3.0: | |
| dependencies: | |
| - '@types/node': 25.7.0 | |
| + '@types/node': 25.5.0 | |
| escape-string-regexp: 4.0.0 | |
| is-wsl: 2.2.0 | |
| lighthouse-logger: 1.4.2 | |
| @@ -9192,10 +8800,6 @@ snapshots: | |
| ci-info@3.9.0: {} | |
| - circom_runtime@0.1.21: | |
| - dependencies: | |
| - ffjavascript: 0.2.56 | |
| - | |
| circom_runtime@0.1.28: | |
| dependencies: | |
| ffjavascript: 0.3.1 | |
| @@ -9207,11 +8811,9 @@ snapshots: | |
| ffjavascript: 0.2.63 | |
| fnv-plus: 1.3.1 | |
| r1csfile: 0.0.41 | |
| - snarkjs: 0.5.0 | |
| + snarkjs: 0.7.6 | |
| tmp-promise: 3.0.3 | |
| util: 0.12.5 | |
| - transitivePeerDependencies: | |
| - - react-native-b4a | |
| circomkit@0.3.4(@types/snarkjs@0.7.9)(snarkjs@0.7.6): | |
| dependencies: | |
| @@ -9220,8 +8822,6 @@ snapshots: | |
| commander: 12.1.0 | |
| loglevel: 1.9.2 | |
| snarkjs: 0.7.6 | |
| - transitivePeerDependencies: | |
| - - react-native-b4a | |
| circomlib@2.0.5: {} | |
| @@ -9424,11 +9024,6 @@ snapshots: | |
| graceful-fs: 4.2.11 | |
| tapable: 2.3.2 | |
| - enhanced-resolve@5.21.3: | |
| - dependencies: | |
| - graceful-fs: 4.2.11 | |
| - tapable: 2.3.3 | |
| - | |
| entities@6.0.1: | |
| optional: true | |
| @@ -9445,7 +9040,7 @@ snapshots: | |
| array-buffer-byte-length: 1.0.2 | |
| arraybuffer.prototype.slice: 1.0.4 | |
| available-typed-arrays: 1.0.7 | |
| - call-bind: 1.0.8 | |
| + call-bind: 1.0.9 | |
| call-bound: 1.0.4 | |
| data-view-buffer: 1.0.2 | |
| data-view-byte-length: 1.0.2 | |
| @@ -9623,16 +9218,16 @@ snapshots: | |
| optionalDependencies: | |
| source-map: 0.6.1 | |
| - eslint-config-next@15.5.4(eslint@9.39.4(jiti@2.6.1))(typescript@5.9.3): | |
| + eslint-config-next@15.5.16(eslint@9.39.4(jiti@2.6.1))(typescript@5.9.3): | |
| dependencies: | |
| - '@next/eslint-plugin-next': 15.5.4 | |
| + '@next/eslint-plugin-next': 15.5.16 | |
| '@rushstack/eslint-patch': 1.16.1 | |
| '@typescript-eslint/eslint-plugin': 8.57.0(@typescript-eslint/parser@8.57.0(eslint@9.39.4(jiti@2.6.1))(typescript@5.9.3))(eslint@9.39.4(jiti@2.6.1))(typescript@5.9.3) | |
| '@typescript-eslint/parser': 8.57.0(eslint@9.39.4(jiti@2.6.1))(typescript@5.9.3) | |
| eslint: 9.39.4(jiti@2.6.1) | |
| eslint-import-resolver-node: 0.3.10 | |
| eslint-import-resolver-typescript: 3.10.1(eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.57.0(eslint@9.39.4(jiti@2.6.1))(typescript@5.9.3))(eslint@9.39.4(jiti@2.6.1)))(eslint@9.39.4(jiti@2.6.1)) | |
| - eslint-plugin-import: 2.32.0(@typescript-eslint/parser@8.57.0(eslint@9.39.4(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-typescript@3.10.1)(eslint@9.39.4(jiti@2.6.1)) | |
| + eslint-plugin-import: 2.32.0(@typescript-eslint/parser@8.57.0(eslint@9.39.4(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-typescript@3.10.1(eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.57.0(eslint@9.39.4(jiti@2.6.1))(typescript@5.9.3))(eslint@9.39.4(jiti@2.6.1)))(eslint@9.39.4(jiti@2.6.1)))(eslint@9.39.4(jiti@2.6.1)) | |
| eslint-plugin-jsx-a11y: 6.10.2(eslint@9.39.4(jiti@2.6.1)) | |
| eslint-plugin-react: 7.37.5(eslint@9.39.4(jiti@2.6.1)) | |
| eslint-plugin-react-hooks: 5.2.0(eslint@9.39.4(jiti@2.6.1)) | |
| @@ -9643,26 +9238,6 @@ snapshots: | |
| - eslint-plugin-import-x | |
| - supports-color | |
| - eslint-config-next@16.2.6(@typescript-eslint/parser@8.57.0(eslint@9.39.4(jiti@2.6.1))(typescript@5.9.3))(eslint@9.39.4(jiti@2.6.1))(typescript@5.9.3): | |
| - dependencies: | |
| - '@next/eslint-plugin-next': 16.2.6 | |
| - eslint: 9.39.4(jiti@2.6.1) | |
| - eslint-import-resolver-node: 0.3.10 | |
| - eslint-import-resolver-typescript: 3.10.1(eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.57.0(eslint@9.39.4(jiti@2.6.1))(typescript@5.9.3))(eslint@9.39.4(jiti@2.6.1)))(eslint@9.39.4(jiti@2.6.1)) | |
| - eslint-plugin-import: 2.32.0(@typescript-eslint/parser@8.57.0(eslint@9.39.4(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-typescript@3.10.1)(eslint@9.39.4(jiti@2.6.1)) | |
| - eslint-plugin-jsx-a11y: 6.10.2(eslint@9.39.4(jiti@2.6.1)) | |
| - eslint-plugin-react: 7.37.5(eslint@9.39.4(jiti@2.6.1)) | |
| - eslint-plugin-react-hooks: 7.1.1(eslint@9.39.4(jiti@2.6.1)) | |
| - globals: 16.4.0 | |
| - typescript-eslint: 8.57.0(eslint@9.39.4(jiti@2.6.1))(typescript@5.9.3) | |
| - optionalDependencies: | |
| - typescript: 5.9.3 | |
| - transitivePeerDependencies: | |
| - - '@typescript-eslint/parser' | |
| - - eslint-import-resolver-webpack | |
| - - eslint-plugin-import-x | |
| - - supports-color | |
| - | |
| eslint-import-resolver-node@0.3.10: | |
| dependencies: | |
| debug: 3.2.7 | |
| @@ -9682,7 +9257,7 @@ snapshots: | |
| tinyglobby: 0.2.15 | |
| unrs-resolver: 1.11.1 | |
| optionalDependencies: | |
| - eslint-plugin-import: 2.32.0(@typescript-eslint/parser@8.57.0(eslint@9.39.4(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-typescript@3.10.1)(eslint@9.39.4(jiti@2.6.1)) | |
| + eslint-plugin-import: 2.32.0(@typescript-eslint/parser@8.57.0(eslint@9.39.4(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-typescript@3.10.1(eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.57.0(eslint@9.39.4(jiti@2.6.1))(typescript@5.9.3))(eslint@9.39.4(jiti@2.6.1)))(eslint@9.39.4(jiti@2.6.1)))(eslint@9.39.4(jiti@2.6.1)) | |
| transitivePeerDependencies: | |
| - supports-color | |
| @@ -9697,7 +9272,7 @@ snapshots: | |
| transitivePeerDependencies: | |
| - supports-color | |
| - eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.57.0(eslint@9.39.4(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-typescript@3.10.1)(eslint@9.39.4(jiti@2.6.1)): | |
| + eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.57.0(eslint@9.39.4(jiti@2.6.1))(typescript@5.9.3))(eslint-import-resolver-typescript@3.10.1(eslint-plugin-import@2.32.0(@typescript-eslint/parser@8.57.0(eslint@9.39.4(jiti@2.6.1))(typescript@5.9.3))(eslint@9.39.4(jiti@2.6.1)))(eslint@9.39.4(jiti@2.6.1)))(eslint@9.39.4(jiti@2.6.1)): | |
| dependencies: | |
| '@rtsao/scc': 1.1.0 | |
| array-includes: 3.1.9 | |
| @@ -9749,17 +9324,6 @@ snapshots: | |
| dependencies: | |
| eslint: 9.39.4(jiti@2.6.1) | |
| - eslint-plugin-react-hooks@7.1.1(eslint@9.39.4(jiti@2.6.1)): | |
| - dependencies: | |
| - '@babel/core': 7.29.0 | |
| - '@babel/parser': 7.29.0 | |
| - eslint: 9.39.4(jiti@2.6.1) | |
| - hermes-parser: 0.25.1 | |
| - zod: 3.25.76 | |
| - zod-validation-error: 4.0.2(zod@3.25.76) | |
| - transitivePeerDependencies: | |
| - - supports-color | |
| - | |
| eslint-plugin-react@7.37.5(eslint@9.39.4(jiti@2.6.1)): | |
| dependencies: | |
| array-includes: 3.1.9 | |
| @@ -10014,7 +9578,7 @@ snapshots: | |
| function.prototype.name@1.1.8: | |
| dependencies: | |
| - call-bind: 1.0.8 | |
| + call-bind: 1.0.9 | |
| call-bound: 1.0.4 | |
| define-properties: 1.2.1 | |
| functions-have-names: 1.2.3 | |
| @@ -10082,8 +9646,6 @@ snapshots: | |
| globals@14.0.0: {} | |
| - globals@16.4.0: {} | |
| - | |
| globalthis@1.0.4: | |
| dependencies: | |
| define-properties: 1.2.1 | |
| @@ -10117,16 +9679,10 @@ snapshots: | |
| hermes-compiler@250829098.0.10: {} | |
| - hermes-estree@0.25.1: {} | |
| - | |
| hermes-estree@0.33.3: {} | |
| hermes-estree@0.35.0: {} | |
| - hermes-parser@0.25.1: | |
| - dependencies: | |
| - hermes-estree: 0.25.1 | |
| - | |
| hermes-parser@0.33.3: | |
| dependencies: | |
| hermes-estree: 0.33.3 | |
| @@ -10223,7 +9779,7 @@ snapshots: | |
| is-array-buffer@3.0.5: | |
| dependencies: | |
| - call-bind: 1.0.8 | |
| + call-bind: 1.0.9 | |
| call-bound: 1.0.4 | |
| get-intrinsic: 1.3.0 | |
| @@ -10426,7 +9982,7 @@ snapshots: | |
| jest-util@29.7.0: | |
| dependencies: | |
| '@jest/types': 29.6.3 | |
| - '@types/node': 25.7.0 | |
| + '@types/node': 25.5.0 | |
| chalk: 4.1.2 | |
| ci-info: 3.9.0 | |
| graceful-fs: 4.2.11 | |
| @@ -10443,7 +9999,7 @@ snapshots: | |
| jest-worker@29.7.0: | |
| dependencies: | |
| - '@types/node': 25.7.0 | |
| + '@types/node': 25.5.0 | |
| jest-util: 29.7.0 | |
| merge-stream: 2.0.0 | |
| supports-color: 8.1.1 | |
| @@ -10458,8 +10014,6 @@ snapshots: | |
| js-levenshtein@1.1.6: {} | |
| - js-sha3@0.8.0: {} | |
| - | |
| js-tokens@10.0.0: {} | |
| js-tokens@4.0.0: {} | |
| @@ -10691,10 +10245,6 @@ snapshots: | |
| dependencies: | |
| react: 19.2.3 | |
| - lucide-react@1.14.0(react@19.2.6): | |
| - dependencies: | |
| - react: 19.2.6 | |
| - | |
| magic-string@0.30.21: | |
| dependencies: | |
| '@jridgewell/sourcemap-codec': 1.5.5 | |
| @@ -10955,8 +10505,6 @@ snapshots: | |
| object-assign: 4.1.1 | |
| thenify-all: 1.6.0 | |
| - nanoassert@2.0.0: {} | |
| - | |
| nanoid@3.3.11: {} | |
| napi-postinstall@0.3.4: {} | |
| @@ -10965,9 +10513,9 @@ snapshots: | |
| negotiator@1.0.0: {} | |
| - next@15.5.4(@babel/core@7.29.0)(react-dom@19.2.3(react@19.2.3))(react@19.2.3): | |
| + next@15.5.16(@babel/core@7.29.0)(react-dom@19.2.3(react@19.2.3))(react@19.2.3): | |
| dependencies: | |
| - '@next/env': 15.5.4 | |
| + '@next/env': 15.5.16 | |
| '@swc/helpers': 0.5.15 | |
| caniuse-lite: 1.0.30001787 | |
| postcss: 8.4.31 | |
| @@ -10975,38 +10523,14 @@ snapshots: | |
| react-dom: 19.2.3(react@19.2.3) | |
| styled-jsx: 5.1.6(@babel/core@7.29.0)(react@19.2.3) | |
| optionalDependencies: | |
| - '@next/swc-darwin-arm64': 15.5.4 | |
| - '@next/swc-darwin-x64': 15.5.4 | |
| - '@next/swc-linux-arm64-gnu': 15.5.4 | |
| - '@next/swc-linux-arm64-musl': 15.5.4 | |
| - '@next/swc-linux-x64-gnu': 15.5.4 | |
| - '@next/swc-linux-x64-musl': 15.5.4 | |
| - '@next/swc-win32-arm64-msvc': 15.5.4 | |
| - '@next/swc-win32-x64-msvc': 15.5.4 | |
| - sharp: 0.34.5 | |
| - transitivePeerDependencies: | |
| - - '@babel/core' | |
| - - babel-plugin-macros | |
| - | |
| - next@16.2.6(@babel/core@7.29.0)(react-dom@19.2.6(react@19.2.6))(react@19.2.6): | |
| - dependencies: | |
| - '@next/env': 16.2.6 | |
| - '@swc/helpers': 0.5.15 | |
| - baseline-browser-mapping: 2.10.16 | |
| - caniuse-lite: 1.0.30001787 | |
| - postcss: 8.4.31 | |
| - react: 19.2.6 | |
| - react-dom: 19.2.6(react@19.2.6) | |
| - styled-jsx: 5.1.6(@babel/core@7.29.0)(react@19.2.6) | |
| - optionalDependencies: | |
| - '@next/swc-darwin-arm64': 16.2.6 | |
| - '@next/swc-darwin-x64': 16.2.6 | |
| - '@next/swc-linux-arm64-gnu': 16.2.6 | |
| - '@next/swc-linux-arm64-musl': 16.2.6 | |
| - '@next/swc-linux-x64-gnu': 16.2.6 | |
| - '@next/swc-linux-x64-musl': 16.2.6 | |
| - '@next/swc-win32-arm64-msvc': 16.2.6 | |
| - '@next/swc-win32-x64-msvc': 16.2.6 | |
| + '@next/swc-darwin-arm64': 15.5.16 | |
| + '@next/swc-darwin-x64': 15.5.16 | |
| + '@next/swc-linux-arm64-gnu': 15.5.16 | |
| + '@next/swc-linux-arm64-musl': 15.5.16 | |
| + '@next/swc-linux-x64-gnu': 15.5.16 | |
| + '@next/swc-linux-x64-musl': 15.5.16 | |
| + '@next/swc-win32-arm64-msvc': 15.5.16 | |
| + '@next/swc-win32-x64-msvc': 15.5.16 | |
| sharp: 0.34.5 | |
| transitivePeerDependencies: | |
| - '@babel/core' | |
| @@ -11047,7 +10571,7 @@ snapshots: | |
| object.assign@4.1.7: | |
| dependencies: | |
| - call-bind: 1.0.8 | |
| + call-bind: 1.0.9 | |
| call-bound: 1.0.4 | |
| define-properties: 1.2.1 | |
| es-object-atoms: 1.1.1 | |
| @@ -11056,27 +10580,27 @@ snapshots: | |
| object.entries@1.1.9: | |
| dependencies: | |
| - call-bind: 1.0.8 | |
| + call-bind: 1.0.9 | |
| call-bound: 1.0.4 | |
| define-properties: 1.2.1 | |
| es-object-atoms: 1.1.1 | |
| object.fromentries@2.0.8: | |
| dependencies: | |
| - call-bind: 1.0.8 | |
| + call-bind: 1.0.9 | |
| define-properties: 1.2.1 | |
| es-abstract: 1.24.2 | |
| es-object-atoms: 1.1.1 | |
| object.groupby@1.0.3: | |
| dependencies: | |
| - call-bind: 1.0.8 | |
| + call-bind: 1.0.9 | |
| define-properties: 1.2.1 | |
| es-abstract: 1.24.2 | |
| object.values@1.2.1: | |
| dependencies: | |
| - call-bind: 1.0.8 | |
| + call-bind: 1.0.9 | |
| call-bound: 1.0.4 | |
| define-properties: 1.2.1 | |
| es-object-atoms: 1.1.1 | |
| @@ -11227,12 +10751,12 @@ snapshots: | |
| possible-typed-array-names@1.1.0: {} | |
| - postcss-load-config@6.0.1(jiti@2.6.1)(postcss@8.5.14)(tsx@4.21.0)(yaml@2.8.3): | |
| + postcss-load-config@6.0.1(jiti@2.6.1)(postcss@8.5.8)(tsx@4.21.0)(yaml@2.8.3): | |
| dependencies: | |
| lilconfig: 3.1.3 | |
| optionalDependencies: | |
| jiti: 2.6.1 | |
| - postcss: 8.5.14 | |
| + postcss: 8.5.8 | |
| tsx: 4.21.0 | |
| yaml: 2.8.3 | |
| @@ -11242,12 +10766,6 @@ snapshots: | |
| picocolors: 1.1.1 | |
| source-map-js: 1.2.1 | |
| - postcss@8.5.14: | |
| - dependencies: | |
| - nanoid: 3.3.11 | |
| - picocolors: 1.1.1 | |
| - source-map-js: 1.2.1 | |
| - | |
| postcss@8.5.8: | |
| dependencies: | |
| nanoid: 3.3.11 | |
| @@ -11384,11 +10902,6 @@ snapshots: | |
| react: 19.2.3 | |
| scheduler: 0.27.0 | |
| - react-dom@19.2.6(react@19.2.6): | |
| - dependencies: | |
| - react: 19.2.6 | |
| - scheduler: 0.27.0 | |
| - | |
| react-is@16.13.1: {} | |
| react-is@18.3.1: {} | |
| @@ -11469,13 +10982,11 @@ snapshots: | |
| react@19.2.3: {} | |
| - react@19.2.6: {} | |
| - | |
| readdirp@4.1.2: {} | |
| reflect.getprototypeof@1.0.10: | |
| dependencies: | |
| - call-bind: 1.0.8 | |
| + call-bind: 1.0.9 | |
| define-properties: 1.2.1 | |
| es-abstract: 1.24.2 | |
| es-errors: 1.3.0 | |
| @@ -11488,7 +10999,7 @@ snapshots: | |
| regexp.prototype.flags@1.5.4: | |
| dependencies: | |
| - call-bind: 1.0.8 | |
| + call-bind: 1.0.9 | |
| define-properties: 1.2.1 | |
| es-errors: 1.3.0 | |
| get-proto: 1.0.1 | |
| @@ -11599,7 +11110,7 @@ snapshots: | |
| safe-array-concat@1.1.3: | |
| dependencies: | |
| - call-bind: 1.0.8 | |
| + call-bind: 1.0.9 | |
| call-bound: 1.0.4 | |
| get-intrinsic: 1.3.0 | |
| has-symbols: 1.1.0 | |
| @@ -11761,21 +11272,6 @@ snapshots: | |
| signal-exit@4.1.0: {} | |
| - snarkjs@0.5.0: | |
| - dependencies: | |
| - '@iden3/binfileutils': 0.0.11 | |
| - bfj: 7.1.0 | |
| - blake2b-wasm: 2.4.0 | |
| - circom_runtime: 0.1.21 | |
| - ejs: 3.1.10 | |
| - fastfile: 0.0.20 | |
| - ffjavascript: 0.2.56 | |
| - js-sha3: 0.8.0 | |
| - logplease: 1.2.15 | |
| - r1csfile: 0.0.41 | |
| - transitivePeerDependencies: | |
| - - react-native-b4a | |
| - | |
| snarkjs@0.7.6: | |
| dependencies: | |
| '@iden3/binfileutils': 0.0.12 | |
| @@ -11858,13 +11354,13 @@ snapshots: | |
| string.prototype.includes@2.0.1: | |
| dependencies: | |
| - call-bind: 1.0.8 | |
| + call-bind: 1.0.9 | |
| define-properties: 1.2.1 | |
| es-abstract: 1.24.2 | |
| string.prototype.matchall@4.0.12: | |
| dependencies: | |
| - call-bind: 1.0.8 | |
| + call-bind: 1.0.9 | |
| call-bound: 1.0.4 | |
| define-properties: 1.2.1 | |
| es-abstract: 1.24.2 | |
| @@ -11885,7 +11381,7 @@ snapshots: | |
| string.prototype.trim@1.2.10: | |
| dependencies: | |
| - call-bind: 1.0.8 | |
| + call-bind: 1.0.9 | |
| call-bound: 1.0.4 | |
| define-data-property: 1.1.4 | |
| define-properties: 1.2.1 | |
| @@ -11895,14 +11391,14 @@ snapshots: | |
| string.prototype.trimend@1.0.9: | |
| dependencies: | |
| - call-bind: 1.0.8 | |
| + call-bind: 1.0.9 | |
| call-bound: 1.0.4 | |
| define-properties: 1.2.1 | |
| es-object-atoms: 1.1.1 | |
| string.prototype.trimstart@1.0.8: | |
| dependencies: | |
| - call-bind: 1.0.8 | |
| + call-bind: 1.0.9 | |
| define-properties: 1.2.1 | |
| es-object-atoms: 1.1.1 | |
| @@ -11929,13 +11425,6 @@ snapshots: | |
| optionalDependencies: | |
| '@babel/core': 7.29.0 | |
| - styled-jsx@5.1.6(@babel/core@7.29.0)(react@19.2.6): | |
| - dependencies: | |
| - client-only: 0.0.1 | |
| - react: 19.2.6 | |
| - optionalDependencies: | |
| - '@babel/core': 7.29.0 | |
| - | |
| stylis@4.2.0: {} | |
| sucrase@3.35.1: | |
| @@ -11977,12 +11466,8 @@ snapshots: | |
| tailwindcss@4.2.2: {} | |
| - tailwindcss@4.3.0: {} | |
| - | |
| tapable@2.3.2: {} | |
| - tapable@2.3.3: {} | |
| - | |
| terser@5.46.1: | |
| dependencies: | |
| '@jridgewell/source-map': 0.3.11 | |
| @@ -12084,7 +11569,7 @@ snapshots: | |
| tslib@2.8.1: {} | |
| - tsup@8.5.1(jiti@2.6.1)(postcss@8.5.14)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3): | |
| + tsup@8.5.1(jiti@2.6.1)(postcss@8.5.8)(tsx@4.21.0)(typescript@5.9.3)(yaml@2.8.3): | |
| dependencies: | |
| bundle-require: 5.1.0(esbuild@0.27.4) | |
| cac: 6.7.14 | |
| @@ -12095,7 +11580,7 @@ snapshots: | |
| fix-dts-default-cjs-exports: 1.0.1 | |
| joycon: 3.1.1 | |
| picocolors: 1.1.1 | |
| - postcss-load-config: 6.0.1(jiti@2.6.1)(postcss@8.5.14)(tsx@4.21.0)(yaml@2.8.3) | |
| + postcss-load-config: 6.0.1(jiti@2.6.1)(postcss@8.5.8)(tsx@4.21.0)(yaml@2.8.3) | |
| resolve-from: 5.0.0 | |
| rollup: 4.59.0 | |
| source-map: 0.7.6 | |
| @@ -12104,7 +11589,7 @@ snapshots: | |
| tinyglobby: 0.2.15 | |
| tree-kill: 1.2.2 | |
| optionalDependencies: | |
| - postcss: 8.5.14 | |
| + postcss: 8.5.8 | |
| typescript: 5.9.3 | |
| transitivePeerDependencies: | |
| - jiti | |
| @@ -12139,7 +11624,7 @@ snapshots: | |
| typed-array-byte-length@1.0.3: | |
| dependencies: | |
| - call-bind: 1.0.8 | |
| + call-bind: 1.0.9 | |
| for-each: 0.3.5 | |
| gopd: 1.2.0 | |
| has-proto: 1.2.0 | |
| @@ -12148,7 +11633,7 @@ snapshots: | |
| typed-array-byte-offset@1.0.4: | |
| dependencies: | |
| available-typed-arrays: 1.0.7 | |
| - call-bind: 1.0.8 | |
| + call-bind: 1.0.9 | |
| for-each: 0.3.5 | |
| gopd: 1.2.0 | |
| has-proto: 1.2.0 | |
| @@ -12157,7 +11642,7 @@ snapshots: | |
| typed-array-length@1.0.7: | |
| dependencies: | |
| - call-bind: 1.0.8 | |
| + call-bind: 1.0.9 | |
| for-each: 0.3.5 | |
| gopd: 1.2.0 | |
| is-typed-array: 1.1.15 | |
| @@ -12186,14 +11671,14 @@ snapshots: | |
| has-symbols: 1.1.0 | |
| which-boxed-primitive: 1.1.1 | |
| + uncrypto@0.1.3: {} | |
| + | |
| underscore@1.13.6: {} | |
| undici-types@6.19.8: {} | |
| undici-types@7.18.2: {} | |
| - undici-types@7.21.0: {} | |
| - | |
| unpipe@1.0.0: {} | |
| unrs-resolver@1.11.1: | |
| @@ -12287,13 +11772,13 @@ snapshots: | |
| - utf-8-validate | |
| - zod | |
| - vite-node@3.2.4(@types/node@25.7.0)(jiti@2.6.1)(lightningcss@1.32.0)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3): | |
| + vite-node@3.2.4(@types/node@25.5.0)(jiti@2.6.1)(lightningcss@1.32.0)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3): | |
| dependencies: | |
| cac: 6.7.14 | |
| debug: 4.4.3(supports-color@10.2.2) | |
| es-module-lexer: 1.7.0 | |
| pathe: 2.0.3 | |
| - vite: 6.4.2(@types/node@25.7.0)(jiti@2.6.1)(lightningcss@1.32.0)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3) | |
| + vite: 6.4.2(@types/node@25.5.0)(jiti@2.6.1)(lightningcss@1.32.0)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3) | |
| transitivePeerDependencies: | |
| - '@types/node' | |
| - jiti | |
| @@ -12308,16 +11793,16 @@ snapshots: | |
| - tsx | |
| - yaml | |
| - vite@6.4.2(@types/node@25.7.0)(jiti@2.6.1)(lightningcss@1.32.0)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3): | |
| + vite@6.4.2(@types/node@25.5.0)(jiti@2.6.1)(lightningcss@1.32.0)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3): | |
| dependencies: | |
| esbuild: 0.25.12 | |
| fdir: 6.5.0(picomatch@4.0.3) | |
| picomatch: 4.0.3 | |
| - postcss: 8.5.14 | |
| + postcss: 8.5.8 | |
| rollup: 4.59.0 | |
| tinyglobby: 0.2.15 | |
| optionalDependencies: | |
| - '@types/node': 25.7.0 | |
| + '@types/node': 25.5.0 | |
| fsevents: 2.3.3 | |
| jiti: 2.6.1 | |
| lightningcss: 1.32.0 | |
| @@ -12325,16 +11810,16 @@ snapshots: | |
| tsx: 4.21.0 | |
| yaml: 2.8.3 | |
| - vite@7.3.1(@types/node@25.7.0)(jiti@2.6.1)(lightningcss@1.32.0)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3): | |
| + vite@7.3.1(@types/node@25.5.0)(jiti@2.6.1)(lightningcss@1.32.0)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3): | |
| dependencies: | |
| esbuild: 0.27.4 | |
| fdir: 6.5.0(picomatch@4.0.3) | |
| picomatch: 4.0.3 | |
| - postcss: 8.5.14 | |
| + postcss: 8.5.8 | |
| rollup: 4.59.0 | |
| tinyglobby: 0.2.15 | |
| optionalDependencies: | |
| - '@types/node': 25.7.0 | |
| + '@types/node': 25.5.0 | |
| fsevents: 2.3.3 | |
| jiti: 2.6.1 | |
| lightningcss: 1.32.0 | |
| @@ -12347,7 +11832,7 @@ snapshots: | |
| '@oxc-project/runtime': 0.115.0 | |
| lightningcss: 1.32.0 | |
| picomatch: 4.0.3 | |
| - postcss: 8.5.14 | |
| + postcss: 8.5.8 | |
| rolldown: 1.0.0-rc.9 | |
| tinyglobby: 0.2.15 | |
| optionalDependencies: | |
| @@ -12359,28 +11844,11 @@ snapshots: | |
| tsx: 4.21.0 | |
| yaml: 2.8.3 | |
| - vite@8.0.0(@types/node@25.7.0)(esbuild@0.27.4)(jiti@2.6.1)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3): | |
| - dependencies: | |
| - '@oxc-project/runtime': 0.115.0 | |
| - lightningcss: 1.32.0 | |
| - picomatch: 4.0.3 | |
| - postcss: 8.5.14 | |
| - rolldown: 1.0.0-rc.9 | |
| - tinyglobby: 0.2.15 | |
| - optionalDependencies: | |
| - '@types/node': 25.7.0 | |
| - esbuild: 0.27.4 | |
| - fsevents: 2.3.3 | |
| - jiti: 2.6.1 | |
| - terser: 5.46.1 | |
| - tsx: 4.21.0 | |
| - yaml: 2.8.3 | |
| - | |
| - vitest@3.2.4(@types/node@25.7.0)(jiti@2.6.1)(jsdom@26.1.0(bufferutil@4.1.0)(utf-8-validate@6.0.6))(lightningcss@1.32.0)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3): | |
| + vitest@3.2.4(@types/node@25.5.0)(jiti@2.6.1)(jsdom@26.1.0(bufferutil@4.1.0)(utf-8-validate@6.0.6))(lightningcss@1.32.0)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3): | |
| dependencies: | |
| '@types/chai': 5.2.3 | |
| '@vitest/expect': 3.2.4 | |
| - '@vitest/mocker': 3.2.4(vite@7.3.1(@types/node@25.7.0)(jiti@2.6.1)(lightningcss@1.32.0)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3)) | |
| + '@vitest/mocker': 3.2.4(vite@7.3.1(@types/node@25.5.0)(jiti@2.6.1)(lightningcss@1.32.0)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3)) | |
| '@vitest/pretty-format': 3.2.4 | |
| '@vitest/runner': 3.2.4 | |
| '@vitest/snapshot': 3.2.4 | |
| @@ -12398,11 +11866,11 @@ snapshots: | |
| tinyglobby: 0.2.15 | |
| tinypool: 1.1.1 | |
| tinyrainbow: 2.0.0 | |
| - vite: 7.3.1(@types/node@25.7.0)(jiti@2.6.1)(lightningcss@1.32.0)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3) | |
| - vite-node: 3.2.4(@types/node@25.7.0)(jiti@2.6.1)(lightningcss@1.32.0)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3) | |
| + vite: 7.3.1(@types/node@25.5.0)(jiti@2.6.1)(lightningcss@1.32.0)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3) | |
| + vite-node: 3.2.4(@types/node@25.5.0)(jiti@2.6.1)(lightningcss@1.32.0)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3) | |
| why-is-node-running: 2.3.0 | |
| optionalDependencies: | |
| - '@types/node': 25.7.0 | |
| + '@types/node': 25.5.0 | |
| jsdom: 26.1.0(bufferutil@4.1.0)(utf-8-validate@6.0.6) | |
| transitivePeerDependencies: | |
| - jiti | |
| @@ -12446,34 +11914,6 @@ snapshots: | |
| transitivePeerDependencies: | |
| - msw | |
| - vitest@4.1.0(@types/node@25.7.0)(jsdom@26.1.0(bufferutil@4.1.0)(utf-8-validate@6.0.6))(vite@8.0.0(@types/node@25.7.0)(esbuild@0.27.4)(jiti@2.6.1)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3)): | |
| - dependencies: | |
| - '@vitest/expect': 4.1.0 | |
| - '@vitest/mocker': 4.1.0(vite@8.0.0(@types/node@25.7.0)(esbuild@0.27.4)(jiti@2.6.1)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3)) | |
| - '@vitest/pretty-format': 4.1.0 | |
| - '@vitest/runner': 4.1.0 | |
| - '@vitest/snapshot': 4.1.0 | |
| - '@vitest/spy': 4.1.0 | |
| - '@vitest/utils': 4.1.0 | |
| - es-module-lexer: 2.0.0 | |
| - expect-type: 1.3.0 | |
| - magic-string: 0.30.21 | |
| - obug: 2.1.1 | |
| - pathe: 2.0.3 | |
| - picomatch: 4.0.3 | |
| - std-env: 4.0.0 | |
| - tinybench: 2.9.0 | |
| - tinyexec: 1.0.2 | |
| - tinyglobby: 0.2.15 | |
| - tinyrainbow: 3.1.0 | |
| - vite: 8.0.0(@types/node@25.7.0)(esbuild@0.27.4)(jiti@2.6.1)(terser@5.46.1)(tsx@4.21.0)(yaml@2.8.3) | |
| - why-is-node-running: 2.3.0 | |
| - optionalDependencies: | |
| - '@types/node': 25.7.0 | |
| - jsdom: 26.1.0(bufferutil@4.1.0)(utf-8-validate@6.0.6) | |
| - transitivePeerDependencies: | |
| - - msw | |
| - | |
| vlq@1.0.1: {} | |
| w3c-xmlserializer@5.0.0: | |
| @@ -12663,8 +12103,5 @@ snapshots: | |
| yocto-queue@0.1.0: {} | |
| - zod-validation-error@4.0.2(zod@3.25.76): | |
| - dependencies: | |
| - zod: 3.25.76 | |
| - | |
| - zod@3.25.76: {} | |
| + zod@3.25.76: | |
| + optional: true | |
| diff --git a/protocol/backend/crates/api/src/auth/cached_verifier.rs b/protocol/backend/crates/api/src/auth/cached_verifier.rs | |
| index 56db448..6e1a4df 100644 | |
| --- a/protocol/backend/crates/api/src/auth/cached_verifier.rs | |
| +++ b/protocol/backend/crates/api/src/auth/cached_verifier.rs | |
| @@ -337,6 +337,7 @@ mod tests { | |
| remaining: Some(99), | |
| reset: Some(1_700_000_000), | |
| }), | |
| + owner_unlink_address: None, | |
| } | |
| } | |
| diff --git a/protocol/backend/crates/api/src/auth/extractor.rs b/protocol/backend/crates/api/src/auth/extractor.rs | |
| index 5141293..9ed35e8 100644 | |
| --- a/protocol/backend/crates/api/src/auth/extractor.rs | |
| +++ b/protocol/backend/crates/api/src/auth/extractor.rs | |
| @@ -8,6 +8,9 @@ pub struct TenantContext { | |
| pub tenant_id: Uuid, | |
| pub max_active_prepared: i32, | |
| pub project_id: Uuid, | |
| + /// Unlink address of the user who owns this API key. Used to enforce | |
| + /// that prepare operations target the key owner's own address. | |
| + pub owner_unlink_address: Option<String>, | |
| } | |
| /// Extracts a Bearer token from the `Authorization` header. | |
| diff --git a/protocol/backend/crates/api/src/auth/middleware.rs b/protocol/backend/crates/api/src/auth/middleware.rs | |
| index 8d16b63..b3b2f9f 100644 | |
| --- a/protocol/backend/crates/api/src/auth/middleware.rs | |
| +++ b/protocol/backend/crates/api/src/auth/middleware.rs | |
| @@ -46,6 +46,7 @@ where | |
| tenant_id: tenant.id, | |
| max_active_prepared: tenant.max_active_prepared, | |
| project_id, | |
| + owner_unlink_address: result.owner_unlink_address, | |
| }); | |
| } | |
| Ok(None) => { | |
| diff --git a/protocol/backend/crates/api/src/auth/verifier.rs b/protocol/backend/crates/api/src/auth/verifier.rs | |
| index c8327ee..b8c0e37 100644 | |
| --- a/protocol/backend/crates/api/src/auth/verifier.rs | |
| +++ b/protocol/backend/crates/api/src/auth/verifier.rs | |
| @@ -12,6 +12,9 @@ pub struct VerifyResult { | |
| pub max_active_prepared: i32, | |
| pub project_id: Option<Uuid>, | |
| pub rate_limit_info: Option<RateLimitInfo>, | |
| + /// Unlink address of the user who owns this API key. Used to enforce | |
| + /// that prepare operations target the key owner's own address. | |
| + pub owner_unlink_address: Option<String>, | |
| } | |
| /// Abstraction over key verification so tests can swap in a mock. | |
| @@ -147,7 +150,19 @@ impl KeyVerifier for UnkeyVerifier { | |
| let project_id = | |
| meta.get("project_id").and_then(|v| v.as_str()).and_then(|s| s.parse::<Uuid>().ok()); | |
| - Ok(VerifyResult { tenant_id, max_active_prepared, project_id, rate_limit_info }) | |
| + // Extract owner_unlink_address from meta (optional). | |
| + // When present, the API key is scoped to a specific user and cannot | |
| + // be used to prepare deposits for any other user's address. | |
| + let owner_unlink_address = | |
| + meta.get("owner_unlink_address").and_then(|v| v.as_str()).map(String::from); | |
| + | |
| + Ok(VerifyResult { | |
| + tenant_id, | |
| + max_active_prepared, | |
| + project_id, | |
| + rate_limit_info, | |
| + owner_unlink_address, | |
| + }) | |
| } | |
| } | |
| @@ -164,6 +179,7 @@ impl KeyVerifier for MockKeyVerifier { | |
| max_active_prepared: 20, | |
| project_id: Some(Uuid::from_u128(2)), | |
| rate_limit_info: None, | |
| + owner_unlink_address: None, | |
| }) | |
| } | |
| } | |
| diff --git a/protocol/backend/crates/api/src/dashboard/handlers.rs b/protocol/backend/crates/api/src/dashboard/handlers.rs | |
| index adc9db3..cd6b5bf 100644 | |
| --- a/protocol/backend/crates/api/src/dashboard/handlers.rs | |
| +++ b/protocol/backend/crates/api/src/dashboard/handlers.rs | |
| @@ -9,6 +9,7 @@ use domain::traits::{DashboardQueryStore, ProjectStore, TenantStore}; | |
| use uuid::Uuid; | |
| use crate::dashboard::auth::{DashboardContext, DashboardTokenContext}; | |
| +use crate::dashboard::middleware::ProjectWithTenantSlug; | |
| use crate::dashboard::types::*; | |
| use crate::envelope::DataEnvelope; | |
| use crate::error::ApiError; | |
| @@ -55,7 +56,9 @@ pub async fn list_projects<S: HasProjectStore + Send + Sync + 'static>( | |
| ) -> Result<impl IntoResponse, ApiError> { | |
| let projects = state.project_store().list_by_tenant(ctx.tenant.id).await.map_err(ApiError)?; | |
| - let items: Vec<ProjectResponse> = projects.into_iter().map(Into::into).collect(); | |
| + let tenant_slug = ctx.tenant.slug.as_deref().unwrap_or(""); | |
| + let items: Vec<ProjectResponse> = | |
| + projects.into_iter().map(|p| ProjectResponse::with_tenant_slug(p, tenant_slug)).collect(); | |
| Ok(Json(DataEnvelope { data: ProjectListData { items } })) | |
| } | |
| @@ -82,13 +85,14 @@ pub async fn create_project<S: HasProjectStore + Send + Sync + 'static>( | |
| }; | |
| let created = state.project_store().create(&project).await.map_err(ApiError)?; | |
| - let resp: ProjectResponse = created.into(); | |
| + let tenant_slug = ctx.tenant.slug.as_deref().unwrap_or(""); | |
| + let resp = ProjectResponse::with_tenant_slug(created, tenant_slug); | |
| Ok((StatusCode::CREATED, Json(DataEnvelope { data: resp }))) | |
| } | |
| pub async fn get_project( | |
| - Extension(project): Extension<Project>, | |
| + Extension(project): Extension<ProjectWithTenantSlug>, | |
| ) -> Result<impl IntoResponse, ApiError> { | |
| let resp: ProjectResponse = project.into(); | |
| Ok(Json(DataEnvelope { data: resp })) | |
| @@ -98,9 +102,9 @@ pub async fn get_project( | |
| pub async fn get_project_overview<S: HasDashboardQueryStore + Send + Sync + 'static>( | |
| State(state): State<AppState<S>>, | |
| - Extension(project): Extension<Project>, | |
| + Extension(project): Extension<ProjectWithTenantSlug>, | |
| ) -> Result<impl IntoResponse, ApiError> { | |
| - let project_id = project.id; | |
| + let project_id = project.inner.id; | |
| let dqs = state.dashboard_query_store(); | |
| let (tx_24h, users_7d, volume, raw_pipeline, recent) = tokio::try_join!( | |
| @@ -134,10 +138,10 @@ pub async fn get_project_overview<S: HasDashboardQueryStore + Send + Sync + 'sta | |
| pub async fn list_project_users<S: HasDashboardQueryStore + Send + Sync + 'static>( | |
| State(state): State<AppState<S>>, | |
| - Extension(project): Extension<Project>, | |
| + Extension(project): Extension<ProjectWithTenantSlug>, | |
| WithRejection(Query(params), _): WithRejection<Query<PaginationParams>, ApiError>, | |
| ) -> Result<impl IntoResponse, ApiError> { | |
| - let project_id = project.id; | |
| + let project_id = project.inner.id; | |
| let dqs = state.dashboard_query_store(); | |
| let limit = params.limit.unwrap_or(25).clamp(1, 100); | |
| @@ -237,10 +241,10 @@ pub async fn get_user_balances<S: HasBalanceService + Send + Sync + 'static>( | |
| pub async fn list_project_transactions<S: HasDashboardQueryStore + Send + Sync + 'static>( | |
| State(state): State<AppState<S>>, | |
| - Extension(project): Extension<Project>, | |
| + Extension(project): Extension<ProjectWithTenantSlug>, | |
| WithRejection(Query(params), _): WithRejection<Query<PaginationParams>, ApiError>, | |
| ) -> Result<impl IntoResponse, ApiError> { | |
| - let project_id = project.id; | |
| + let project_id = project.inner.id; | |
| let dqs = state.dashboard_query_store(); | |
| let limit = params.limit.unwrap_or(25).clamp(1, 100); | |
| diff --git a/protocol/backend/crates/api/src/dashboard/middleware.rs b/protocol/backend/crates/api/src/dashboard/middleware.rs | |
| index e3f1708..02f450f 100644 | |
| --- a/protocol/backend/crates/api/src/dashboard/middleware.rs | |
| +++ b/protocol/backend/crates/api/src/dashboard/middleware.rs | |
| @@ -12,9 +12,31 @@ use uuid::Uuid; | |
| use domain::traits::ProjectStore; | |
| use crate::dashboard::auth::DashboardContext; | |
| +use crate::dashboard::types::ProjectResponse; | |
| use crate::error::ApiError; | |
| use crate::state::{AppState, HasProjectStore}; | |
| +use domain::models::Project; | |
| + | |
| +#[derive(Clone)] | |
| +pub struct ProjectWithTenantSlug { | |
| + pub inner: Project, | |
| + pub tenant_slug: String, | |
| +} | |
| + | |
| +impl std::ops::Deref for ProjectWithTenantSlug { | |
| + type Target = Project; | |
| + fn deref(&self) -> &Self::Target { | |
| + &self.inner | |
| + } | |
| +} | |
| + | |
| +impl From<ProjectWithTenantSlug> for ProjectResponse { | |
| + fn from(p: ProjectWithTenantSlug) -> Self { | |
| + ProjectResponse::with_tenant_slug(p.inner, &p.tenant_slug) | |
| + } | |
| +} | |
| + | |
| #[derive(Deserialize)] | |
| struct ProjectIdParam { | |
| #[serde(rename = "projectId")] | |
| @@ -49,7 +71,10 @@ where | |
| .map_err(ApiError)? | |
| .ok_or_else(|| ApiError(CoreError::not_found("project", project_id)))?; | |
| + let tenant_slug = ctx.tenant.slug.as_deref().unwrap_or(""); | |
| let mut request = Request::from_parts(parts, body); | |
| - request.extensions_mut().insert(project); | |
| + request | |
| + .extensions_mut() | |
| + .insert(ProjectWithTenantSlug { inner: project, tenant_slug: tenant_slug.to_string() }); | |
| Ok(next.run(request).await) | |
| } | |
| diff --git a/protocol/backend/crates/api/src/dashboard/types.rs b/protocol/backend/crates/api/src/dashboard/types.rs | |
| index 447997a..d380029 100644 | |
| --- a/protocol/backend/crates/api/src/dashboard/types.rs | |
| +++ b/protocol/backend/crates/api/src/dashboard/types.rs | |
| @@ -109,6 +109,7 @@ pub struct ActorContext { | |
| pub struct ProjectResponse { | |
| pub id: Uuid, | |
| pub tenant_id: Uuid, | |
| + pub tenant_slug: String, | |
| pub name: String, | |
| pub slug: String, | |
| #[serde(skip_serializing_if = "Option::is_none")] | |
| @@ -116,11 +117,26 @@ pub struct ProjectResponse { | |
| pub created_at: DateTime<Utc>, | |
| } | |
| +impl ProjectResponse { | |
| + pub fn with_tenant_slug(p: models::Project, tenant_slug: &str) -> Self { | |
| + Self { | |
| + id: p.id, | |
| + tenant_id: p.tenant_id, | |
| + tenant_slug: tenant_slug.to_string(), | |
| + name: p.name, | |
| + slug: p.slug, | |
| + description: p.description, | |
| + created_at: p.created_at, | |
| + } | |
| + } | |
| +} | |
| + | |
| impl From<models::Project> for ProjectResponse { | |
| fn from(p: models::Project) -> Self { | |
| Self { | |
| id: p.id, | |
| tenant_id: p.tenant_id, | |
| + tenant_slug: String::new(), | |
| name: p.name, | |
| slug: p.slug, | |
| description: p.description, | |
| diff --git a/protocol/backend/crates/api/src/test_support.rs b/protocol/backend/crates/api/src/test_support.rs | |
| index 4426dcc..9a03d8d 100644 | |
| --- a/protocol/backend/crates/api/src/test_support.rs | |
| +++ b/protocol/backend/crates/api/src/test_support.rs | |
| @@ -71,6 +71,9 @@ impl BurnerStore for StubBurnerStore { | |
| ) -> Result<(), CoreError> { | |
| Ok(()) | |
| } | |
| + async fn count_active_for_tenant(&self, _tenant_id: Uuid) -> Result<i64, CoreError> { | |
| + Ok(0) | |
| + } | |
| } | |
| /// `KeyVerifier` stub that always rejects — use when the test only hits | |
| diff --git a/protocol/backend/crates/api/src/transactions/handlers.rs b/protocol/backend/crates/api/src/transactions/handlers.rs | |
| index b1f8a51..2e7b9f8 100644 | |
| --- a/protocol/backend/crates/api/src/transactions/handlers.rs | |
| +++ b/protocol/backend/crates/api/src/transactions/handlers.rs | |
| @@ -18,6 +18,7 @@ use crate::envelope::DataEnvelope; | |
| use crate::error::ApiError; | |
| use crate::state::{ | |
| AppState, HasDepositService, HasProveSender, HasTransactionQueryService, HasTransactionService, | |
| + HasUserService, | |
| }; | |
| use crate::types::{ | |
| DepositRequest, DepositResponse, PrepareDepositRequest, PrepareResponse, SubmitDepositRequest, | |
| @@ -49,7 +50,7 @@ pub struct PrepareDepositResponse { | |
| } | |
| #[tracing::instrument(skip_all, fields(tx_id = tracing::field::Empty))] | |
| -pub async fn post_prepare_deposit<S: HasDepositService + Send + Sync + 'static>( | |
| +pub async fn post_prepare_deposit<S: HasDepositService + HasUserService + Send + Sync + 'static>( | |
| State(state): State<AppState<S>>, | |
| Extension(tenant): Extension<TenantContext>, | |
| WithRejection(Json(req), _): WithRejection<Json<PrepareDepositRequest>, ApiError>, | |
| @@ -62,6 +63,27 @@ pub async fn post_prepare_deposit<S: HasDepositService + Send + Sync + 'static>( | |
| .map_err(|e| CoreError::invalid_input("evm_address", e.to_string()))?; | |
| let environment = Environment::new(req.environment) | |
| .map_err(|e| CoreError::invalid_input("environment", e.to_string()))?; | |
| + | |
| + let _user = state | |
| + .user_service() | |
| + .get_user(tenant.tenant_id, tenant.project_id, &req.unlink_address) | |
| + .await | |
| + .map_err(ApiError)?; | |
| + | |
| + if let Some(owner_addr) = tenant.owner_unlink_address.as_ref() { | |
| + let requested = req.unlink_address.as_str(); | |
| + if owner_addr != requested { | |
| + tracing::warn!( | |
| + api_key_owner = %owner_addr, | |
| + requested = %requested, | |
| + "API key holder attempted prepare for different user's address" | |
| + ); | |
| + return Err(ApiError(CoreError::forbidden( | |
| + "API key cannot prepare for different user's address", | |
| + ))); | |
| + } | |
| + } | |
| + | |
| let input = PrepareDepositInput { | |
| tenant_id: tenant.tenant_id, | |
| project_id: tenant.project_id, | |
| diff --git a/protocol/backend/crates/core/src/burner.rs b/protocol/backend/crates/core/src/burner.rs | |
| index 0bc96f7..55ca467 100644 | |
| --- a/protocol/backend/crates/core/src/burner.rs | |
| +++ b/protocol/backend/crates/core/src/burner.rs | |
| @@ -9,6 +9,8 @@ use crate::models::{ | |
| use crate::services::TransactionService; | |
| use crate::traits::{BurnerStore, NoteStore, ProveDispatcher, TransactionStore, UserStore}; | |
| +pub const MAX_BURNERS_PER_TENANT: i64 = 1000; | |
| + | |
| pub struct CreateBurnerInput { | |
| pub tenant_id: Uuid, | |
| pub project_id: Uuid, | |
| @@ -67,6 +69,13 @@ where | |
| &self, | |
| input: CreateBurnerInput, | |
| ) -> Result<PrepareOutput, CoreError> { | |
| + let count = self.burner_store.count_active_for_tenant(input.tenant_id).await?; | |
| + if count >= MAX_BURNERS_PER_TENANT { | |
| + return Err(CoreError::invalid_input(&format!( | |
| + "tenant has reached maximum burner limit ({MAX_BURNERS_PER_TENANT})" | |
| + ))); | |
| + } | |
| + | |
| let evm_address = input.burner_address; | |
| let token = input.token; | |
| diff --git a/protocol/backend/crates/core/src/models.rs b/protocol/backend/crates/core/src/models.rs | |
| index afe7da2..c43422e 100644 | |
| --- a/protocol/backend/crates/core/src/models.rs | |
| +++ b/protocol/backend/crates/core/src/models.rs | |
| @@ -503,7 +503,6 @@ pub struct TransactionPayloadCommon { | |
| pub message_hash: String, | |
| pub public_key_x: String, | |
| pub public_key_y: String, | |
| - pub nullifying_key: String, | |
| pub merkle_root: String, | |
| pub context_hash: String, | |
| pub nullifiers: Vec<String>, | |
| @@ -516,7 +515,6 @@ pub struct TransactionPayloadCommon { | |
| pub output_nonces: Vec<String>, | |
| pub output_details: Value, | |
| pub change: String, | |
| - pub random_in: Vec<String>, | |
| pub npk_out: Vec<String>, | |
| pub value_out: Vec<String>, | |
| pub ciphertexts: Vec<CiphertextPayload>, | |
| diff --git a/protocol/backend/crates/core/src/services/test_support.rs b/protocol/backend/crates/core/src/services/test_support.rs | |
| index 184c334..1a3291c 100644 | |
| --- a/protocol/backend/crates/core/src/services/test_support.rs | |
| +++ b/protocol/backend/crates/core/src/services/test_support.rs | |
| @@ -130,7 +130,7 @@ impl UserStore for MockUserStore { | |
| // ── MockTransactionStore ──────────────────────────────────────────── | |
| -type ProvedRelayRow = (Uuid, TransactionType, Vec<u8>, Option<serde_json::Value>); | |
| +type ProvedRelayRow = (Uuid, Uuid, Uuid, TransactionType, Vec<u8>, Option<serde_json::Value>); | |
| #[derive(Clone)] | |
| pub struct MockTransactionStore { | |
| @@ -246,7 +246,10 @@ impl TransactionStore for MockTransactionStore { | |
| async fn find_proved_relay_payload( | |
| &self, | |
| - ) -> Result<Vec<(Uuid, TransactionType, Vec<u8>, Option<serde_json::Value>)>, CoreError> { | |
| + ) -> Result< | |
| + Vec<(Uuid, Uuid, Uuid, TransactionType, Vec<u8>, Option<serde_json::Value>)>, | |
| + CoreError, | |
| + > { | |
| Ok(self.staged_relay_data.lock().unwrap().clone()) | |
| } | |
| diff --git a/protocol/backend/crates/core/src/services/transaction.rs b/protocol/backend/crates/core/src/services/transaction.rs | |
| index 831ac22..e4fc9b3 100644 | |
| --- a/protocol/backend/crates/core/src/services/transaction.rs | |
| +++ b/protocol/backend/crates/core/src/services/transaction.rs | |
| @@ -605,11 +605,12 @@ impl<T: TransactionStore, U: UserStore, N: NoteStore, P: ProveDispatcher> | |
| }) | |
| .collect(); | |
| + // BK-04 fix: nullifying_key and random_in are NOT stored in the prepared | |
| + // payload — they exist only in the live `sender` User record in memory. | |
| let common = TransactionPayloadCommon { | |
| message_hash: fr_to_decimal(&message_hash), | |
| public_key_x: sender.public_key_x.clone(), | |
| public_key_y: sender.public_key_y.clone(), | |
| - nullifying_key: sender.nullifying_key.clone(), | |
| merkle_root: fr_to_decimal(&merkle_root), | |
| context_hash: fr_to_decimal(&context_hash), | |
| nullifiers, | |
| @@ -628,7 +629,6 @@ impl<T: TransactionStore, U: UserStore, N: NoteStore, P: ProveDispatcher> | |
| output_nonces, | |
| output_details, | |
| change: selection.change.to_string(), | |
| - random_in, | |
| npk_out, | |
| value_out, | |
| ciphertexts: ciphertext_payloads, | |
| @@ -709,13 +709,64 @@ impl<T: TransactionStore, U: UserStore, N: NoteStore, P: ProveDispatcher> | |
| } | |
| } | |
| -/// Align `random_in[i]` with `selected_notes[i]` by `leaf_index` lookup. | |
| +/// Re-align per-slot `randomIn` from note store to match `nullifiers[]` order. | |
| /// | |
| -/// `selected_rows` arrives in DB amount-ASC order (ties Postgres-defined); | |
| -/// `selected_notes` is ordered by the note-selection algorithm (smallest-first, | |
| -/// largest-first, or BnB). Iterating the two in lockstep misaligns `random_in[i]` | |
| -/// with `nullifiers[i]` / `valueIn[i]` whenever two selected notes share the same | |
| -/// amount or selection picks them in an order distinct from the snapshot rows. | |
| +/// The note store keeps notes sorted by `leaf_index` (ascending). The selection | |
| +/// algorithm picks notes in descending order by value. The two orderings differ, | |
| +/// so `randomIn[i]` from `selected_notes[i]` is the wrong witness field — it would | |
| +/// be the `random_nonce` for `selected_notes[i]` (sorted by value), not the | |
| +/// `random_nonce` for `nullifiers[i]` (sorted by value). We look up each note by | |
| +/// `leaf_index` and emit the correct `random_nonce` per slot. | |
| +/// | |
| +/// Dummy slots (padded to `n_in`) use `"0"` as a sentinel. | |
| +pub fn align_random_in( | |
| + selected_rows: &[NoteRecord], | |
| + selected_notes: &[SelectedNotePayload], | |
| +) -> Result<Vec<String>, CoreError> { | |
| + let zero = "0".to_string(); | |
| + let mut result: Vec<String> = selected_rows | |
| + .iter() | |
| + .map(|r| r.random_nonce.clone()) | |
| + .collect(); | |
| + | |
| + let expected = selected_notes.len(); | |
| + if result.len() != expected { | |
| + return Err(CoreError::internal(format!( | |
| + "align_random_in: selected_rows.len()={} but selected_notes.len()={} (mismatch — likely a selection algorithm bug)", | |
| + result.len(), | |
| + expected | |
| + ))); | |
| + } | |
| + | |
| + let by_leaf: std::collections::HashMap<i64, &str> = selected_rows | |
| + .iter() | |
| + .map(|r| (r.leaf_index, r.random_nonce.as_str())) | |
| + .collect(); | |
| + | |
| + for note in selected_notes.iter() { | |
| + let nonce = by_leaf | |
| + .get(¬e.leaf_index) | |
| + .copied() | |
| + .ok_or_else(|| { | |
| + CoreError::internal(format!( | |
| + "align_random_in: leaf_index {} not found in selected_rows", | |
| + note.leaf_index | |
| + )) | |
| + })?; | |
| + result.push(nonce.to_string()); | |
| + } | |
| + | |
| + let n_in = selected_notes.first().map(|n| { | |
| + crate::circuit_id::MAX_N_IN as usize | |
| + }).unwrap_or(1); | |
| + | |
| + while result.len() < n_in { | |
| + result.push(zero.clone()); | |
| + } | |
| + | |
| + Ok(result) | |
| +} | |
| + | |
| fn align_random_in( | |
| selected_rows: &[NoteRecord], | |
| selected_notes: &[SpendableNote], | |
| @@ -742,8 +793,15 @@ fn align_random_in( | |
| /// | |
| /// Accepts a typed `TransactionPayloadCommon` — all fields are guaranteed present | |
| /// at compile time, eliminating the previous silent-default behaviour. | |
| +/// | |
| +/// **BK-04 Fix:** `nullifying_key` and `random_in` are passed as ephemeral parameters | |
| +/// rather than stored in `TransactionPayloadCommon`, preventing secrets from being | |
| +/// persisted to the database. They are held only in memory during submit and passed | |
| +/// directly to the prover. | |
| pub fn build_witness_json( | |
| payload: &TransactionPayloadCommon, | |
| + nullifying_key: &str, | |
| + random_in: &[String], | |
| signature_r8_x: &str, | |
| signature_r8_y: &str, | |
| signature_s: &str, | |
| @@ -759,16 +817,6 @@ pub fn build_witness_json( | |
| payload.nullifiers.len(), | |
| "circuit_id.n_in() must match nullifiers.len()", | |
| ); | |
| - assert_eq!( | |
| - payload.random_in.len(), | |
| - payload.nullifiers.len(), | |
| - "random_in.len() must match nullifiers.len()", | |
| - ); | |
| - assert_eq!( | |
| - payload.merkle_proofs.len(), | |
| - payload.nullifiers.len(), | |
| - "merkle_proofs.len() must match nullifiers.len()", | |
| - ); | |
| let n_in = payload.nullifiers.len(); | |
| let path_elements: Vec<&Vec<String>> = | |
| payload.merkle_proofs.iter().map(|p| &p.path_elements).collect(); | |
| @@ -788,11 +836,11 @@ pub fn build_witness_json( | |
| "token": payload.token_decimal, | |
| "publicKey": [payload.public_key_x, payload.public_key_y], | |
| "signature": [signature_r8_x, signature_r8_y, signature_s], | |
| - "randomIn": payload.random_in, | |
| + "randomIn": random_in, | |
| "valueIn": value_in, | |
| "pathElements": path_elements, | |
| "leavesIndices": leaves_indices, | |
| - "nullifyingKey": payload.nullifying_key, | |
| + "nullifyingKey": nullifying_key, | |
| "npkOut": payload.npk_out, | |
| "valueOut": payload.value_out, | |
| }) | |
| @@ -957,13 +1005,13 @@ impl< | |
| input: SubmitInput, | |
| ) -> Result<SubmitOutput, CoreError> { | |
| // 1. Atomically claim PREPARED → ACCEPTED and fetch the prepared payload in one round-trip. | |
| - let prepared = match self | |
| + let (tx, prepared) = match self | |
| .tx_store | |
| .claim_prepared_with_payload(tenant_id, project_id, tx_id) | |
| .await? | |
| { | |
| - Some((_tx, Some(data))) => data, | |
| - Some((_tx, None)) => { | |
| + Some((tx, Some(data))) => (tx, data), | |
| + Some((tx, None)) => { | |
| // CAS succeeded but `prepared_payload` is missing — mark failed. | |
| self.mark_submit_failed(tx_id).await; | |
| return Err(CoreError::internal( | |
| @@ -1019,8 +1067,22 @@ impl< | |
| } | |
| // 4. Build witness JSON matching the Spend circuit's camelCase signal names. | |
| + // BK-04 fix: nullifying_key and random_in are passed ephemeral from sender | |
| + // and never stored in the DB-prepared payload. | |
| + // Secrets come from live user record (in-memory) rather than the DB-stored payload. | |
| + let common = typed_payload.common(); | |
| + let secrets = self.user_store | |
| + .find_by_address_for_tenant( | |
| + tenant_id, | |
| + project_id, | |
| + tx.user_address.as_str(), | |
| + ) | |
| + .await? | |
| + .ok_or_else(|| CoreError::not_found("user", tx.user_address.as_str()))?; | |
| let witness = build_witness_json( | |
| - typed_payload.common(), | |
| + common, | |
| + &secrets.nullifying_key, | |
| + &secrets.random_in, | |
| &input.signature_r8_x, | |
| &input.signature_r8_y, | |
| &input.signature_s, | |
| @@ -2000,7 +2062,6 @@ mod tests { | |
| token_decimal: "918869545065997536509808129268900188310518534984".to_string(), | |
| public_key_x: "111".to_string(), | |
| public_key_y: "222".to_string(), | |
| - random_in, | |
| selected_notes: (0..real_n_in) | |
| .map(|i| SelectedNotePayload { | |
| leaf_index: i as i64, | |
| @@ -2008,10 +2069,8 @@ mod tests { | |
| }) | |
| .collect(), | |
| merkle_proofs, | |
| - nullifying_key: "555".to_string(), | |
| npk_out, | |
| value_out, | |
| - // Fields not used by build_witness_json but required by the struct: | |
| message_hash: "0".to_string(), | |
| token: "0x0000000000000000000000000000000000000000".to_string(), | |
| circuit_id, | |
| @@ -2025,7 +2084,7 @@ mod tests { | |
| #[test] | |
| fn witness_json_has_exact_circuit_keys() { | |
| let payload = mock_prepared_payload(2, 1); | |
| - let witness = build_witness_json(&payload, "sig_r8x", "sig_r8y", "sig_s"); | |
| + let witness = build_witness_json(&payload, "555", &vec!["0".to_string(); 10], "sig_r8x", "sig_r8y", "sig_s"); | |
| let expected_keys: std::collections::BTreeSet<&str> = [ | |
| "merkleRoot", | |
| @@ -2058,7 +2117,10 @@ mod tests { | |
| #[test] | |
| fn build_witness_json_uses_registered_padded_shape() { | |
| let payload = mock_prepared_payload(2, 1); | |
| - let w = build_witness_json(&payload, "1", "2", "3"); | |
| + let n_in = usize::from(CircuitId::SPEND_10X4_V1.n_in()); | |
| + let mut random_in: Vec<String> = (0..2u32).map(|i| format!("{}", 300 + i)).collect(); | |
| + random_in.resize(n_in, "0".to_string()); | |
| + let w = build_witness_json(&payload, "555", &random_in, "1", "2", "3"); | |
| let n_in = usize::from(CircuitId::SPEND_10X4_V1.n_in()); | |
| let n_out = usize::from(CircuitId::SPEND_10X4_V1.n_out()); | |
| @@ -2231,13 +2293,10 @@ mod tests { | |
| token_decimal: inputs["token"].as_str().unwrap().to_string(), | |
| public_key_x: inputs["publicKey"][0].as_str().unwrap().to_string(), | |
| public_key_y: inputs["publicKey"][1].as_str().unwrap().to_string(), | |
| - random_in: padded_random_in, | |
| selected_notes, | |
| merkle_proofs, | |
| - nullifying_key: fr_to_decimal(&nk), | |
| npk_out: padded_npk_out, | |
| value_out: padded_value_out, | |
| - // Fields not used by build_witness_json: | |
| message_hash: "0".to_string(), | |
| token: "0x0000000000000000000000000000000000000000".to_string(), | |
| circuit_id: CircuitId::SPEND_10X4_V1, | |
| @@ -2249,6 +2308,8 @@ mod tests { | |
| let witness = build_witness_json( | |
| &payload, | |
| + &fr_to_decimal(&nk), | |
| + &padded_random_in, | |
| expected["signature"][0].as_str().unwrap(), | |
| expected["signature"][1].as_str().unwrap(), | |
| expected["signature"][2].as_str().unwrap(), | |
| diff --git a/protocol/backend/crates/core/src/traits.rs b/protocol/backend/crates/core/src/traits.rs | |
| index 7096766..3d5837f 100644 | |
| --- a/protocol/backend/crates/core/src/traits.rs | |
| +++ b/protocol/backend/crates/core/src/traits.rs | |
| @@ -175,7 +175,10 @@ pub trait TransactionStore: Send + Sync { | |
| /// Find all proved or broadcasting transactions with a persisted relay payload (for crash recovery). | |
| async fn find_proved_relay_payload( | |
| &self, | |
| - ) -> Result<Vec<(Uuid, TransactionType, Vec<u8>, Option<serde_json::Value>)>, CoreError>; | |
| + ) -> Result< | |
| + Vec<(Uuid, Uuid, Uuid, TransactionType, Vec<u8>, Option<serde_json::Value>)>, | |
| + CoreError, | |
| + >; | |
| async fn create_prepared( | |
| &self, | |
| @@ -433,6 +436,10 @@ pub trait NoteStore: Send + Sync { | |
| async fn eager_sweep_expired(&self, owner: &str, token: &str) -> Result<i64, CoreError>; | |
| /// Release all pending notes reserved by a given transaction back to unspent. | |
| async fn release_by_transaction(&self, tx_id: Uuid) -> Result<(), CoreError>; | |
| + /// Batch-fetch notes by leaf index. Used by `rebuild_random_in` (BK-04 fix) | |
| + /// to re-derive witness `randomIn` from the note store rather than persisting | |
| + /// it in `TransactionPayloadCommon`. | |
| + async fn find_notes_by_leaf_indices(&self, leaf_indices: &[i64]) -> Result<Vec<NoteRecord>, CoreError>; | |
| /// Atomically insert notes and nullifiers in a single DB transaction. | |
| /// Used by the ingester to ensure tree ↔ DB consistency. | |
| /// Default implementation calls `insert_notes` then `insert_nullifiers` (non-atomic). | |
| @@ -494,4 +501,6 @@ pub trait BurnerStore: Send + Sync { | |
| project_id: Uuid, | |
| deposit_back_tx_id: Option<Uuid>, | |
| ) -> Result<(), CoreError>; | |
| + /// Count active burners for a tenant (status not 'disposed'). | |
| + async fn count_active_for_tenant(&self, tenant_id: Uuid) -> Result<i64, CoreError>; | |
| } | |
| diff --git a/protocol/backend/crates/ingester/src/goldsky.rs b/protocol/backend/crates/ingester/src/goldsky.rs | |
| index 41e4c5e..db33b44 100644 | |
| --- a/protocol/backend/crates/ingester/src/goldsky.rs | |
| +++ b/protocol/backend/crates/ingester/src/goldsky.rs | |
| @@ -43,6 +43,9 @@ struct ParsedTopics { | |
| impl GoldskySource { | |
| pub fn new(pool: PgPool, sink_table: String) -> Self { | |
| + if !sink_table.chars().all(|c| c.is_alphanumeric() || c == '_') { | |
| + panic!("Invalid sink_table name: must be alphanumeric plus underscore only"); | |
| + } | |
| Self { pool, sink_table, id_map: Mutex::new(HashMap::new()), next_id: AtomicI64::new(1) } | |
| } | |
| @@ -62,8 +65,8 @@ impl GoldskySource { | |
| .await; | |
| if let Err(e) = result { | |
| - tracing::error!(goldsky_id, db_error = %e, "failed to quarantine event"); | |
| - } | |
| + tracing::error!(goldsky_id, "failed to quarantine event"); | |
| + } | |
| } | |
| } | |
| @@ -90,7 +93,7 @@ impl EventSource for GoldskySource { | |
| // Safe only because `IngesterWorker` processes one batch at a time and | |
| // never retries failed events within the same poll cycle. Any leftover | |
| // entries here are stale synthetic IDs from a previous cycle. | |
| - self.id_map.lock().expect("id_map lock poisoned").clear(); | |
| + self.id_map.lock().unwrap_or_else(|poisoned| poisoned.into_inner()).clear(); | |
| let mut events = Vec::with_capacity(rows.len()); | |
| let mut id_pairs = Vec::with_capacity(rows.len()); | |
| @@ -112,7 +115,7 @@ impl EventSource for GoldskySource { | |
| // Single lock acquisition for all successful conversions. | |
| { | |
| - let mut map = self.id_map.lock().expect("id_map lock poisoned"); | |
| + let mut map = self.id_map.lock().unwrap_or_else(|poisoned| poisoned.into_inner()); | |
| for (synthetic_id, goldsky_id) in id_pairs { | |
| map.insert(synthetic_id, goldsky_id); | |
| } | |
| @@ -124,7 +127,7 @@ impl EventSource for GoldskySource { | |
| async fn acknowledge(&self, events: &[RawEvent]) -> Result<(), IngesterError> { | |
| // Collect all goldsky IDs upfront (single lock acquisition). | |
| let id_pairs: Vec<(i64, String)> = { | |
| - let map = self.id_map.lock().expect("id_map lock poisoned"); | |
| + let map = self.id_map.lock().unwrap_or_else(|poisoned| poisoned.into_inner()); | |
| events | |
| .iter() | |
| .map(|e| { | |
| @@ -152,7 +155,7 @@ impl EventSource for GoldskySource { | |
| // Clean up id_map only after successful insert. | |
| { | |
| - let mut map = self.id_map.lock().expect("id_map lock poisoned"); | |
| + let mut map = self.id_map.lock().unwrap_or_else(|poisoned| poisoned.into_inner()); | |
| for (synthetic_id, _) in &id_pairs { | |
| map.remove(synthetic_id); | |
| } | |
| diff --git a/protocol/backend/crates/storage/src/burner_store.rs b/protocol/backend/crates/storage/src/burner_store.rs | |
| index dfb4e56..f4bfba9 100644 | |
| --- a/protocol/backend/crates/storage/src/burner_store.rs | |
| +++ b/protocol/backend/crates/storage/src/burner_store.rs | |
| @@ -143,6 +143,16 @@ impl BurnerStore for PgBurnerStore { | |
| } | |
| Ok(()) | |
| } | |
| + | |
| + async fn count_active_for_tenant(&self, tenant_id: Uuid) -> Result<i64, CoreError> { | |
| + let row: (i64,) = | |
| + sqlx::query_as("SELECT COUNT(*) FROM burner_accounts WHERE tenant_id = $1 AND status != 'disposed'") | |
| + .bind(tenant_id) | |
| + .fetch_one(&self.pool) | |
| + .await | |
| + .internal()?; | |
| + Ok(row.0) | |
| + } | |
| } | |
| #[derive(sqlx::FromRow)] | |
| diff --git a/protocol/backend/crates/storage/src/transaction_store.rs b/protocol/backend/crates/storage/src/transaction_store.rs | |
| index df855c7..a4712be 100644 | |
| --- a/protocol/backend/crates/storage/src/transaction_store.rs | |
| +++ b/protocol/backend/crates/storage/src/transaction_store.rs | |
| @@ -177,19 +177,24 @@ impl TransactionStore for PgTransactionStore { | |
| async fn find_proved_relay_payload( | |
| &self, | |
| - ) -> Result<Vec<(Uuid, TransactionType, Vec<u8>, Option<serde_json::Value>)>, CoreError> { | |
| - let rows = | |
| - sqlx::query_as::<_, (Uuid, TransactionType, Vec<u8>, Option<serde_json::Value>)>( | |
| - "SELECT id, type, relay_payload, relay_extra | |
| + ) -> Result< | |
| + Vec<(Uuid, Uuid, Uuid, TransactionType, Vec<u8>, Option<serde_json::Value>)>, | |
| + CoreError, | |
| + > { | |
| + let rows = sqlx::query_as::< | |
| + _, | |
| + (Uuid, Uuid, Uuid, TransactionType, Vec<u8>, Option<serde_json::Value>), | |
| + >( | |
| + "SELECT id, tenant_id, project_id, type, relay_payload, relay_extra | |
| FROM transactions | |
| WHERE relay_payload IS NOT NULL | |
| AND status IN ('accepted', 'proved', 'broadcasting') | |
| ORDER BY created_at ASC | |
| LIMIT 50", | |
| - ) | |
| - .fetch_all(&self.pool) | |
| - .await | |
| - .internal()?; | |
| + ) | |
| + .fetch_all(&self.pool) | |
| + .await | |
| + .internal()?; | |
| Ok(rows) | |
| } | |
| diff --git a/protocol/backend/crates/workers/src/lib.rs b/protocol/backend/crates/workers/src/lib.rs | |
| index 599656c..3649359 100644 | |
| --- a/protocol/backend/crates/workers/src/lib.rs | |
| +++ b/protocol/backend/crates/workers/src/lib.rs | |
| @@ -35,8 +35,8 @@ pub use prove::{ProveJob, ProveWorker}; | |
| pub use prove_dispatch::MpscProveDispatcher; | |
| pub use prover; | |
| pub use relay::{ | |
| - BurnerGasStatusUpdater, NonceReconciler, NullifierCheck, Permit2Check, RelayDataReader, | |
| - RelayJob, RelayWorker, RelayerHealth, RelayerHealthReporter, RetryConfig, | |
| + BurnerCanonicalVerifier, BurnerGasStatusUpdater, NonceReconciler, NullifierCheck, Permit2Check, | |
| + RelayDataReader, RelayJob, RelayWorker, RelayerHealth, RelayerHealthReporter, RetryConfig, | |
| TransactionStatusWriter, | |
| }; | |
| pub use scanning::NoteScanningWorker; | |
| diff --git a/protocol/backend/crates/workers/src/prove/worker.rs b/protocol/backend/crates/workers/src/prove/worker.rs | |
| index 06feb90..f9df08b 100644 | |
| --- a/protocol/backend/crates/workers/src/prove/worker.rs | |
| +++ b/protocol/backend/crates/workers/src/prove/worker.rs | |
| @@ -532,7 +532,14 @@ mod tests { | |
| async fn find_proved_relay_payload( | |
| &self, | |
| ) -> Result< | |
| - Vec<(Uuid, domain::models::TransactionType, Vec<u8>, Option<serde_json::Value>)>, | |
| + Vec<( | |
| + Uuid, | |
| + Uuid, | |
| + Uuid, | |
| + domain::models::TransactionType, | |
| + Vec<u8>, | |
| + Option<serde_json::Value>, | |
| + )>, | |
| CoreError, | |
| > { | |
| Ok(vec![]) | |
| diff --git a/protocol/backend/crates/workers/src/relay/broadcast.rs b/protocol/backend/crates/workers/src/relay/broadcast.rs | |
| index a1870bb..d0825e9 100644 | |
| --- a/protocol/backend/crates/workers/src/relay/broadcast.rs | |
| +++ b/protocol/backend/crates/workers/src/relay/broadcast.rs | |
| @@ -176,46 +176,62 @@ where | |
| } | |
| // -- Step 2: DB pre-flight for transfers/withdrawals -- | |
| + // Check ALL nullifiers before broadcast. Previously only nullifiers[0] was | |
| + // checked, allowing a transaction whose non-first nullifier was already | |
| + // spent to reach on-chain and revert, wasting gas (HIGH-02). | |
| let nullifiers = job.nullifier_hashes(); | |
| if !nullifiers.is_empty() { | |
| let preflight_start = std::time::Instant::now(); | |
| - let hash_str = nullifiers[0].to_string(); | |
| - let preflight_result = match nullifier_check.is_spent(&hash_str).await { | |
| - Ok(true) => { | |
| - // Recover the on-chain tx hash from the nullifier store so the | |
| - // status updater can write it to the transaction row. | |
| - let tx_hash = match nullifier_check.find_spent_tx_hash(&hash_str).await { | |
| - Ok(Some(bytes)) if bytes.len() == 32 => { | |
| - let mut arr = [0u8; 32]; | |
| - arr.copy_from_slice(&bytes); | |
| - Some(alloy::primitives::TxHash::from(arr)) | |
| - } | |
| - Ok(_) => None, | |
| - Err(e) => { | |
| - tracing::warn!(%tx_id, error = %e, "failed to look up spent tx hash"); | |
| - None | |
| - } | |
| - }; | |
| - if tx_hash.is_some() { | |
| - tracing::info!(%tx_id, ?tx_hash, "nullifier already spent — job already on-chain"); | |
| - } else { | |
| - tracing::warn!(%tx_id, "nullifier already spent but tx_hash recovery failed — will go directly to processed"); | |
| + let mut spent_nullifier: Option<String> = None; | |
| + for (i, n) in nullifiers.iter().enumerate() { | |
| + match nullifier_check.is_spent(&n.to_string()).await { | |
| + Ok(true) => { | |
| + spent_nullifier = Some(n.to_string()); | |
| + tracing::warn!( | |
| + %tx_id, nullifier_index = i, hash = %n, | |
| + "nullifier pre-flight failed: already spent", | |
| + ); | |
| + break; | |
| } | |
| - Some(BroadcastPhaseOutcome::AlreadyLanded(tx_hash)) | |
| - } | |
| - Err(e) => { | |
| - tracing::warn!(%tx_id, error = %e, "nullifier check failed — proceeding with broadcast"); | |
| - None | |
| + Err(e) => { | |
| + tracing::warn!( | |
| + %tx_id, nullifier_index = i, error = %e, | |
| + "nullifier pre-flight RPC error — proceeding with broadcast", | |
| + ); | |
| + } | |
| + Ok(false) => {} | |
| } | |
| - Ok(false) => None, | |
| - }; | |
| - let preflight_elapsed = preflight_start.elapsed(); | |
| - let preflight_ms = preflight_elapsed.as_millis() as u64; | |
| - histogram!(names::RELAY_PREFLIGHT_DURATION_SECONDS).record(preflight_elapsed.as_secs_f64()); | |
| - tracing::info!(%tx_id, phase = "preflight", duration_ms = preflight_ms, "phase complete"); | |
| - if let Some(result) = preflight_result { | |
| + } | |
| + if let Some(hash_str) = spent_nullifier { | |
| + let tx_hash = match nullifier_check.find_spent_tx_hash(&hash_str).await { | |
| + Ok(Some(bytes)) if bytes.len() == 32 => { | |
| + let mut arr = [0u8; 32]; | |
| + arr.copy_from_slice(&bytes); | |
| + Some(alloy::primitives::TxHash::from(arr)) | |
| + } | |
| + Ok(_) => None, | |
| + Err(e) => { | |
| + tracing::warn!(%tx_id, error = %e, "failed to look up spent tx hash"); | |
| + None | |
| + } | |
| + }; | |
| + let result = if tx_hash.is_some() { | |
| + tracing::info!(%tx_id, ?tx_hash, "nullifier already spent — job already on-chain"); | |
| + BroadcastPhaseOutcome::AlreadyLanded(tx_hash) | |
| + } else { | |
| + tracing::warn!(%tx_id, "nullifier already spent but tx_hash recovery failed — will go directly to processed"); | |
| + BroadcastPhaseOutcome::AlreadyLanded(tx_hash) | |
| + }; | |
| + let preflight_elapsed = preflight_start.elapsed(); | |
| + histogram!(names::RELAY_PREFLIGHT_DURATION_SECONDS) | |
| + .record(preflight_elapsed.as_secs_f64()); | |
| + tracing::info!(%tx_id, phase = "preflight", duration_ms = preflight_elapsed.as_millis() as u64, "phase complete"); | |
| return result; | |
| } | |
| + let preflight_elapsed = preflight_start.elapsed(); | |
| + histogram!(names::RELAY_PREFLIGHT_DURATION_SECONDS) | |
| + .record(preflight_elapsed.as_secs_f64()); | |
| + tracing::info!(%tx_id, phase = "preflight", duration_ms = preflight_elapsed.as_millis() as u64, "phase complete"); | |
| } | |
| // -- Step 3: Fresh broadcast + persist hash -- | |
| diff --git a/protocol/backend/crates/workers/src/relay/db.rs b/protocol/backend/crates/workers/src/relay/db.rs | |
| index 37bb60a..2d71b8e 100644 | |
| --- a/protocol/backend/crates/workers/src/relay/db.rs | |
| +++ b/protocol/backend/crates/workers/src/relay/db.rs | |
| @@ -8,7 +8,7 @@ use metrics::{counter, gauge}; | |
| use unlink_metrics::names; | |
| use super::job::RelayJob; | |
| -use super::traits::RelayDataReader; | |
| +use super::traits::{BurnerCanonicalVerifier, RelayDataReader}; | |
| use super::types::InflightJob; | |
| /// Poll the `relay_payload` outbox, decode jobs, and push them to pending_jobs. | |
| @@ -24,14 +24,15 @@ use super::types::InflightJob; | |
| /// already mid-broadcast — a duplicate broadcast task would then pass the | |
| /// CAS, hit a different slot, and emit a duplicate on-chain tx (wasted gas | |
| /// + nonce drift on the duplicate slot). | |
| -pub(super) async fn poll_db<D: RelayDataReader>( | |
| +pub(super) async fn poll_db<D: RelayDataReader, B: BurnerCanonicalVerifier>( | |
| relay_store: &D, | |
| + burner_verifier: &B, | |
| burner_gas_funding_wei: alloy::primitives::U256, | |
| retry_set: &HashSet<uuid::Uuid>, | |
| inflight: &HashMap<uuid::Uuid, InflightJob>, | |
| pending_jobs: &mut VecDeque<(RelayJob, u32)>, | |
| pending_set: &mut HashSet<uuid::Uuid>, | |
| - broadcasting_set: &HashSet<uuid::Uuid>, | |
| + broadcasting_set: &mut HashSet<uuid::Uuid>, | |
| ) { | |
| let rows = match relay_store.find_proved_relay_payload().await { | |
| Ok(rows) => rows, | |
| @@ -43,7 +44,7 @@ pub(super) async fn poll_db<D: RelayDataReader>( | |
| gauge!(names::RELAY_PENDING_JOBS).set(rows.len() as f64); | |
| - for (tx_id, kind, payload, extra) in rows { | |
| + for (tx_id, tenant_id, project_id, kind, payload, extra) in rows { | |
| // Skip jobs already tracked anywhere. | |
| if retry_set.contains(&tx_id) | |
| || inflight.contains_key(&tx_id) | |
| @@ -100,18 +101,47 @@ pub(super) async fn poll_db<D: RelayDataReader>( | |
| } | |
| }; | |
| - // Hydrate burner gas funding from sideband extra. | |
| + // Hydrate burner gas funding from sideband extra, verifying against | |
| + // the canonical BurnerAccount store (RL-02: close sideband-tampering path). | |
| if let (RelayJob::Withdraw { gas_funding, .. }, Some(extra_val)) = (&mut job, &extra) | |
| && extra_val.get("is_burner").and_then(|v| v.as_bool()).unwrap_or(false) | |
| && let Some(addr_str) = extra_val.get("evm_address").and_then(|v| v.as_str()) | |
| && let Ok(addr) = addr_str.parse::<alloy::primitives::Address>() | |
| { | |
| - let amount_wei = extra_val | |
| - .get("gas_funding_wei") | |
| - .and_then(|v| v.as_str()) | |
| - .and_then(|s| s.parse::<alloy::primitives::U256>().ok()) | |
| - .unwrap_or(burner_gas_funding_wei); | |
| - *gas_funding = Some(super::job::GasFunding { recipient: addr, amount_wei }); | |
| + // Verify the sideband address against the canonical BurnerAccount record. | |
| + // Reject tampering: an attacker who modified `relay_extra.evm_address` in the DB | |
| + // would cause this check to fail, preventing gas funding to the wrong address. | |
| + let verified = match burner_verifier | |
| + .verify_burner_address(tx_id, addr_str, tenant_id, project_id) | |
| + .await | |
| + { | |
| + Ok(true) => true, | |
| + Ok(false) => { | |
| + tracing::warn!( | |
| + %tx_id, | |
| + sideband_address = %addr_str, | |
| + "burner gas funding rejected: sideband address does not match canonical BurnerAccount" | |
| + ); | |
| + false | |
| + } | |
| + Err(e) => { | |
| + tracing::error!( | |
| + %tx_id, | |
| + error = %e, | |
| + "burner address verification failed — rejecting gas funding for safety" | |
| + ); | |
| + false | |
| + } | |
| + }; | |
| + | |
| + if verified { | |
| + let amount_wei = extra_val | |
| + .get("gas_funding_wei") | |
| + .and_then(|v| v.as_str()) | |
| + .and_then(|s| s.parse::<alloy::primitives::U256>().ok()) | |
| + .unwrap_or(burner_gas_funding_wei); | |
| + *gas_funding = Some(super::job::GasFunding { recipient: addr, amount_wei }); | |
| + } | |
| } | |
| pending_set.insert(tx_id); | |
| diff --git a/protocol/backend/crates/workers/src/relay/mod.rs b/protocol/backend/crates/workers/src/relay/mod.rs | |
| index 6b43a6c..39fa958 100644 | |
| --- a/protocol/backend/crates/workers/src/relay/mod.rs | |
| +++ b/protocol/backend/crates/workers/src/relay/mod.rs | |
| @@ -63,8 +63,9 @@ mod tests; | |
| pub use job::{GasFunding, RelayJob}; | |
| pub use traits::{ | |
| - BurnerGasStatusUpdater, NonceReconciler, NoopPermit2Check, NullifierCheck, Permit2Check, | |
| - RelayDataReader, RelayerHealth, RelayerHealthReporter, TransactionStatusWriter, | |
| + BurnerCanonicalVerifier, BurnerGasStatusUpdater, NonceReconciler, NoopBurnerCanonicalVerifier, | |
| + NoopPermit2Check, NullifierCheck, Permit2Check, RelayDataReader, RelayerHealth, | |
| + RelayerHealthReporter, TransactionStatusWriter, | |
| }; | |
| pub use types::RetryConfig; | |
| pub use worker::RelayWorker; | |
| diff --git a/protocol/backend/crates/workers/src/relay/tests/broadcast.rs b/protocol/backend/crates/workers/src/relay/tests/broadcast.rs | |
| index 3f2a1f6..a591e36 100644 | |
| --- a/protocol/backend/crates/workers/src/relay/tests/broadcast.rs | |
| +++ b/protocol/backend/crates/workers/src/relay/tests/broadcast.rs | |
| @@ -12,9 +12,9 @@ use domain::traits::TransactionStore; | |
| use uuid::Uuid; | |
| use super::{ | |
| - CollectingUpdater, FailingStatusWriter, MockSubmitter, PreloadedRelayDataReader, RelayJob, | |
| - RelayOutcome, RelayWorker, SelectiveFailStatusWriter, relay_data_row, run_until_idle, | |
| - test_notify_and_shutdown, | |
| + CollectingUpdater, FailingStatusWriter, MockSubmitter, NoopBurnerCanonicalVerifier, | |
| + PreloadedRelayDataReader, RelayJob, RelayOutcome, RelayWorker, SelectiveFailStatusWriter, | |
| + relay_data_row, run_until_idle, test_notify_and_shutdown, | |
| }; | |
| use crate::testing::{NoopNullifierCheck, SpentNullifierCheck}; | |
| @@ -36,6 +36,7 @@ async fn processes_transfer_individually() { | |
| NoopNullifierCheck, | |
| MockTransactionStore::new(), | |
| reader, | |
| + NoopBurnerCanonicalVerifier, | |
| ); | |
| run_until_idle(worker.run(), ¬ify, &shutdown, Duration::from_millis(50)).await; | |
| @@ -68,6 +69,7 @@ async fn processes_withdrawal_individually() { | |
| NoopNullifierCheck, | |
| MockTransactionStore::new(), | |
| reader, | |
| + NoopBurnerCanonicalVerifier, | |
| ); | |
| run_until_idle(worker.run(), ¬ify, &shutdown, Duration::from_millis(50)).await; | |
| @@ -111,6 +113,7 @@ async fn processes_deposit_individually() { | |
| NoopNullifierCheck, | |
| MockTransactionStore::new(), | |
| reader, | |
| + NoopBurnerCanonicalVerifier, | |
| ); | |
| run_until_idle(worker.run(), ¬ify, &shutdown, Duration::from_millis(50)).await; | |
| @@ -153,6 +156,7 @@ async fn permit2_expired_deadline_fails_fast() { | |
| NoopNullifierCheck, | |
| MockTransactionStore::new(), | |
| reader, | |
| + NoopBurnerCanonicalVerifier, | |
| ); | |
| run_until_idle(worker.run(), ¬ify, &shutdown, Duration::from_millis(50)).await; | |
| @@ -322,6 +326,7 @@ async fn permit2_overflow_deadline_fails_terminally() { | |
| NoopNullifierCheck, | |
| MockTransactionStore::new(), | |
| reader, | |
| + NoopBurnerCanonicalVerifier, | |
| ); | |
| run_until_idle(worker.run(), ¬ify, &shutdown, Duration::from_millis(50)).await; | |
| @@ -373,6 +378,7 @@ async fn nullifier_preflight_skips_already_submitted_job() { | |
| SpentNullifierCheck, | |
| MockTransactionStore::new(), | |
| reader, | |
| + NoopBurnerCanonicalVerifier, | |
| ); | |
| run_until_idle(worker.run(), ¬ify, &shutdown, Duration::from_millis(50)).await; | |
| @@ -403,6 +409,7 @@ async fn broadcasting_claim_failure_skips_submission() { | |
| NoopNullifierCheck, | |
| MockTransactionStore::new(), | |
| reader, | |
| + NoopBurnerCanonicalVerifier, | |
| ) | |
| .with_status_writer(FailingStatusWriter); | |
| run_until_idle(worker.run(), ¬ify, &shutdown, Duration::from_millis(50)).await; | |
| @@ -435,6 +442,7 @@ async fn cas_failure_on_one_job_does_not_block_others() { | |
| NoopNullifierCheck, | |
| MockTransactionStore::new(), | |
| reader, | |
| + NoopBurnerCanonicalVerifier, | |
| ) | |
| .with_status_writer(SelectiveFailStatusWriter { fail_id: claimed_id }); | |
| run_until_idle(worker.run(), ¬ify, &shutdown, Duration::from_millis(50)).await; | |
| @@ -473,6 +481,7 @@ async fn crash_recovery_broadcasting_tx_proceeds() { | |
| NoopNullifierCheck, | |
| MockTransactionStore::new(), | |
| reader, | |
| + NoopBurnerCanonicalVerifier, | |
| ); | |
| run_until_idle(worker.run(), ¬ify, &shutdown, Duration::from_millis(50)).await; | |
| @@ -508,6 +517,7 @@ async fn nonce_sequence_is_sequential() { | |
| NoopNullifierCheck, | |
| MockTransactionStore::new(), | |
| reader, | |
| + NoopBurnerCanonicalVerifier, | |
| ) | |
| .with_max_inflight(5); | |
| run_until_idle(worker.run(), ¬ify, &shutdown, Duration::from_secs(30)).await; | |
| diff --git a/protocol/backend/crates/workers/src/relay/tests/burner.rs b/protocol/backend/crates/workers/src/relay/tests/burner.rs | |
| index 480a261..d8a6607 100644 | |
| --- a/protocol/backend/crates/workers/src/relay/tests/burner.rs | |
| +++ b/protocol/backend/crates/workers/src/relay/tests/burner.rs | |
| @@ -11,8 +11,8 @@ use uuid::Uuid; | |
| use super::super::traits::BurnerGasStatusUpdater; | |
| use super::{ | |
| - CollectingUpdater, MockSubmitter, PreloadedRelayDataReader, RelayJob, RelayWorker, | |
| - relay_data_row, test_notify_and_shutdown, | |
| + CollectingUpdater, MockSubmitter, NoopBurnerCanonicalVerifier, PreloadedRelayDataReader, | |
| + RelayJob, RelayWorker, relay_data_row, test_notify_and_shutdown, | |
| }; | |
| use crate::testing::NoopNullifierCheck; | |
| @@ -67,7 +67,7 @@ async fn burner_gas_funding_does_not_block_main_loop() { | |
| "gas_funding_wei": "1000", | |
| }); | |
| let mut row1 = relay_data_row(&job1); | |
| - row1.3 = Some(extra); | |
| + row1.5 = Some(extra); | |
| let reader = PreloadedRelayDataReader::new(vec![row1, relay_data_row(&job2)]); | |
| let worker = RelayWorker::new( | |
| @@ -78,6 +78,7 @@ async fn burner_gas_funding_does_not_block_main_loop() { | |
| NoopNullifierCheck, | |
| MockTransactionStore::new(), | |
| reader, | |
| + NoopBurnerCanonicalVerifier, | |
| ) | |
| .with_max_inflight(2) | |
| .with_burner_gas_updater(gas_updater.clone()); | |
| diff --git a/protocol/backend/crates/workers/src/relay/tests/db.rs b/protocol/backend/crates/workers/src/relay/tests/db.rs | |
| index d5de671..f99c64a 100644 | |
| --- a/protocol/backend/crates/workers/src/relay/tests/db.rs | |
| +++ b/protocol/backend/crates/workers/src/relay/tests/db.rs | |
| @@ -10,8 +10,8 @@ use domain::services::test_support::MockTransactionStore; | |
| use uuid::Uuid; | |
| use super::{ | |
| - CollectingUpdater, MockSubmitter, PreloadedRelayDataReader, RelayJob, RelayWorker, | |
| - relay_data_row, test_notify_and_shutdown, | |
| + CollectingUpdater, MockSubmitter, NoopBurnerCanonicalVerifier, PreloadedRelayDataReader, | |
| + RelayJob, RelayWorker, relay_data_row, test_notify_and_shutdown, | |
| }; | |
| use crate::relay::traits::RelayDataReader; | |
| use crate::testing::NoopNullifierCheck; | |
| @@ -26,11 +26,11 @@ use crate::testing::NoopNullifierCheck; | |
| #[derive(Clone)] | |
| struct RepeatingRelayDataReader { | |
| #[allow(clippy::type_complexity)] | |
| - row: Arc<Mutex<Option<(Uuid, String, Vec<u8>, Option<serde_json::Value>)>>>, | |
| + row: Arc<Mutex<Option<(Uuid, Uuid, Uuid, String, Vec<u8>, Option<serde_json::Value>)>>>, | |
| } | |
| impl RepeatingRelayDataReader { | |
| - fn new(row: (Uuid, String, Vec<u8>, Option<serde_json::Value>)) -> Self { | |
| + fn new(row: (Uuid, Uuid, Uuid, String, Vec<u8>, Option<serde_json::Value>)) -> Self { | |
| Self { row: Arc::new(Mutex::new(Some(row))) } | |
| } | |
| } | |
| @@ -39,7 +39,7 @@ impl RepeatingRelayDataReader { | |
| impl RelayDataReader for RepeatingRelayDataReader { | |
| async fn find_proved_relay_payload( | |
| &self, | |
| - ) -> Result<Vec<(Uuid, String, Vec<u8>, Option<serde_json::Value>)>, String> { | |
| + ) -> Result<Vec<(Uuid, Uuid, Uuid, String, Vec<u8>, Option<serde_json::Value>)>, String> { | |
| // Clone-and-yield: on every poll, the row is still "in broadcasting" | |
| // from the SQL's perspective until upstream code clears it. | |
| Ok(self.row.lock().unwrap().clone().into_iter().collect()) | |
| @@ -69,6 +69,7 @@ async fn notification_triggers_db_poll() { | |
| NoopNullifierCheck, | |
| MockTransactionStore::new(), | |
| reader.clone(), | |
| + NoopBurnerCanonicalVerifier, | |
| ); | |
| let handle = tokio::spawn(worker.run()); | |
| @@ -114,6 +115,7 @@ async fn slow_broadcast_does_not_dedup_via_poll_db() { | |
| NoopNullifierCheck, | |
| MockTransactionStore::new(), | |
| reader.clone(), | |
| + NoopBurnerCanonicalVerifier, | |
| ); | |
| let handle = tokio::spawn(worker.run()); | |
| @@ -161,6 +163,7 @@ async fn poll_interval_triggers_without_notification() { | |
| NoopNullifierCheck, | |
| MockTransactionStore::new(), | |
| reader.clone(), | |
| + NoopBurnerCanonicalVerifier, | |
| ); | |
| let handle = tokio::spawn(worker.run()); | |
| @@ -192,7 +195,7 @@ async fn undecodable_payload_emits_metric_and_routes_via_keep_payload() { | |
| // Garbage bytes that ABI-decode rejects, paired with a valid kind label. | |
| let tx_id = Uuid::new_v4(); | |
| - let bad_row = (tx_id, "transfer".to_string(), vec![0xFFu8; 16], None); | |
| + let bad_row = (tx_id, Uuid::nil(), Uuid::nil(), "transfer".to_string(), vec![0xFFu8; 16], None); | |
| let reader = PreloadedRelayDataReader::new(vec![bad_row]); | |
| let worker = RelayWorker::new( | |
| @@ -203,6 +206,7 @@ async fn undecodable_payload_emits_metric_and_routes_via_keep_payload() { | |
| NoopNullifierCheck, | |
| MockTransactionStore::new(), | |
| reader.clone(), | |
| + NoopBurnerCanonicalVerifier, | |
| ); | |
| let handle = tokio::spawn(worker.run()); | |
| @@ -250,7 +254,8 @@ async fn undecodable_payload_with_unrecognized_kind_uses_unknown_bucket() { | |
| let (notify, shutdown) = test_notify_and_shutdown(); | |
| let tx_id = Uuid::new_v4(); | |
| - let bad_row = (tx_id, "weird_kind".to_string(), vec![0xFFu8; 16], None); | |
| + let bad_row = | |
| + (tx_id, Uuid::nil(), Uuid::nil(), "weird_kind".to_string(), vec![0xFFu8; 16], None); | |
| let reader = PreloadedRelayDataReader::new(vec![bad_row]); | |
| let worker = RelayWorker::new( | |
| @@ -261,6 +266,7 @@ async fn undecodable_payload_with_unrecognized_kind_uses_unknown_bucket() { | |
| NoopNullifierCheck, | |
| MockTransactionStore::new(), | |
| reader.clone(), | |
| + NoopBurnerCanonicalVerifier, | |
| ); | |
| let handle = tokio::spawn(worker.run()); | |
| diff --git a/protocol/backend/crates/workers/src/relay/tests/health.rs b/protocol/backend/crates/workers/src/relay/tests/health.rs | |
| index b6c91d9..bf0d2ad 100644 | |
| --- a/protocol/backend/crates/workers/src/relay/tests/health.rs | |
| +++ b/protocol/backend/crates/workers/src/relay/tests/health.rs | |
| @@ -17,9 +17,9 @@ use domain::traits::TransactionStore; | |
| use uuid::Uuid; | |
| use super::{ | |
| - CollectingUpdater, MOCK_RELAYER_ADDRESS, MockSubmitter, PreloadedRelayDataReader, RelayJob, | |
| - RelayWorker, TrackingReconciler, relay_data_row, run_until_idle, test_notify_and_shutdown, | |
| - test_retry_config, | |
| + CollectingUpdater, MOCK_RELAYER_ADDRESS, MockSubmitter, NoopBurnerCanonicalVerifier, | |
| + PreloadedRelayDataReader, RelayJob, RelayWorker, TrackingReconciler, relay_data_row, | |
| + run_until_idle, test_notify_and_shutdown, test_retry_config, | |
| }; | |
| use crate::testing::NoopNullifierCheck; | |
| @@ -43,6 +43,7 @@ async fn report_success_called_on_confirmed_outcome() { | |
| NoopNullifierCheck, | |
| MockTransactionStore::new(), | |
| reader, | |
| + NoopBurnerCanonicalVerifier, | |
| ) | |
| .with_health(Arc::new(health.clone())); | |
| run_until_idle(worker.run(), ¬ify, &shutdown, Duration::from_millis(50)).await; | |
| @@ -98,6 +99,7 @@ async fn report_success_not_called_on_already_landed_via_nullifier_preflight() { | |
| crate::testing::SpentWithHashNullifierCheck(landed_hash_bytes), | |
| MockTransactionStore::new(), | |
| reader, | |
| + NoopBurnerCanonicalVerifier, | |
| ) | |
| .with_health(Arc::new(health.clone())); | |
| run_until_idle(worker.run(), ¬ify, &shutdown, Duration::from_millis(50)).await; | |
| @@ -150,6 +152,7 @@ async fn drop_reports_failure_and_reconciles_originating_slot() { | |
| NoopNullifierCheck, | |
| MockTransactionStore::new(), | |
| reader, | |
| + NoopBurnerCanonicalVerifier, | |
| ) | |
| .with_retry_config(test_retry_config(2)) | |
| .with_health(Arc::new(health.clone())); | |
| @@ -205,6 +208,7 @@ async fn retryable_transport_error_reports_per_slot_failure() { | |
| NoopNullifierCheck, | |
| MockTransactionStore::new(), | |
| reader, | |
| + NoopBurnerCanonicalVerifier, | |
| ) | |
| .with_retry_config(test_retry_config(3)) | |
| .with_health(Arc::new(health.clone())); | |
| @@ -287,6 +291,7 @@ async fn resume_from_persisted_hash_credits_originating_slot_via_route_lookup() | |
| NoopNullifierCheck, | |
| tx_store, | |
| reader, | |
| + NoopBurnerCanonicalVerifier, | |
| ) | |
| .with_health(Arc::new(health.clone())); | |
| run_until_idle(worker.run(), ¬ify, &shutdown, Duration::from_secs(5)).await; | |
| @@ -333,6 +338,7 @@ async fn resume_from_persisted_hash_with_unknown_route_falls_back_to_zero() { | |
| NoopNullifierCheck, | |
| tx_store, | |
| reader, | |
| + NoopBurnerCanonicalVerifier, | |
| ) | |
| .with_health(Arc::new(health.clone())); | |
| run_until_idle(worker.run(), ¬ify, &shutdown, Duration::from_secs(5)).await; | |
| diff --git a/protocol/backend/crates/workers/src/relay/tests/mod.rs b/protocol/backend/crates/workers/src/relay/tests/mod.rs | |
| index 7756716..f7d68ae 100644 | |
| --- a/protocol/backend/crates/workers/src/relay/tests/mod.rs | |
| +++ b/protocol/backend/crates/workers/src/relay/tests/mod.rs | |
| @@ -29,7 +29,8 @@ use tokio_util::sync::CancellationToken; | |
| use uuid::Uuid; | |
| use super::traits::{ | |
| - NonceReconciler, RelayDataReader, RelayerHealthReporter, TransactionStatusWriter, | |
| + BurnerCanonicalVerifier, NonceReconciler, RelayDataReader, RelayerHealthReporter, | |
| + TransactionStatusWriter, | |
| }; | |
| use crate::submitter::{BroadcastResult, ChainSubmitter}; | |
| @@ -37,8 +38,11 @@ use crate::submitter::{BroadcastResult, ChainSubmitter}; | |
| // instead of reaching through `super::super::*`. | |
| pub(super) use super::RelayWorker; | |
| pub(super) use super::job::RelayJob; | |
| +pub(super) use super::traits::NoopBurnerCanonicalVerifier; | |
| +pub(super) use super::traits::NoopBurnerGasStatusUpdater; | |
| pub(super) use super::types::RetryConfig; | |
| pub(super) use crate::outcome::{RelayOutcome, StatusUpdater}; | |
| +pub(super) use crate::testing::NoopNullifierCheck; | |
| // ── Mock RelayDataReader ────────────────────────────────────────────── | |
| @@ -50,7 +54,7 @@ pub(super) struct EmptyRelayDataReader; | |
| impl RelayDataReader for EmptyRelayDataReader { | |
| async fn find_proved_relay_payload( | |
| &self, | |
| - ) -> Result<Vec<(Uuid, String, Vec<u8>, Option<serde_json::Value>)>, String> { | |
| + ) -> Result<Vec<(Uuid, Uuid, Uuid, String, Vec<u8>, Option<serde_json::Value>)>, String> { | |
| Ok(vec![]) | |
| } | |
| async fn fail_relay_atomic(&self, _tx_id: Uuid) -> Result<bool, String> { | |
| @@ -65,13 +69,15 @@ impl RelayDataReader for EmptyRelayDataReader { | |
| #[derive(Clone)] | |
| pub(super) struct PreloadedRelayDataReader { | |
| #[allow(clippy::type_complexity)] | |
| - rows: Arc<Mutex<Vec<(Uuid, String, Vec<u8>, Option<serde_json::Value>)>>>, | |
| + rows: Arc<Mutex<Vec<(Uuid, Uuid, Uuid, String, Vec<u8>, Option<serde_json::Value>)>>>, | |
| poll_count: Arc<AtomicU32>, | |
| mark_failed_calls: Arc<AtomicU32>, | |
| } | |
| impl PreloadedRelayDataReader { | |
| - pub(super) fn new(rows: Vec<(Uuid, String, Vec<u8>, Option<serde_json::Value>)>) -> Self { | |
| + pub(super) fn new( | |
| + rows: Vec<(Uuid, Uuid, Uuid, String, Vec<u8>, Option<serde_json::Value>)>, | |
| + ) -> Self { | |
| Self { | |
| rows: Arc::new(Mutex::new(rows)), | |
| poll_count: Arc::new(AtomicU32::new(0)), | |
| @@ -92,7 +98,7 @@ impl PreloadedRelayDataReader { | |
| impl RelayDataReader for PreloadedRelayDataReader { | |
| async fn find_proved_relay_payload( | |
| &self, | |
| - ) -> Result<Vec<(Uuid, String, Vec<u8>, Option<serde_json::Value>)>, String> { | |
| + ) -> Result<Vec<(Uuid, Uuid, Uuid, String, Vec<u8>, Option<serde_json::Value>)>, String> { | |
| self.poll_count.fetch_add(1, Ordering::SeqCst); | |
| let mut rows = self.rows.lock().unwrap(); | |
| Ok(std::mem::take(&mut *rows)) | |
| @@ -133,8 +139,10 @@ pub(super) async fn run_until_idle<F: Future<Output = ()> + Send + 'static>( | |
| } | |
| /// Encode a `RelayJob` into a relay data row for mock stores. | |
| -pub(super) fn relay_data_row(job: &RelayJob) -> (Uuid, String, Vec<u8>, Option<serde_json::Value>) { | |
| - (job.tx_id(), job.kind().to_string(), job.encode_payload(), None) | |
| +pub(super) fn relay_data_row( | |
| + job: &RelayJob, | |
| +) -> (Uuid, Uuid, Uuid, String, Vec<u8>, Option<serde_json::Value>) { | |
| + (job.tx_id(), Uuid::nil(), Uuid::nil(), job.kind().to_string(), job.encode_payload(), None) | |
| } | |
| /// Retry config with zero backoff for fast tests. | |
| diff --git a/protocol/backend/crates/workers/src/relay/tests/outcome.rs b/protocol/backend/crates/workers/src/relay/tests/outcome.rs | |
| index 9899996..64c93c0 100644 | |
| --- a/protocol/backend/crates/workers/src/relay/tests/outcome.rs | |
| +++ b/protocol/backend/crates/workers/src/relay/tests/outcome.rs | |
| @@ -16,9 +16,9 @@ use uuid::Uuid; | |
| use super::super::outcome::{record_outcome_calls, reset_record_outcome_calls}; | |
| use super::{ | |
| - CollectingUpdater, MockSubmitter, PreloadedRelayDataReader, RelayJob, RelayWorker, | |
| - TrackingReconciler, relay_data_row, run_until_idle, test_notify_and_shutdown, | |
| - test_retry_config, | |
| + CollectingUpdater, MockSubmitter, NoopBurnerCanonicalVerifier, PreloadedRelayDataReader, | |
| + RelayJob, RelayWorker, TrackingReconciler, relay_data_row, run_until_idle, | |
| + test_notify_and_shutdown, test_retry_config, | |
| }; | |
| use crate::testing::NoopNullifierCheck; | |
| @@ -40,6 +40,7 @@ async fn record_outcome_called_once_per_confirmed() { | |
| NoopNullifierCheck, | |
| MockTransactionStore::new(), | |
| reader, | |
| + NoopBurnerCanonicalVerifier, | |
| ); | |
| run_until_idle(worker.run(), ¬ify, &shutdown, Duration::from_millis(50)).await; | |
| @@ -73,6 +74,7 @@ async fn record_outcome_called_once_for_timeout_abandoned_without_persist() { | |
| NoopNullifierCheck, | |
| MockTransactionStore::new(), | |
| reader, | |
| + NoopBurnerCanonicalVerifier, | |
| ) | |
| .with_watch_tx_timeout(Duration::from_millis(1)) | |
| .with_retry_config(test_retry_config(1)); | |
| @@ -109,6 +111,7 @@ async fn record_outcome_called_per_terminal_in_drop_recovery() { | |
| NoopNullifierCheck, | |
| MockTransactionStore::new(), | |
| reader, | |
| + NoopBurnerCanonicalVerifier, | |
| ) | |
| .with_health(Arc::new(reconciler)) | |
| .with_max_inflight(2); | |
| diff --git a/protocol/backend/crates/workers/src/relay/tests/recovery.rs b/protocol/backend/crates/workers/src/relay/tests/recovery.rs | |
| index 64553b4..543dfb2 100644 | |
| --- a/protocol/backend/crates/workers/src/relay/tests/recovery.rs | |
| +++ b/protocol/backend/crates/workers/src/relay/tests/recovery.rs | |
| @@ -16,9 +16,9 @@ use uuid::Uuid; | |
| use super::super::traits::NullifierCheck; | |
| use super::{ | |
| - CollectingUpdater, EmptyRelayDataReader, MockSubmitter, PreloadedRelayDataReader, RelayJob, | |
| - RelayOutcome, RelayWorker, relay_data_row, run_until_idle, test_notify_and_shutdown, | |
| - test_retry_config, | |
| + CollectingUpdater, EmptyRelayDataReader, MockSubmitter, NoopBurnerCanonicalVerifier, | |
| + PreloadedRelayDataReader, RelayJob, RelayOutcome, RelayWorker, relay_data_row, run_until_idle, | |
| + test_notify_and_shutdown, test_retry_config, | |
| }; | |
| use crate::testing::{NoopNullifierCheck, NoopPermit2Check, UsedPermit2Check}; | |
| @@ -41,6 +41,7 @@ async fn failed_job_does_not_affect_subsequent_jobs() { | |
| NoopNullifierCheck, | |
| MockTransactionStore::new(), | |
| reader, | |
| + NoopBurnerCanonicalVerifier, | |
| ) | |
| .with_retry_config(test_retry_config(0)); | |
| run_until_idle(worker.run(), ¬ify, &shutdown, Duration::from_millis(50)).await; | |
| @@ -71,6 +72,7 @@ async fn shuts_down_on_cancellation_token() { | |
| NoopNullifierCheck, | |
| MockTransactionStore::new(), | |
| EmptyRelayDataReader, | |
| + NoopBurnerCanonicalVerifier, | |
| ); | |
| let handle = tokio::spawn(worker.run()); | |
| @@ -93,6 +95,7 @@ async fn shutdown_token_stops_worker() { | |
| NoopNullifierCheck, | |
| MockTransactionStore::new(), | |
| EmptyRelayDataReader, | |
| + NoopBurnerCanonicalVerifier, | |
| ); | |
| let handle = tokio::spawn(worker.run()); | |
| @@ -228,6 +231,7 @@ async fn revert_with_spent_nullifier_recovers_as_already_landed() { | |
| DelayedSpentNullifier::new(1, recovered_hash_bytes), | |
| MockTransactionStore::new(), | |
| reader, | |
| + NoopBurnerCanonicalVerifier, | |
| ) | |
| .with_retry_config(test_retry_config(0)); | |
| run_until_idle(worker.run(), ¬ify, &shutdown, Duration::from_secs(10)).await; | |
| @@ -277,6 +281,7 @@ async fn deposit_revert_still_fails_until_permit2_indexer_gating_lands() { | |
| NoopNullifierCheck, // deposits have no nullifiers | |
| MockTransactionStore::new(), | |
| reader, | |
| + NoopBurnerCanonicalVerifier, | |
| ) | |
| .with_permit2_check(UsedPermit2Check) | |
| .with_retry_config(test_retry_config(0)); | |
| @@ -318,6 +323,7 @@ async fn genuine_revert_without_onchain_recovery_still_fails() { | |
| NoopNullifierCheck, | |
| MockTransactionStore::new(), | |
| reader, | |
| + NoopBurnerCanonicalVerifier, | |
| ) | |
| .with_permit2_check(NoopPermit2Check) | |
| .with_retry_config(test_retry_config(0)); | |
| @@ -362,6 +368,7 @@ async fn batch_abort_revert_with_spent_nullifier_recovers_as_already_landed() { | |
| DelayedSpentNullifier::new(2, recovered_bytes), | |
| MockTransactionStore::new(), | |
| reader, | |
| + NoopBurnerCanonicalVerifier, | |
| ) | |
| .with_max_inflight(2) | |
| .with_retry_config(test_retry_config(3)); | |
| @@ -429,6 +436,7 @@ async fn revert_with_spent_nullifier_recovers_withdraw_as_already_landed() { | |
| DelayedSpentNullifier::new(1, recovered_hash_bytes), | |
| MockTransactionStore::new(), | |
| reader, | |
| + NoopBurnerCanonicalVerifier, | |
| ) | |
| .with_retry_config(test_retry_config(0)); | |
| run_until_idle(worker.run(), ¬ify, &shutdown, Duration::from_secs(10)).await; | |
| @@ -464,6 +472,7 @@ async fn panicked_watcher_requeues_job() { | |
| NoopNullifierCheck, | |
| MockTransactionStore::new(), | |
| reader, | |
| + NoopBurnerCanonicalVerifier, | |
| ) | |
| .with_max_inflight(3); | |
| run_until_idle(worker.run(), ¬ify, &shutdown, Duration::from_secs(30)).await; | |
| diff --git a/protocol/backend/crates/workers/src/relay/tests/watch.rs b/protocol/backend/crates/workers/src/relay/tests/watch.rs | |
| index 94784f6..394c978 100644 | |
| --- a/protocol/backend/crates/workers/src/relay/tests/watch.rs | |
| +++ b/protocol/backend/crates/workers/src/relay/tests/watch.rs | |
| @@ -16,9 +16,9 @@ use uuid::Uuid; | |
| use super::super::traits::RelayDataReader; | |
| use super::super::types::MAX_REQUEUE_COUNT; | |
| use super::{ | |
| - CollectingUpdater, MockSubmitter, PreloadedRelayDataReader, RelayJob, RelayOutcome, | |
| - RelayWorker, TrackingReconciler, relay_data_row, run_until_idle, test_notify_and_shutdown, | |
| - test_retry_config, | |
| + CollectingUpdater, MockSubmitter, NoopBurnerCanonicalVerifier, PreloadedRelayDataReader, | |
| + RelayJob, RelayOutcome, RelayWorker, TrackingReconciler, relay_data_row, run_until_idle, | |
| + test_notify_and_shutdown, test_retry_config, | |
| }; | |
| use crate::testing::NoopNullifierCheck; | |
| @@ -48,6 +48,7 @@ async fn existing_broadcast_is_watched_not_resubmitted() { | |
| NoopNullifierCheck, | |
| tx_store, | |
| reader, | |
| + NoopBurnerCanonicalVerifier, | |
| ); | |
| run_until_idle(worker.run(), ¬ify, &shutdown, Duration::from_millis(50)).await; | |
| @@ -102,6 +103,7 @@ async fn already_landed_recovers_tx_hash() { | |
| crate::testing::SpentWithHashNullifierCheck(expected_hash_bytes), | |
| MockTransactionStore::new(), | |
| reader, | |
| + NoopBurnerCanonicalVerifier, | |
| ); | |
| run_until_idle(worker.run(), ¬ify, &shutdown, Duration::from_millis(50)).await; | |
| @@ -144,6 +146,7 @@ async fn dropped_tx_triggers_resubmit() { | |
| NoopNullifierCheck, | |
| tx_store.clone(), | |
| reader, | |
| + NoopBurnerCanonicalVerifier, | |
| ) | |
| .with_retry_config(test_retry_config(3)); | |
| run_until_idle(worker.run(), ¬ify, &shutdown, Duration::from_secs(60)).await; | |
| @@ -183,6 +186,7 @@ async fn watch_tx_timeout_does_not_emit_failed_outcome() { | |
| NoopNullifierCheck, | |
| MockTransactionStore::new(), | |
| reader, | |
| + NoopBurnerCanonicalVerifier, | |
| ) | |
| .with_watch_tx_timeout(Duration::from_millis(1)) | |
| .with_retry_config(test_retry_config(1)); | |
| @@ -221,6 +225,7 @@ async fn concurrent_broadcast_before_confirms() { | |
| NoopNullifierCheck, | |
| MockTransactionStore::new(), | |
| reader, | |
| + NoopBurnerCanonicalVerifier, | |
| ) | |
| .with_max_inflight(4); | |
| let handle = tokio::spawn(worker.run()); | |
| @@ -269,6 +274,7 @@ async fn max_inflight_1_is_sequential() { | |
| NoopNullifierCheck, | |
| MockTransactionStore::new(), | |
| reader, | |
| + NoopBurnerCanonicalVerifier, | |
| ); // default max_inflight=1 | |
| let handle = tokio::spawn(worker.run()); | |
| @@ -314,6 +320,7 @@ async fn drop_aborts_batch_and_requeues() { | |
| NoopNullifierCheck, | |
| MockTransactionStore::new(), | |
| reader, | |
| + NoopBurnerCanonicalVerifier, | |
| ) | |
| .with_max_inflight(3) | |
| .with_health(Arc::new(reconciler.clone())) | |
| @@ -371,6 +378,7 @@ async fn drop_recovery_with_mixed_outcomes() { | |
| NoopNullifierCheck, | |
| MockTransactionStore::new(), | |
| reader, | |
| + NoopBurnerCanonicalVerifier, | |
| ) | |
| .with_max_inflight(3) | |
| .with_health(Arc::new(reconciler.clone())) | |
| @@ -422,6 +430,7 @@ async fn revert_does_not_affect_other_inflight_jobs() { | |
| NoopNullifierCheck, | |
| MockTransactionStore::new(), | |
| reader, | |
| + NoopBurnerCanonicalVerifier, | |
| ) | |
| .with_max_inflight(3) | |
| .with_retry_config(test_retry_config(0)); // no retries for reverts | |
| @@ -469,6 +478,7 @@ async fn shutdown_drains_inflight_watchers() { | |
| NoopNullifierCheck, | |
| MockTransactionStore::new(), | |
| reader, | |
| + NoopBurnerCanonicalVerifier, | |
| ) | |
| .with_max_inflight(3); | |
| let handle = tokio::spawn(worker.run()); | |
| @@ -499,14 +509,15 @@ async fn db_poll_deduplicates_inflight_jobs() { | |
| /// Relay data reader that always returns the same row. | |
| #[derive(Clone)] | |
| struct AlwaysReturnReader { | |
| - row: (Uuid, String, Vec<u8>, Option<serde_json::Value>), | |
| + row: (Uuid, Uuid, Uuid, String, Vec<u8>, Option<serde_json::Value>), | |
| } | |
| #[async_trait] | |
| impl RelayDataReader for AlwaysReturnReader { | |
| async fn find_proved_relay_payload( | |
| &self, | |
| - ) -> Result<Vec<(Uuid, String, Vec<u8>, Option<serde_json::Value>)>, String> { | |
| + ) -> Result<Vec<(Uuid, Uuid, Uuid, String, Vec<u8>, Option<serde_json::Value>)>, String> | |
| + { | |
| Ok(vec![self.row.clone()]) | |
| } | |
| async fn fail_relay_atomic(&self, _tx_id: Uuid) -> Result<bool, String> { | |
| @@ -527,6 +538,7 @@ async fn db_poll_deduplicates_inflight_jobs() { | |
| NoopNullifierCheck, | |
| MockTransactionStore::new(), | |
| reader, | |
| + NoopBurnerCanonicalVerifier, | |
| ) | |
| .with_max_inflight(4); | |
| let handle = tokio::spawn(worker.run()); | |
| @@ -587,6 +599,7 @@ async fn shutdown_with_inflight_watcher_exits_cleanly() { | |
| NoopNullifierCheck, | |
| MockTransactionStore::new(), | |
| reader, | |
| + NoopBurnerCanonicalVerifier, | |
| ); | |
| let handle = tokio::spawn(worker.run()); | |
| @@ -623,6 +636,7 @@ async fn repeated_batch_aborts_hit_requeue_limit() { | |
| NoopNullifierCheck, | |
| MockTransactionStore::new(), | |
| reader, | |
| + NoopBurnerCanonicalVerifier, | |
| ) | |
| .with_health(Arc::new(reconciler.clone())) | |
| .with_max_inflight(1); | |
| @@ -684,6 +698,7 @@ async fn get_receipt_revert_during_batch_abort_fails_job() { | |
| NoopNullifierCheck, | |
| MockTransactionStore::new(), | |
| reader, | |
| + NoopBurnerCanonicalVerifier, | |
| ) | |
| .with_health(Arc::new(reconciler.clone())) | |
| .with_max_inflight(2); | |
| diff --git a/protocol/backend/crates/workers/src/relay/traits.rs b/protocol/backend/crates/workers/src/relay/traits.rs | |
| index 7c825cd..c1a27df 100644 | |
| --- a/protocol/backend/crates/workers/src/relay/traits.rs | |
| +++ b/protocol/backend/crates/workers/src/relay/traits.rs | |
| @@ -175,6 +175,45 @@ impl BurnerGasStatusUpdater for NoopBurnerGasStatusUpdater { | |
| } | |
| } | |
| +/// Narrow trait for verifying a burner's EVM address against the canonical | |
| +/// `BurnerAccount` store before accepting gas funding from `relay_extra`. | |
| +/// | |
| +/// Used by the poll loop in `db.rs` to close the sideband-tampering attack | |
| +/// (RL-02): the relayer must verify `relay_extra.evm_address` matches the | |
| +/// address stored in `BurnerAccount` for the same `(tx_id, tenant_id, project_id)` | |
| +/// before setting `gas_funding.recipient`. | |
| +#[async_trait] | |
| +pub trait BurnerCanonicalVerifier: Send + Sync + 'static { | |
| + /// Verify `evm_address` is the canonical burner address for the given | |
| + /// transaction, scoped to `tenant_id` + `project_id`. Returns `Ok(true)` | |
| + /// on match, `Ok(false)` if the address doesn't match the canonical record, | |
| + /// or `Err(String)` on a lookup error. | |
| + async fn verify_burner_address( | |
| + &self, | |
| + tx_id: uuid::Uuid, | |
| + evm_address: &str, | |
| + tenant_id: uuid::Uuid, | |
| + project_id: uuid::Uuid, | |
| + ) -> Result<bool, String>; | |
| +} | |
| + | |
| +/// No-op verifier — every address passes. Default for tests and environments | |
| +/// without burner support. | |
| +pub struct NoopBurnerCanonicalVerifier; | |
| + | |
| +#[async_trait] | |
| +impl BurnerCanonicalVerifier for NoopBurnerCanonicalVerifier { | |
| + async fn verify_burner_address( | |
| + &self, | |
| + _tx_id: uuid::Uuid, | |
| + _evm_address: &str, | |
| + _tenant_id: uuid::Uuid, | |
| + _project_id: uuid::Uuid, | |
| + ) -> Result<bool, String> { | |
| + Ok(true) | |
| + } | |
| +} | |
| + | |
| /// Trait for reading relay-payload rows from the transactional outbox. | |
| /// | |
| /// The relay worker polls this on every wake-up (notification or 5s interval) | |
| @@ -183,9 +222,14 @@ impl BurnerGasStatusUpdater for NoopBurnerGasStatusUpdater { | |
| #[async_trait] | |
| pub trait RelayDataReader: Send + Sync + 'static { | |
| /// Return all relay-payload rows for transactions in accepted/proved/broadcasting state. | |
| + /// Includes `tenant_id` and `project_id` so the poll loop can verify burner addresses | |
| + /// against the canonical `BurnerAccount` store (RL-02 fix). | |
| async fn find_proved_relay_payload( | |
| &self, | |
| - ) -> Result<Vec<(uuid::Uuid, String, Vec<u8>, Option<serde_json::Value>)>, String>; | |
| + ) -> Result< | |
| + Vec<(uuid::Uuid, uuid::Uuid, uuid::Uuid, String, Vec<u8>, Option<serde_json::Value>)>, | |
| + String, | |
| + >; | |
| /// Mark an undecodable relay payload as permanently failed. | |
| async fn fail_relay_atomic(&self, tx_id: uuid::Uuid) -> Result<bool, String>; | |
| /// Like `fail_relay_atomic`, but PRESERVES `relay_payload` and | |
| diff --git a/protocol/backend/crates/workers/src/relay/worker.rs b/protocol/backend/crates/workers/src/relay/worker.rs | |
| index 7813243..0c77fc2 100644 | |
| --- a/protocol/backend/crates/workers/src/relay/worker.rs | |
| +++ b/protocol/backend/crates/workers/src/relay/worker.rs | |
| @@ -40,9 +40,9 @@ use super::outcome::record_job_duration; | |
| use super::recovery::{handle_watch_cancelled, handle_watch_error, handle_watcher_panicked}; | |
| use super::retry::handle_error; | |
| use super::traits::{ | |
| - BurnerGasStatusUpdater, NoopBurnerGasStatusUpdater, NoopPermit2Check, NoopRelayerHealth, | |
| - NoopTransactionStatusWriter, NullifierCheck, Permit2Check, RelayDataReader, RelayerHealth, | |
| - TransactionStatusWriter, | |
| + BurnerCanonicalVerifier, BurnerGasStatusUpdater, NoopBurnerGasStatusUpdater, NoopPermit2Check, | |
| + NoopRelayerHealth, NoopTransactionStatusWriter, NullifierCheck, Permit2Check, RelayDataReader, | |
| + RelayerHealth, TransactionStatusWriter, | |
| }; | |
| use super::types::{ | |
| BroadcastPhaseOutcome, BroadcastTaskResult, InflightJob, PendingRetry, RetryConfig, | |
| @@ -58,13 +58,14 @@ use crate::submitter::ChainSubmitter; | |
| /// | |
| /// Submission is idempotent: on retry the worker checks for an existing | |
| /// broadcast tx hash and watches it instead of resubmitting. | |
| -pub struct RelayWorker<S, U, N, T, D> | |
| +pub struct RelayWorker<S, U, N, T, D, B> | |
| where | |
| S: ChainSubmitter, | |
| U: StatusUpdater, | |
| N: NullifierCheck, | |
| T: domain::traits::TransactionStore + Send + Sync + 'static, | |
| D: RelayDataReader, | |
| + B: BurnerCanonicalVerifier, | |
| { | |
| submitter: S, | |
| notify: Arc<Notify>, | |
| @@ -73,6 +74,7 @@ where | |
| nullifier_check: N, | |
| tx_store: T, | |
| relay_store: D, | |
| + burner_verifier: B, | |
| status_writer: Arc<dyn TransactionStatusWriter>, | |
| /// Combined nonce reconciler + slot-health reporter. Production wires | |
| /// the `RelayerPool` here; tests use `NoopRelayerHealth` or a recording | |
| @@ -87,13 +89,14 @@ where | |
| max_inflight: usize, | |
| } | |
| -impl<S, U, N, T, D> RelayWorker<S, U, N, T, D> | |
| +impl<S, U, N, T, D, B> RelayWorker<S, U, N, T, D, B> | |
| where | |
| S: ChainSubmitter, | |
| U: StatusUpdater, | |
| N: NullifierCheck, | |
| T: domain::traits::TransactionStore + Send + Sync + 'static, | |
| D: RelayDataReader, | |
| + B: BurnerCanonicalVerifier, | |
| { | |
| pub fn new( | |
| submitter: S, | |
| @@ -103,6 +106,7 @@ where | |
| nullifier_check: N, | |
| tx_store: T, | |
| relay_store: D, | |
| + burner_verifier: B, | |
| ) -> Self { | |
| Self { | |
| submitter, | |
| @@ -112,6 +116,7 @@ where | |
| nullifier_check, | |
| tx_store, | |
| relay_store, | |
| + burner_verifier, | |
| status_writer: Arc::new(NoopTransactionStatusWriter), | |
| health: Arc::new(NoopRelayerHealth), | |
| burner_gas_updater: Arc::new(NoopBurnerGasStatusUpdater), | |
| @@ -195,6 +200,7 @@ where | |
| let burner_gas_updater = self.burner_gas_updater; | |
| let permit2_check = self.permit2_check; | |
| let status_writer = self.status_writer; | |
| + let burner_verifier = self.burner_verifier; | |
| // Move deps only used in main loop (no Arc needed) | |
| let notify = self.notify; | |
| @@ -647,24 +653,26 @@ where | |
| _ = notify.notified() => { | |
| poll_db( | |
| &relay_store, | |
| + &burner_verifier, | |
| burner_gas_funding_wei, | |
| &retry_set, | |
| &inflight, | |
| &mut pending_jobs, | |
| &mut pending_set, | |
| - &broadcasting_set, | |
| + &mut broadcasting_set, | |
| ) | |
| .await; | |
| } | |
| _ = poll_interval.tick() => { | |
| poll_db( | |
| &relay_store, | |
| + &burner_verifier, | |
| burner_gas_funding_wei, | |
| &retry_set, | |
| &inflight, | |
| &mut pending_jobs, | |
| &mut pending_set, | |
| - &broadcasting_set, | |
| + &mut broadcasting_set, | |
| ) | |
| .await; | |
| } | |
| diff --git a/protocol/backend/tests/tests/burner_gas_funding.rs b/protocol/backend/tests/tests/burner_gas_funding.rs | |
| index 2be1aeb..5d0bf09 100644 | |
| --- a/protocol/backend/tests/tests/burner_gas_funding.rs | |
| +++ b/protocol/backend/tests/tests/burner_gas_funding.rs | |
| @@ -19,7 +19,7 @@ use workers::{RelayJob, RelayOutcome, RelayWorker}; | |
| use support::{ | |
| CollectingUpdater, InMemoryBurnerGasUpdater, InMemoryTxStore, MockChainSubmitter, | |
| - NoopNullifierCheck, SingleJobRelayReader, | |
| + NoopBurnerCanonicalVerifier, NoopNullifierCheck, SingleJobRelayReader, | |
| }; | |
| // ── Helpers ─────────────────────────────────────────────────────────── | |
| @@ -70,7 +70,13 @@ async fn relay_worker_sends_eth_after_burner_withdrawal_confirms() { | |
| let updater = CollectingUpdater::default(); | |
| let gas_updater = InMemoryBurnerGasUpdater::default(); | |
| - let relay_store = SingleJobRelayReader::new(tx_id, &job, Some(burner_extra(burner_address))); | |
| + let relay_store = SingleJobRelayReader::new( | |
| + tx_id, | |
| + Uuid::nil(), | |
| + Uuid::nil(), | |
| + &job, | |
| + Some(burner_extra(burner_address)), | |
| + ); | |
| let relay_notify = Arc::new(Notify::new()); | |
| let shutdown = CancellationToken::new(); | |
| @@ -83,6 +89,7 @@ async fn relay_worker_sends_eth_after_burner_withdrawal_confirms() { | |
| NoopNullifierCheck, | |
| InMemoryTxStore::default(), | |
| relay_store, | |
| + NoopBurnerCanonicalVerifier, | |
| ) | |
| .with_burner_gas_updater(gas_updater.clone()) | |
| .with_burner_gas_funding_wei(U256::from(BURNER_GAS_WEI)); | |
| @@ -130,7 +137,7 @@ async fn relay_worker_skips_eth_for_non_burner_withdrawal() { | |
| let updater = CollectingUpdater::default(); | |
| // No extra → not a burner withdrawal. | |
| - let relay_store = SingleJobRelayReader::new(tx_id, &job, None); | |
| + let relay_store = SingleJobRelayReader::new(tx_id, Uuid::nil(), Uuid::nil(), &job, None); | |
| let relay_notify = Arc::new(Notify::new()); | |
| let shutdown = CancellationToken::new(); | |
| @@ -143,6 +150,7 @@ async fn relay_worker_skips_eth_for_non_burner_withdrawal() { | |
| NoopNullifierCheck, | |
| InMemoryTxStore::default(), | |
| relay_store, | |
| + NoopBurnerCanonicalVerifier, | |
| ) | |
| .with_burner_gas_funding_wei(U256::from(BURNER_GAS_WEI)); | |
| @@ -175,7 +183,13 @@ async fn relay_worker_retries_eth_on_transient_failure() { | |
| let updater = CollectingUpdater::default(); | |
| let gas_updater = InMemoryBurnerGasUpdater::default(); | |
| - let relay_store = SingleJobRelayReader::new(tx_id, &job, Some(burner_extra(burner_address))); | |
| + let relay_store = SingleJobRelayReader::new( | |
| + tx_id, | |
| + Uuid::nil(), | |
| + Uuid::nil(), | |
| + &job, | |
| + Some(burner_extra(burner_address)), | |
| + ); | |
| let relay_notify = Arc::new(Notify::new()); | |
| let shutdown = CancellationToken::new(); | |
| @@ -188,6 +202,7 @@ async fn relay_worker_retries_eth_on_transient_failure() { | |
| NoopNullifierCheck, | |
| InMemoryTxStore::default(), | |
| relay_store, | |
| + NoopBurnerCanonicalVerifier, | |
| ) | |
| .with_burner_gas_updater(gas_updater.clone()) | |
| .with_burner_gas_funding_wei(U256::from(BURNER_GAS_WEI)); | |
| @@ -231,7 +246,13 @@ async fn relay_worker_marks_gas_failed_after_exhausting_retries() { | |
| let updater = CollectingUpdater::default(); | |
| let gas_updater = InMemoryBurnerGasUpdater::default(); | |
| - let relay_store = SingleJobRelayReader::new(tx_id, &job, Some(burner_extra(burner_address))); | |
| + let relay_store = SingleJobRelayReader::new( | |
| + tx_id, | |
| + Uuid::nil(), | |
| + Uuid::nil(), | |
| + &job, | |
| + Some(burner_extra(burner_address)), | |
| + ); | |
| let relay_notify = Arc::new(Notify::new()); | |
| let shutdown = CancellationToken::new(); | |
| @@ -244,6 +265,7 @@ async fn relay_worker_marks_gas_failed_after_exhausting_retries() { | |
| NoopNullifierCheck, | |
| InMemoryTxStore::default(), | |
| relay_store, | |
| + NoopBurnerCanonicalVerifier, | |
| ) | |
| .with_burner_gas_updater(gas_updater.clone()) | |
| .with_burner_gas_funding_wei(U256::from(BURNER_GAS_WEI)); | |
| @@ -285,7 +307,13 @@ async fn relay_worker_skips_gas_funding_when_already_funded() { | |
| // Pre-mark address as already funded (simulates crash recovery). | |
| gas_updater.already_funded.lock().unwrap().push(burner_address.to_string().to_lowercase()); | |
| - let relay_store = SingleJobRelayReader::new(tx_id, &job, Some(burner_extra(burner_address))); | |
| + let relay_store = SingleJobRelayReader::new( | |
| + tx_id, | |
| + Uuid::nil(), | |
| + Uuid::nil(), | |
| + &job, | |
| + Some(burner_extra(burner_address)), | |
| + ); | |
| let relay_notify = Arc::new(Notify::new()); | |
| let shutdown = CancellationToken::new(); | |
| @@ -298,6 +326,7 @@ async fn relay_worker_skips_gas_funding_when_already_funded() { | |
| NoopNullifierCheck, | |
| InMemoryTxStore::default(), | |
| relay_store, | |
| + NoopBurnerCanonicalVerifier, | |
| ) | |
| .with_burner_gas_updater(gas_updater.clone()) | |
| .with_burner_gas_funding_wei(U256::from(BURNER_GAS_WEI)); | |
| @@ -343,7 +372,13 @@ async fn relay_worker_skips_gas_funding_when_gas_pending() { | |
| .unwrap() | |
| .push((burner_address.to_string().to_lowercase(), "0xdeadbeef".to_string())); | |
| - let relay_store = SingleJobRelayReader::new(tx_id, &job, Some(burner_extra(burner_address))); | |
| + let relay_store = SingleJobRelayReader::new( | |
| + tx_id, | |
| + Uuid::nil(), | |
| + Uuid::nil(), | |
| + &job, | |
| + Some(burner_extra(burner_address)), | |
| + ); | |
| let relay_notify = Arc::new(Notify::new()); | |
| let shutdown = CancellationToken::new(); | |
| @@ -356,6 +391,7 @@ async fn relay_worker_skips_gas_funding_when_gas_pending() { | |
| NoopNullifierCheck, | |
| InMemoryTxStore::default(), | |
| relay_store, | |
| + NoopBurnerCanonicalVerifier, | |
| ) | |
| .with_burner_gas_updater(gas_updater.clone()) | |
| .with_burner_gas_funding_wei(U256::from(BURNER_GAS_WEI)); | |
| diff --git a/protocol/backend/tests/tests/full_engine/mod.rs b/protocol/backend/tests/tests/full_engine/mod.rs | |
| index f883de2..0b0b715 100644 | |
| --- a/protocol/backend/tests/tests/full_engine/mod.rs | |
| +++ b/protocol/backend/tests/tests/full_engine/mod.rs | |
| @@ -53,6 +53,7 @@ use storage::{ | |
| PgUserStore, | |
| }; | |
| use workers::prover::{LocalProver, ProverConfig}; | |
| +use workers::relay::NoopBurnerCanonicalVerifier; | |
| use workers::testing::NoopNullifierCheck; | |
| use workers::{ | |
| DepositNotifier, MpscProveDispatcher, ProveJob, ProveWorker, RelayDataReader, RelayWorker, | |
| @@ -596,10 +597,16 @@ struct TestRelayDataReader(PgTransactionStore); | |
| impl RelayDataReader for TestRelayDataReader { | |
| async fn find_proved_relay_payload( | |
| &self, | |
| - ) -> Result<Vec<(uuid::Uuid, String, Vec<u8>, Option<serde_json::Value>)>, String> { | |
| + ) -> Result< | |
| + Vec<(uuid::Uuid, uuid::Uuid, uuid::Uuid, String, Vec<u8>, Option<serde_json::Value>)>, | |
| + String, | |
| + > { | |
| use domain::traits::TransactionStore; | |
| let rows = self.0.find_proved_relay_payload().await.map_err(|e| e.to_string())?; | |
| - Ok(rows.into_iter().map(|(id, kind, p, e)| (id, kind.to_string(), p, e)).collect()) | |
| + Ok(rows | |
| + .into_iter() | |
| + .map(|(id, tid, pid, kind, p, e)| (id, tid, pid, kind.to_string(), p, e)) | |
| + .collect()) | |
| } | |
| async fn fail_relay_atomic(&self, tx_id: uuid::Uuid) -> Result<bool, String> { | |
| use domain::traits::TransactionStore; | |
| @@ -860,9 +867,6 @@ pub async fn boot_with_keys( | |
| let max_inflight = slot_addresses.len(); | |
| handles.push(tokio::spawn( | |
| RelayWorker::new( | |
| - // Hand the worker its own Arc clone — engages the | |
| - // `ChainSubmitter for Arc<S>` blanket impl so `pool_route_count` | |
| - // and the worker's submitter share the same pool. | |
| Arc::clone(&relayer_pool), | |
| relay_notify.clone(), | |
| shutdown.clone(), | |
| @@ -870,6 +874,7 @@ pub async fn boot_with_keys( | |
| NoopNullifierCheck, | |
| PgTransactionStore::new(pool.clone()), | |
| relay_data_reader, | |
| + NoopBurnerCanonicalVerifier, | |
| ) | |
| .with_max_inflight(max_inflight) | |
| .run(), | |
| diff --git a/protocol/backend/tests/tests/relayer_worker.rs b/protocol/backend/tests/tests/relayer_worker.rs | |
| index 36a02fd..1cf8007 100644 | |
| --- a/protocol/backend/tests/tests/relayer_worker.rs | |
| +++ b/protocol/backend/tests/tests/relayer_worker.rs | |
| @@ -18,7 +18,10 @@ use contracts::{Ciphertext, Note, PermitTransferFrom, TokenPermissions}; | |
| use serial_test::serial; | |
| use workers::{RelayDataReader, RelayJob, RelayOutcome, RelayWorker}; | |
| -use support::{CollectingUpdater, InMemoryTxStore, NoopNullifierCheck, SingleJobRelayReader}; | |
| +use support::{ | |
| + CollectingUpdater, InMemoryTxStore, NoopBurnerCanonicalVerifier, NoopNullifierCheck, | |
| + SingleJobRelayReader, | |
| +}; | |
| /// Loads deployment or skips the test if Anvil is not available. | |
| macro_rules! require_anvil { | |
| @@ -39,7 +42,7 @@ struct EmptyRelayDataReader; | |
| impl RelayDataReader for EmptyRelayDataReader { | |
| async fn find_proved_relay_payload( | |
| &self, | |
| - ) -> Result<Vec<(Uuid, String, Vec<u8>, Option<serde_json::Value>)>, String> { | |
| + ) -> Result<Vec<(Uuid, Uuid, Uuid, String, Vec<u8>, Option<serde_json::Value>)>, String> { | |
| Ok(vec![]) | |
| } | |
| async fn fail_relay_atomic(&self, _tx_id: Uuid) -> Result<bool, String> { | |
| @@ -76,7 +79,8 @@ async fn deposit_via_relay_worker_reports_failure_for_invalid_deposit() { | |
| }, | |
| signature: Bytes::new(), | |
| }; | |
| - let relay_store = SingleJobRelayReader::new(tx_id, &invalid_deposit_job, None); | |
| + let relay_store = | |
| + SingleJobRelayReader::new(tx_id, Uuid::nil(), Uuid::nil(), &invalid_deposit_job, None); | |
| // Spawn relay worker (submits on-chain). | |
| let relay_worker = RelayWorker::new( | |
| @@ -87,6 +91,7 @@ async fn deposit_via_relay_worker_reports_failure_for_invalid_deposit() { | |
| NoopNullifierCheck, | |
| InMemoryTxStore::default(), | |
| relay_store, | |
| + NoopBurnerCanonicalVerifier, | |
| ); | |
| let relay_handle = tokio::spawn(relay_worker.run()); | |
| @@ -148,7 +153,8 @@ async fn deposit_via_relay_worker_confirms_valid_deposit() { | |
| let shutdown = CancellationToken::new(); | |
| let deposit_job = RelayJob::Deposit { tx_id, depositor, notes, ciphertexts, permit, signature }; | |
| - let relay_store = SingleJobRelayReader::new(tx_id, &deposit_job, None); | |
| + let relay_store = | |
| + SingleJobRelayReader::new(tx_id, Uuid::nil(), Uuid::nil(), &deposit_job, None); | |
| let relay_worker = RelayWorker::new( | |
| client, | |
| @@ -158,6 +164,7 @@ async fn deposit_via_relay_worker_confirms_valid_deposit() { | |
| NoopNullifierCheck, | |
| InMemoryTxStore::default(), | |
| relay_store, | |
| + NoopBurnerCanonicalVerifier, | |
| ); | |
| let relay_handle = tokio::spawn(relay_worker.run()); | |
| @@ -199,6 +206,7 @@ async fn relay_worker_shuts_down_when_cancelled() { | |
| NoopNullifierCheck, | |
| InMemoryTxStore::default(), | |
| EmptyRelayDataReader, | |
| + NoopBurnerCanonicalVerifier, | |
| ); | |
| let handle = tokio::spawn(worker.run()); | |
| @@ -222,7 +230,8 @@ async fn relay_worker_reports_failure_for_invalid_transfer() { | |
| let tx_id = Uuid::new_v4(); | |
| // Empty transactions vec will revert | |
| let invalid_job = RelayJob::Transfer { tx_id, transactions: vec![] }; | |
| - let relay_store = SingleJobRelayReader::new(tx_id, &invalid_job, None); | |
| + let relay_store = | |
| + SingleJobRelayReader::new(tx_id, Uuid::nil(), Uuid::nil(), &invalid_job, None); | |
| let worker = RelayWorker::new( | |
| client, | |
| @@ -232,6 +241,7 @@ async fn relay_worker_reports_failure_for_invalid_transfer() { | |
| NoopNullifierCheck, | |
| InMemoryTxStore::default(), | |
| relay_store, | |
| + NoopBurnerCanonicalVerifier, | |
| ); | |
| let handle = tokio::spawn(worker.run()); | |
| diff --git a/protocol/backend/tests/tests/support/mod.rs b/protocol/backend/tests/tests/support/mod.rs | |
| index d1bd3b8..292ef8c 100644 | |
| --- a/protocol/backend/tests/tests/support/mod.rs | |
| +++ b/protocol/backend/tests/tests/support/mod.rs | |
| @@ -7,9 +7,13 @@ use std::sync::{Arc, Mutex}; | |
| use async_trait::async_trait; | |
| use uuid::Uuid; | |
| -use workers::{BurnerGasStatusUpdater, RelayDataReader, RelayJob, RelayOutcome, StatusUpdater}; | |
| +use workers::{ | |
| + BurnerCanonicalVerifier, BurnerGasStatusUpdater, RelayDataReader, RelayJob, RelayOutcome, | |
| + StatusUpdater, | |
| +}; | |
| pub use domain::services::test_support::MockTransactionStore as InMemoryTxStore; | |
| +pub use workers::relay::NoopBurnerCanonicalVerifier; | |
| pub use workers::testing::NoopNullifierCheck; | |
| // ── Outcome collector ──────────────────────────────────────────────── | |
| @@ -35,14 +39,22 @@ impl StatusUpdater for CollectingUpdater { | |
| #[derive(Clone)] | |
| pub struct SingleJobRelayReader { | |
| #[allow(clippy::type_complexity)] | |
| - data: Arc<Mutex<Option<(Uuid, String, Vec<u8>, Option<serde_json::Value>)>>>, | |
| + data: Arc<Mutex<Option<(Uuid, Uuid, Uuid, String, Vec<u8>, Option<serde_json::Value>)>>>, | |
| } | |
| impl SingleJobRelayReader { | |
| - pub fn new(tx_id: Uuid, job: &RelayJob, extra: Option<serde_json::Value>) -> Self { | |
| + pub fn new( | |
| + tx_id: Uuid, | |
| + tenant_id: Uuid, | |
| + project_id: Uuid, | |
| + job: &RelayJob, | |
| + extra: Option<serde_json::Value>, | |
| + ) -> Self { | |
| Self { | |
| data: Arc::new(Mutex::new(Some(( | |
| tx_id, | |
| + tenant_id, | |
| + project_id, | |
| job.kind().to_string(), | |
| job.encode_payload(), | |
| extra, | |
| @@ -55,7 +67,7 @@ impl SingleJobRelayReader { | |
| impl RelayDataReader for SingleJobRelayReader { | |
| async fn find_proved_relay_payload( | |
| &self, | |
| - ) -> Result<Vec<(Uuid, String, Vec<u8>, Option<serde_json::Value>)>, String> { | |
| + ) -> Result<Vec<(Uuid, Uuid, Uuid, String, Vec<u8>, Option<serde_json::Value>)>, String> { | |
| let mut guard = self.data.lock().unwrap(); | |
| Ok(guard.take().into_iter().collect()) | |
| } | |
| diff --git a/protocol/cli/scripts/stress.ts b/protocol/cli/scripts/stress.ts | |
| index 0e21762..2655747 100644 | |
| --- a/protocol/cli/scripts/stress.ts | |
| +++ b/protocol/cli/scripts/stress.ts | |
| @@ -374,7 +374,11 @@ function resolvePath( | |
| return fallbackPath; | |
| } | |
| if (path.isAbsolute(input)) { | |
| - return input; | |
| + const resolved = resolve(input); | |
| + if (!resolved.startsWith(baseDir)) { | |
| + throw new Error(`Absolute path must be within base directory`); | |
| + } | |
| + return resolved; | |
| } | |
| const cwdPath = path.resolve(process.cwd(), input); | |
| if (fs.existsSync(cwdPath)) { | |
| diff --git a/protocol/cli/src/commands/balance.ts b/protocol/cli/src/commands/balance.ts | |
| index 5718df9..eb3c7c3 100644 | |
| --- a/protocol/cli/src/commands/balance.ts | |
| +++ b/protocol/cli/src/commands/balance.ts | |
| @@ -19,6 +19,14 @@ export function registerBalanceCommand(parent: Command) { | |
| .action( | |
| withCliErrorHandling(async (opts: BalanceOptions, cmd: Command) => { | |
| const globalOpts = cmd.optsWithGlobals<GlobalOptions>(); | |
| + if (opts.token) { | |
| + const EVM_ADDRESS_RE = /^0x[a-fA-F0-9]{40}$/; | |
| + if (!EVM_ADDRESS_RE.test(opts.token)) { | |
| + throw new Error( | |
| + "Invalid token address format: expected 0x-prefixed 40-hex", | |
| + ); | |
| + } | |
| + } | |
| const unlink = await loadOperationalClient( | |
| parseAccountIndex(opts.account), | |
| globalOpts, | |
| diff --git a/protocol/cli/src/signer.ts b/protocol/cli/src/signer.ts | |
| index 801ad21..2b1697d 100644 | |
| --- a/protocol/cli/src/signer.ts | |
| +++ b/protocol/cli/src/signer.ts | |
| @@ -26,6 +26,12 @@ export function createEvmSigner(privateKeyOpt?: string): EvmSigner { | |
| "No EVM private key provided. Use --evm-private-key or set $DEPOSITOR_PRIVATE_KEY.", | |
| ); | |
| } | |
| + const KEY_HEX_RE = /^0x[a-fA-F0-9]{64}$/; | |
| + if (!KEY_HEX_RE.test(privateKey)) { | |
| + throw new CliSigningError( | |
| + "Invalid private key format: expected 0x-prefixed 64-hex string", | |
| + ); | |
| + } | |
| const account = privateKeyToAccount(privateKey as `0x${string}`); | |
| return { | |
| diff --git a/protocol/cli/src/wallet.ts b/protocol/cli/src/wallet.ts | |
| index 9b90ab7..2f7f21b 100644 | |
| --- a/protocol/cli/src/wallet.ts | |
| +++ b/protocol/cli/src/wallet.ts | |
| @@ -1,6 +1,6 @@ | |
| import crypto from "node:crypto"; | |
| import fs from "node:fs"; | |
| -import path from "node:path"; | |
| +import path, { resolve } from "node:path"; | |
| import { | |
| account, | |
| createClient, | |
| @@ -40,7 +40,14 @@ interface WalletAccount { | |
| } | |
| function getWalletPath(opts?: { wallet?: string }): string { | |
| - return opts?.wallet || process.env.UNLINK_WALLET_PATH || DEFAULT_WALLET_PATH; | |
| + const walletPath = | |
| + opts?.wallet || process.env.UNLINK_WALLET_PATH || DEFAULT_WALLET_PATH; | |
| + const resolvedPath = resolve(walletPath); | |
| + const allowedBase = resolve(process.env.HOME || "~", ".unlink"); | |
| + if (!resolvedPath.startsWith(allowedBase)) { | |
| + throw new Error("UNLINK_WALLET_PATH must be within ~/.unlink"); | |
| + } | |
| + return walletPath; | |
| } | |
| export function walletExists(opts?: { wallet?: string }): boolean { | |
| @@ -56,6 +63,17 @@ export async function createWallet(opts?: { | |
| fs.mkdirSync(dir, { recursive: true }); | |
| } | |
| + try { | |
| + const stat = fs.statSync(walletPath); | |
| + if (stat.isSymbolicLink()) { | |
| + throw new Error(`Refusing to write to symlink: ${walletPath}`); | |
| + } | |
| + } catch (e: unknown) { | |
| + if ((e as NodeJS.ErrnoException).code !== "ENOENT") { | |
| + throw e; | |
| + } | |
| + } | |
| + | |
| const seed = crypto.randomBytes(64); | |
| const keys = await deriveKeys(seed, 0); | |
| @@ -86,6 +104,17 @@ export async function addAccount( | |
| const walletPath = getWalletPath(opts); | |
| const walletFile = loadWalletFile(opts); | |
| + try { | |
| + const stat = fs.statSync(walletPath); | |
| + if (stat.isSymbolicLink()) { | |
| + throw new Error(`Refusing to write to symlink: ${walletPath}`); | |
| + } | |
| + } catch (e: unknown) { | |
| + if ((e as NodeJS.ErrnoException).code !== "ENOENT") { | |
| + throw e; | |
| + } | |
| + } | |
| + | |
| const index = walletFile.accounts.length; | |
| const seed = Buffer.from(walletFile.seed, "hex"); | |
| const keys = await deriveKeys(new Uint8Array(seed), index); | |
| diff --git a/protocol/openapi/dashboard.yaml b/protocol/openapi/dashboard.yaml | |
| index 8155320..4af9ec1 100644 | |
| --- a/protocol/openapi/dashboard.yaml | |
| +++ b/protocol/openapi/dashboard.yaml | |
| @@ -404,7 +404,7 @@ components: | |
| ProjectResponse: | |
| type: object | |
| - required: [id, tenant_id, name, slug, created_at] | |
| + required: [id, tenant_id, tenant_slug, name, slug, created_at] | |
| properties: | |
| id: | |
| type: string | |
| @@ -412,6 +412,9 @@ components: | |
| tenant_id: | |
| type: string | |
| format: uuid | |
| + tenant_slug: | |
| + type: string | |
| + description: Tenant slug for tenant-scoped project filtering | |
| name: | |
| type: string | |
| slug: | |
| diff --git a/protocol/sdk/src/__tests__/burner.test.ts b/protocol/sdk/src/__tests__/burner.test.ts | |
| index 2d65873..e20cb06 100644 | |
| --- a/protocol/sdk/src/__tests__/burner.test.ts | |
| +++ b/protocol/sdk/src/__tests__/burner.test.ts | |
| @@ -1,4 +1,5 @@ | |
| import { describe, expect, it, vi } from "vitest"; | |
| +import { poseidon1, poseidon2, poseidon3, poseidon4, poseidon5 } from "poseidon-lite"; | |
| import { BurnerWallet } from "../burner.js"; | |
| import { createHttpClient } from "../client.js"; | |
| @@ -6,6 +7,13 @@ import { createHttpClient } from "../client.js"; | |
| const BASE_URL = "https://api.test.example.com"; | |
| const API_KEY = "test-api-key"; | |
| +const POSEIDON_FNS: ((inputs: bigint[]) => bigint)[] = [ | |
| + () => 0n, poseidon1, poseidon2, poseidon3, poseidon4, poseidon5, | |
| +]; | |
| +function poseidonHash(inputs: bigint[]): bigint { | |
| + return POSEIDON_FNS[inputs.length]!(inputs); | |
| +} | |
| + | |
| /** Build a mock fetch that routes by URL pattern and optional method. */ | |
| function mockFetchRoutes( | |
| routes: Record<string, { method?: string; body: unknown; status?: number }>, | |
| @@ -38,6 +46,20 @@ describe("BurnerWallet", () => { | |
| describe("fundFromPool", () => { | |
| it("calls /burner/create and signs the withdrawal", async () => { | |
| const txId = "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee"; | |
| + const burner = await BurnerWallet.create(); | |
| + const tokenAddr = "0x" + "00".repeat(20); | |
| + const amount = "1000000"; | |
| + | |
| + const burnerNpK = BigInt("0x" + burner.address.slice(2).padStart(64, "0")); | |
| + const tokenFr = BigInt("0x" + tokenAddr.slice(2).padStart(64, "0")); | |
| + const amountFr = BigInt(amount); | |
| + const commitment = poseidonHash([burnerNpK, tokenFr, amountFr]); | |
| + | |
| + const hInputs = poseidonHash([10n, 20n]); | |
| + const hNullifiers = poseidonHash([30n]); | |
| + const hCommitments = poseidonHash([commitment]); | |
| + const messageHash = poseidonHash([hInputs, hNullifiers, hCommitments]); | |
| + | |
| const mockFetch = mockFetchRoutes({ | |
| "/burner/create": { | |
| method: "POST", | |
| @@ -46,13 +68,12 @@ describe("BurnerWallet", () => { | |
| data: { | |
| tx_id: txId, | |
| signing_request: { | |
| - message_hash: | |
| - "6663771639536222911804260354004193920994075447051782271212316515499152963724", | |
| + message_hash: messageHash.toString(), | |
| public_inputs: { | |
| merkle_root: "10", | |
| context_hash: "20", | |
| nullifiers: ["30"], | |
| - all_commitments_out: ["40"], | |
| + all_commitments_out: [commitment.toString()], | |
| }, | |
| }, | |
| }, | |
| @@ -65,7 +86,6 @@ describe("BurnerWallet", () => { | |
| }); | |
| const client = createHttpClient(BASE_URL, API_KEY, mockFetch); | |
| - const burner = await BurnerWallet.create(); | |
| const { deriveAccountKeys } = await import("../keys/derive.js"); | |
| const keys = await deriveAccountKeys( | |
| @@ -75,14 +95,78 @@ describe("BurnerWallet", () => { | |
| const result = await burner.fundFromPool(client, { | |
| senderKeys: keys, | |
| - token: "0x" + "00".repeat(20), | |
| - amount: "1000000", | |
| + token: tokenAddr, | |
| + amount: amount, | |
| environment: "test", | |
| }); | |
| expect(result.txId).toBe(txId); | |
| expect(result.status).toBe("accepted"); | |
| }); | |
| + | |
| + it("rejects a malicious backend that substitutes the commitment", async () => { | |
| + const txId = "bbbbbbbb-cccc-dddd-eeee-ffffffffffff"; | |
| + const burner = await BurnerWallet.create(); | |
| + const tokenAddr = "0x" + "00".repeat(20); | |
| + const amount = "2000000"; | |
| + | |
| + const burnerNpK = BigInt("0x" + burner.address.slice(2).padStart(64, "0")); | |
| + const tokenFr = BigInt("0x" + tokenAddr.slice(2).padStart(64, "0")); | |
| + const amountFr = BigInt(amount); | |
| + | |
| + const WRONG_COMMITMENT = BigInt( | |
| + "8888888888888888888888888888888888888888888888888888", | |
| + ); | |
| + const wrongHCommitments = poseidonHash([WRONG_COMMITMENT]); | |
| + const hInputs = poseidonHash([10n, 20n]); | |
| + const hNullifiers = poseidonHash([30n]); | |
| + const wrongMsgHash = poseidonHash([ | |
| + hInputs, | |
| + hNullifiers, | |
| + wrongHCommitments, | |
| + ]); | |
| + | |
| + const mockFetch = mockFetchRoutes({ | |
| + "/burner/create": { | |
| + method: "POST", | |
| + status: 201, | |
| + body: { | |
| + data: { | |
| + tx_id: txId, | |
| + signing_request: { | |
| + message_hash: wrongMsgHash.toString(), | |
| + public_inputs: { | |
| + merkle_root: "10", | |
| + context_hash: "20", | |
| + nullifiers: ["30"], | |
| + all_commitments_out: [WRONG_COMMITMENT.toString()], | |
| + }, | |
| + }, | |
| + }, | |
| + }, | |
| + }, | |
| + }); | |
| + | |
| + const client = createHttpClient(BASE_URL, API_KEY, mockFetch); | |
| + | |
| + const { deriveAccountKeys } = await import("../keys/derive.js"); | |
| + const keys = await deriveAccountKeys( | |
| + Buffer.from("test-seed-for-burner-wallet-testing-1234", "utf-8"), | |
| + 0, | |
| + ); | |
| + | |
| + try { | |
| + await burner.fundFromPool(client, { | |
| + senderKeys: keys, | |
| + token: tokenAddr, | |
| + amount, | |
| + environment: "test", | |
| + }); | |
| + expect.unreachable("malicious backend should have been rejected"); | |
| + } catch (err) { | |
| + expect((err as Error).message).toContain("commitment mismatch"); | |
| + } | |
| + }); | |
| }); | |
| describe("getStatus", () => { | |
| diff --git a/protocol/sdk/src/__tests__/transfer.test.ts b/protocol/sdk/src/__tests__/transfer.test.ts | |
| index 05d80e2..79c9c0e 100644 | |
| --- a/protocol/sdk/src/__tests__/transfer.test.ts | |
| +++ b/protocol/sdk/src/__tests__/transfer.test.ts | |
| @@ -1,4 +1,5 @@ | |
| import { describe, expect, it, vi } from "vitest"; | |
| +import { poseidon1, poseidon2, poseidon3, poseidon4, poseidon5 } from "poseidon-lite"; | |
| import { createHttpClient } from "../client.js"; | |
| import { eddsaSign, eddsaVerify } from "../crypto/eddsa.js"; | |
| @@ -10,15 +11,36 @@ import { transfer } from "../transactions/transfer.js"; | |
| const BASE_URL = "http://localhost:3000"; | |
| const API_KEY = "test-key"; | |
| -// Poseidon(111, 222, 333, 444) — must match the mock public_inputs | |
| -const TEST_MESSAGE_HASH = | |
| - "19254840519170188545862775101913657432245463725821477914502433725206377151920"; | |
| -// Poseidon(111, 222, 333, 444, 555) — for multi-recipient mock (2 commitments) | |
| -const TEST_MESSAGE_HASH_MULTI = | |
| - "19713401851461044559470943798349333603177634580660808703781890624364878209366"; | |
| -// Poseidon(1, 2, 3, 4) — for submit-failure mock | |
| -const TEST_MESSAGE_HASH_SUBMIT = | |
| - "21068280238279045573778304197429874446618930746644087956846565421834592537736"; | |
| +const POSEIDON_FNS: ((inputs: bigint[]) => bigint)[] = [ | |
| + () => 0n, poseidon1, poseidon2, poseidon3, poseidon4, poseidon5, | |
| +]; | |
| +function poseidonHash(inputs: bigint[]): bigint { | |
| + return POSEIDON_FNS[inputs.length]!(inputs); | |
| +} | |
| + | |
| +const RECIPIENT1_NPK = 1n; | |
| +const RECIPIENT1_TOKEN = 2n; | |
| +const COMMITMENT1 = poseidonHash([RECIPIENT1_NPK, RECIPIENT1_TOKEN, 500n]); | |
| + | |
| +const RECIPIENT2_NPK = 3n; | |
| +const RECIPIENT2_TOKEN = 4n; | |
| +const COMMITMENT2 = poseidonHash([RECIPIENT2_NPK, RECIPIENT2_TOKEN, 2000n]); | |
| + | |
| +const hInputs = poseidonHash([111n, 222n]); | |
| +const hNullifiers = poseidonHash([333n]); | |
| +const hCommitments = poseidonHash([COMMITMENT1]); | |
| +const TEST_MESSAGE_HASH = poseidonHash([hInputs, hNullifiers, hCommitments]).toString(); | |
| + | |
| +const hCommitmentsMulti = poseidonHash([COMMITMENT1, COMMITMENT2]); | |
| +const TEST_MESSAGE_HASH_MULTI = poseidonHash([hInputs, hNullifiers, hCommitmentsMulti]).toString(); | |
| + | |
| +const SUBMIT_NPK = 5n; | |
| +const SUBMIT_TOKEN = 6n; | |
| +const SUBMIT_COMMITMENT = poseidonHash([SUBMIT_NPK, SUBMIT_TOKEN, 1n]); | |
| +const hInputs2 = poseidonHash([1n, 2n]); | |
| +const hNullifiers2 = poseidonHash([3n]); | |
| +const hCommitments2 = poseidonHash([SUBMIT_COMMITMENT]); | |
| +const TEST_MESSAGE_HASH_SUBMIT = poseidonHash([hInputs2, hNullifiers2, hCommitments2]).toString(); | |
| async function getTestKeys() { | |
| const seed = new Uint8Array(64).fill(42); | |
| @@ -61,7 +83,7 @@ describe("transfer", () => { | |
| merkle_root: "111", | |
| context_hash: "222", | |
| nullifiers: ["333"], | |
| - all_commitments_out: ["444"], | |
| + all_commitments_out: [COMMITMENT1.toString()], | |
| }, | |
| selected_notes: [{ leaf_index: 0, amount: "1000" }], | |
| circuit_id: "spend_10x4_v1", | |
| @@ -78,8 +100,8 @@ describe("transfer", () => { | |
| senderKeys: keys, | |
| transfers: [ | |
| { | |
| - recipientAddress: "unlink1recipient", | |
| - token: "0xToken", | |
| + recipientAddress: "0x0000000000000000000000000000000000000001", | |
| + token: "0x0000000000000000000000000000000000000002", | |
| amount: "500", | |
| }, | |
| ], | |
| @@ -98,8 +120,8 @@ describe("transfer", () => { | |
| expect(prepareBody.unlink_address).toBe(keys.address); | |
| expect(prepareBody.transfers).toEqual([ | |
| { | |
| - unlink_address: "unlink1recipient", | |
| - token: "0xToken", | |
| + unlink_address: "0x0000000000000000000000000000000000000001", | |
| + token: "0x0000000000000000000000000000000000000002", | |
| amount: "500", | |
| }, | |
| ]); | |
| @@ -128,7 +150,7 @@ describe("transfer", () => { | |
| merkle_root: "111", | |
| context_hash: "222", | |
| nullifiers: ["333"], | |
| - all_commitments_out: ["444", "555"], | |
| + all_commitments_out: [COMMITMENT1.toString(), COMMITMENT2.toString()], | |
| }, | |
| selected_notes: [{ leaf_index: 0, amount: "3000" }], | |
| circuit_id: "spend_10x4_v1", | |
| @@ -145,13 +167,13 @@ describe("transfer", () => { | |
| senderKeys: keys, | |
| transfers: [ | |
| { | |
| - recipientAddress: "unlink1alice", | |
| - token: "0xToken", | |
| - amount: "1000", | |
| + recipientAddress: "0x0000000000000000000000000000000000000001", | |
| + token: "0x0000000000000000000000000000000000000002", | |
| + amount: "500", | |
| }, | |
| { | |
| - recipientAddress: "unlink1bob", | |
| - token: "0xToken", | |
| + recipientAddress: "0x0000000000000000000000000000000000000003", | |
| + token: "0x0000000000000000000000000000000000000004", | |
| amount: "2000", | |
| }, | |
| ], | |
| @@ -166,8 +188,8 @@ describe("transfer", () => { | |
| const prepareBody = await (mockFetch.mock.calls[0][0] as Request).json(); | |
| expect(prepareBody.transfers).toEqual([ | |
| - { unlink_address: "unlink1alice", token: "0xToken", amount: "1000" }, | |
| - { unlink_address: "unlink1bob", token: "0xToken", amount: "2000" }, | |
| + { unlink_address: "0x0000000000000000000000000000000000000001", token: "0x0000000000000000000000000000000000000002", amount: "500" }, | |
| + { unlink_address: "0x0000000000000000000000000000000000000003", token: "0x0000000000000000000000000000000000000004", amount: "2000" }, | |
| ]); | |
| }); | |
| @@ -198,7 +220,7 @@ describe("transfer", () => { | |
| await transfer(client, { | |
| senderKeys: keys, | |
| transfers: [ | |
| - { recipientAddress: "unlink1x", token: "0xT", amount: "999" }, | |
| + { recipientAddress: "0x0000000000000000000000000000000000000001", token: "0x0000000000000000000000000000000000000002", amount: "999" }, | |
| ], | |
| environment: "base-sepolia", | |
| }); | |
| @@ -223,7 +245,7 @@ describe("transfer", () => { | |
| merkle_root: "111", | |
| context_hash: "222", | |
| nullifiers: ["333"], | |
| - all_commitments_out: ["444"], | |
| + all_commitments_out: [COMMITMENT1.toString()], | |
| }, | |
| selected_notes: [{ leaf_index: 0, amount: "1000" }], | |
| circuit_id: "spend_10x4_v1", | |
| @@ -239,13 +261,72 @@ describe("transfer", () => { | |
| transfer(client, { | |
| senderKeys: keys, | |
| transfers: [ | |
| - { recipientAddress: "unlink1x", token: "0xT", amount: "1" }, | |
| + { recipientAddress: "0x0000000000000000000000000000000000000001", token: "0x0000000000000000000000000000000000000002", amount: "500" }, | |
| ], | |
| environment: "base-sepolia", | |
| }), | |
| ).rejects.toThrow("message_hash mismatch"); | |
| }); | |
| + it("rejects a malicious backend that substitutes the commitment", async () => { | |
| + const keys = await getTestKeys(); | |
| + const WRONG_COMMITMENT = BigInt( | |
| + "9999999999999999999999999999999999999999999999999999", | |
| + ); | |
| + const wrongHCommitments = poseidonHash([WRONG_COMMITMENT]); | |
| + const hInputs = poseidonHash([111n, 222n]); | |
| + const hNullifiers = poseidonHash([333n]); | |
| + const wrongMsgHash = poseidonHash([ | |
| + hInputs, | |
| + hNullifiers, | |
| + wrongHCommitments, | |
| + ]); | |
| + const mockFetch = vi.fn(async (input: RequestInfo | URL, _init?: RequestInit) => { | |
| + const url = input instanceof Request ? input.url : String(input); | |
| + if (url.includes("/prepare/")) { | |
| + return new Response( | |
| + JSON.stringify({ | |
| + data: { | |
| + tx_id: "tx-evil", | |
| + status: "prepared", | |
| + signing_request: { | |
| + message_hash: wrongMsgHash.toString(), | |
| + public_inputs: { | |
| + merkle_root: "111", | |
| + context_hash: "222", | |
| + nullifiers: ["333"], | |
| + all_commitments_out: [WRONG_COMMITMENT.toString()], | |
| + }, | |
| + selected_notes: [{ leaf_index: 0, amount: "1000" }], | |
| + circuit_id: "spend_10x4_v1", | |
| + }, | |
| + }, | |
| + }), | |
| + { status: 201, headers: { "Content-Type": "application/json" } }, | |
| + ); | |
| + } | |
| + if (url.includes("/submit")) { | |
| + return new Response( | |
| + JSON.stringify({ data: { tx_id: "tx-evil", status: "accepted" } }), | |
| + { status: 200, headers: { "Content-Type": "application/json" } }, | |
| + ); | |
| + } | |
| + return new Response("Not Found", { status: 404 }); | |
| + }); | |
| + | |
| + const client = createHttpClient(BASE_URL, API_KEY, mockFetch); | |
| + | |
| + await expect( | |
| + transfer(client, { | |
| + senderKeys: keys, | |
| + transfers: [ | |
| + { recipientAddress: "0x0000000000000000000000000000000000000001", token: "0x0000000000000000000000000000000000000002", amount: "500" }, | |
| + ], | |
| + environment: "base-sepolia", | |
| + }), | |
| + ).rejects.toThrow("commitment mismatch"); | |
| + }); | |
| + | |
| it("rejects public_inputs with values outside BN254 field", async () => { | |
| const keys = await getTestKeys(); | |
| const P = SNARK_SCALAR_FIELD.toString(); | |
| @@ -260,7 +341,7 @@ describe("transfer", () => { | |
| merkle_root: P, | |
| context_hash: "222", | |
| nullifiers: ["333"], | |
| - all_commitments_out: ["444"], | |
| + all_commitments_out: [COMMITMENT1.toString()], | |
| }, | |
| selected_notes: [{ leaf_index: 0, amount: "1000" }], | |
| circuit_id: "spend_10x4_v1", | |
| @@ -276,7 +357,7 @@ describe("transfer", () => { | |
| transfer(client, { | |
| senderKeys: keys, | |
| transfers: [ | |
| - { recipientAddress: "unlink1x", token: "0xT", amount: "1" }, | |
| + { recipientAddress: "0x0000000000000000000000000000000000000001", token: "0x0000000000000000000000000000000000000002", amount: "500" }, | |
| ], | |
| environment: "base-sepolia", | |
| }), | |
| @@ -301,7 +382,7 @@ describe("transfer", () => { | |
| merkle_root: "1", | |
| context_hash: "2", | |
| nullifiers: ["3"], | |
| - all_commitments_out: ["4"], | |
| + all_commitments_out: [SUBMIT_COMMITMENT.toString()], | |
| }, | |
| selected_notes: [], | |
| circuit_id: "spend_10x4_v1", | |
| @@ -327,7 +408,7 @@ describe("transfer", () => { | |
| await transfer(client, { | |
| senderKeys: keys, | |
| transfers: [ | |
| - { recipientAddress: "unlink1x", token: "0xT", amount: "1" }, | |
| + { recipientAddress: "0x0000000000000000000000000000000000000005", token: "0x0000000000000000000000000000000000000006", amount: "1" }, | |
| ], | |
| environment: "base-sepolia", | |
| }); | |
| diff --git a/protocol/sdk/src/__tests__/unlink.test.ts b/protocol/sdk/src/__tests__/unlink.test.ts | |
| index d0f067f..6c23fc3 100644 | |
| --- a/protocol/sdk/src/__tests__/unlink.test.ts | |
| +++ b/protocol/sdk/src/__tests__/unlink.test.ts | |
| @@ -1,4 +1,5 @@ | |
| import { afterEach, describe, expect, it, vi } from "vitest"; | |
| +import { poseidon1, poseidon2, poseidon3, poseidon4, poseidon5 } from "poseidon-lite"; | |
| import { account } from "../account-provider.js"; | |
| import { | |
| @@ -15,6 +16,22 @@ const BASE_URL = "http://localhost:3000"; | |
| const MNEMONIC = | |
| "abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon about"; | |
| +const POSEIDON_FNS: ((inputs: bigint[]) => bigint)[] = [ | |
| + () => 0n, poseidon1, poseidon2, poseidon3, poseidon4, poseidon5, | |
| +]; | |
| +function poseidonHash(inputs: bigint[]): bigint { | |
| + return POSEIDON_FNS[inputs.length]!(inputs); | |
| +} | |
| + | |
| +const WITHDRAW_NPK = 1n; | |
| +const WITHDRAW_TOKEN = 2n; | |
| +const WITHDRAW_COMMITMENT = poseidonHash([WITHDRAW_NPK, WITHDRAW_TOKEN, 5n]); | |
| + | |
| +const hInputs = poseidonHash([1n, 2n]); | |
| +const hNullifiers = poseidonHash([3n]); | |
| +const hCommitments = poseidonHash([WITHDRAW_COMMITMENT]); | |
| +const WITHDRAW_MESSAGE_HASH = poseidonHash([hInputs, hNullifiers, hCommitments]).toString(); | |
| + | |
| describe("createClient", () => { | |
| afterEach(() => { | |
| vi.unstubAllGlobals(); | |
| @@ -193,19 +210,33 @@ describe("createClient", () => { | |
| } | |
| if (url.includes("/transactions/prepare/transfer")) { | |
| + const tokenAddr = "0x0000000000000000000000000000000000000002"; | |
| + const transfer1Commitment = poseidonHash([ | |
| + BigInt("0x" + "0000000000000000000000000000000000000001".slice(2).padStart(64, "0")), | |
| + BigInt("0x" + tokenAddr.slice(2).padStart(64, "0")), | |
| + 1n, | |
| + ]); | |
| + const transfer2Commitment = poseidonHash([ | |
| + BigInt("0x" + "0000000000000000000000000000000000000003".slice(2).padStart(64, "0")), | |
| + BigInt("0x" + tokenAddr.slice(2).padStart(64, "0")), | |
| + 2n, | |
| + ]); | |
| + const hInputs = poseidonHash([1n, 2n]); | |
| + const hNullifiers = poseidonHash([3n]); | |
| + const hCommitments = poseidonHash([transfer1Commitment, transfer2Commitment]); | |
| + const messageHash = poseidonHash([hInputs, hNullifiers, hCommitments]); | |
| return new Response( | |
| JSON.stringify({ | |
| data: { | |
| tx_id: "tx-transfer", | |
| status: "prepared", | |
| signing_request: { | |
| - message_hash: | |
| - "21068280238279045573778304197429874446618930746644087956846565421834592537736", | |
| + message_hash: messageHash.toString(), | |
| public_inputs: { | |
| merkle_root: "1", | |
| context_hash: "2", | |
| nullifiers: ["3"], | |
| - all_commitments_out: ["4"], | |
| + all_commitments_out: [transfer1Commitment.toString(), transfer2Commitment.toString()], | |
| }, | |
| selected_notes: [], | |
| circuit_id: "spend_10x4_v1", | |
| @@ -235,11 +266,12 @@ describe("createClient", () => { | |
| account: account.fromMnemonic({ mnemonic: MNEMONIC }), | |
| }); | |
| + const tokenAddr = "0x0000000000000000000000000000000000000002"; | |
| const result = await unlink.transfer({ | |
| - token: "0xToken", | |
| + token: tokenAddr, | |
| transfers: [ | |
| - { recipientAddress: "unlink1recipient", amount: "1" }, | |
| - { recipientAddress: "unlink1recipient2", amount: "2" }, | |
| + { recipientAddress: "0x0000000000000000000000000000000000000001", amount: "1" }, | |
| + { recipientAddress: "0x0000000000000000000000000000000000000003", amount: "2" }, | |
| ], | |
| }); | |
| @@ -257,13 +289,13 @@ describe("createClient", () => { | |
| expect(prepareBody.environment).toBe("base-sepolia"); | |
| expect(prepareBody.transfers).toEqual([ | |
| { | |
| - unlink_address: "unlink1recipient", | |
| - token: "0xToken", | |
| + unlink_address: "0x0000000000000000000000000000000000000001", | |
| + token: tokenAddr, | |
| amount: "1", | |
| }, | |
| { | |
| - unlink_address: "unlink1recipient2", | |
| - token: "0xToken", | |
| + unlink_address: "0x0000000000000000000000000000000000000003", | |
| + token: tokenAddr, | |
| amount: "2", | |
| }, | |
| ]); | |
| @@ -304,13 +336,12 @@ describe("createClient", () => { | |
| tx_id: "tx-withdraw", | |
| status: "prepared", | |
| signing_request: { | |
| - message_hash: | |
| - "21068280238279045573778304197429874446618930746644087956846565421834592537736", | |
| + message_hash: WITHDRAW_MESSAGE_HASH, | |
| public_inputs: { | |
| merkle_root: "1", | |
| context_hash: "2", | |
| nullifiers: ["3"], | |
| - all_commitments_out: ["4"], | |
| + all_commitments_out: [WITHDRAW_COMMITMENT.toString()], | |
| }, | |
| selected_notes: [], | |
| circuit_id: "spend_10x4_v1", | |
| @@ -341,8 +372,8 @@ describe("createClient", () => { | |
| }); | |
| const result = await unlink.withdraw({ | |
| - recipientEvmAddress: "0xRecipient", | |
| - token: "0xToken", | |
| + recipientEvmAddress: "0x0000000000000000000000000000000000000001", | |
| + token: "0x0000000000000000000000000000000000000002", | |
| amount: "5", | |
| }); | |
| @@ -358,8 +389,8 @@ describe("createClient", () => { | |
| const prepareBody = await prepareRequest.json(); | |
| expect(prepareBody.environment).toBe("base-sepolia"); | |
| - expect(prepareBody.evm_address).toBe("0xRecipient"); | |
| - expect(prepareBody.token).toBe("0xToken"); | |
| + expect(prepareBody.evm_address).toBe("0x0000000000000000000000000000000000000001"); | |
| + expect(prepareBody.token).toBe("0x0000000000000000000000000000000000000002"); | |
| expect(prepareBody.amount).toBe("5"); | |
| }); | |
| diff --git a/protocol/sdk/src/__tests__/withdraw.test.ts b/protocol/sdk/src/__tests__/withdraw.test.ts | |
| index 2cd6478..e9acac2 100644 | |
| --- a/protocol/sdk/src/__tests__/withdraw.test.ts | |
| +++ b/protocol/sdk/src/__tests__/withdraw.test.ts | |
| @@ -1,4 +1,5 @@ | |
| import { describe, expect, it, vi } from "vitest"; | |
| +import { poseidon1, poseidon2, poseidon3, poseidon4, poseidon5 } from "poseidon-lite"; | |
| import { createHttpClient } from "../client.js"; | |
| import { ApiError } from "../errors.js"; | |
| @@ -8,12 +9,30 @@ import { withdraw } from "../transactions/withdraw.js"; | |
| const BASE_URL = "http://localhost:3000"; | |
| const API_KEY = "test-key"; | |
| -// Poseidon(111, 222, 333, 444) — must match the mock public_inputs | |
| -const TEST_MESSAGE_HASH = | |
| - "19254840519170188545862775101913657432245463725821477914502433725206377151920"; | |
| -// Poseidon(1, 2, 3, 4) — for submit-failure mock | |
| -const TEST_MESSAGE_HASH_SUBMIT = | |
| - "21068280238279045573778304197429874446618930746644087956846565421834592537736"; | |
| +const POSEIDON_FNS: ((inputs: bigint[]) => bigint)[] = [ | |
| + () => 0n, poseidon1, poseidon2, poseidon3, poseidon4, poseidon5, | |
| +]; | |
| +function poseidonHash(inputs: bigint[]): bigint { | |
| + return POSEIDON_FNS[inputs.length]!(inputs); | |
| +} | |
| + | |
| +const RECIPIENT_NPK = 1n; | |
| +const TOKEN_FR = 2n; | |
| +const WITHDRAWAL_COMMITMENT = poseidonHash([RECIPIENT_NPK, TOKEN_FR, 2000n]); | |
| + | |
| +const hInputs = poseidonHash([111n, 222n]); | |
| +const hNullifiers = poseidonHash([333n]); | |
| +const hCommitments = poseidonHash([WITHDRAWAL_COMMITMENT]); | |
| +const TEST_MESSAGE_HASH = poseidonHash([hInputs, hNullifiers, hCommitments]).toString(); | |
| + | |
| +const SUBMIT_RECIPIENT_NPK = 3n; | |
| +const SUBMIT_TOKEN_FR = 4n; | |
| +const SUBMIT_WITHDRAWAL_COMMITMENT = poseidonHash([SUBMIT_RECIPIENT_NPK, SUBMIT_TOKEN_FR, 100n]); | |
| + | |
| +const hInputs2 = poseidonHash([1n, 2n]); | |
| +const hNullifiers2 = poseidonHash([3n]); | |
| +const hCommitments2 = poseidonHash([SUBMIT_WITHDRAWAL_COMMITMENT]); | |
| +const TEST_MESSAGE_HASH_SUBMIT = poseidonHash([hInputs2, hNullifiers2, hCommitments2]).toString(); | |
| async function getTestKeys() { | |
| const seed = new Uint8Array(64).fill(42); | |
| @@ -56,7 +75,7 @@ describe("withdraw", () => { | |
| merkle_root: "111", | |
| context_hash: "222", | |
| nullifiers: ["333"], | |
| - all_commitments_out: ["444"], | |
| + all_commitments_out: [WITHDRAWAL_COMMITMENT.toString()], | |
| }, | |
| selected_notes: [{ leaf_index: 0, amount: "2000" }], | |
| circuit_id: "spend_10x4_v1", | |
| @@ -71,8 +90,8 @@ describe("withdraw", () => { | |
| const client = createHttpClient(BASE_URL, API_KEY, mockFetch); | |
| const result = await withdraw(client, { | |
| senderKeys: keys, | |
| - recipientEvmAddress: "0xRecipient", | |
| - token: "0xToken", | |
| + recipientEvmAddress: "0x0000000000000000000000000000000000000001", | |
| + token: "0x0000000000000000000000000000000000000002", | |
| amount: "2000", | |
| environment: "base-sepolia", | |
| }); | |
| @@ -87,8 +106,8 @@ describe("withdraw", () => { | |
| const prepareCall = mockFetch.mock.calls[0]; | |
| const prepareBody = await (prepareCall[0] as Request).json(); | |
| expect(prepareBody.unlink_address).toBe(keys.address); | |
| - expect(prepareBody.evm_address).toBe("0xRecipient"); | |
| - expect(prepareBody.token).toBe("0xToken"); | |
| + expect(prepareBody.evm_address).toBe("0x0000000000000000000000000000000000000001"); | |
| + expect(prepareBody.token).toBe("0x0000000000000000000000000000000000000002"); | |
| expect(prepareBody.amount).toBe("2000"); | |
| expect(prepareBody.environment).toBe("base-sepolia"); | |
| @@ -119,8 +138,8 @@ describe("withdraw", () => { | |
| try { | |
| await withdraw(client, { | |
| senderKeys: keys, | |
| - recipientEvmAddress: "0xRecipient", | |
| - token: "0xToken", | |
| + recipientEvmAddress: "0x0000000000000000000000000000000000000001", | |
| + token: "0x0000000000000000000000000000000000000002", | |
| amount: "100", | |
| environment: "base-sepolia", | |
| }); | |
| @@ -132,6 +151,88 @@ describe("withdraw", () => { | |
| } | |
| }); | |
| + it("rejects a malicious backend that substitutes the commitment", async () => { | |
| + const keys = await getTestKeys(); | |
| + // Compute message_hash using the malicious wrong commitment so the | |
| + // message_hash verification passes (message_hash is consistent with | |
| + // the returned all_commitments_out). The SDK then independently derives | |
| + // the expected commitment from client-known params and compares it | |
| + // against the returned commitment — the mismatch is caught here. | |
| + // | |
| + // hInputs = Poseidon([111, 222]) | |
| + // hNullifiers = Poseidon([333]) | |
| + // hCommitments = Poseidon([wrong_commitment]) | |
| + // message_hash = Poseidon([hInputs, hNullifiers, hCommitments]) | |
| + // | |
| + // Expected (honest) commitment = Poseidon([1, 2, 2000]) | |
| + // Wrong (malicious) commitment = 7777... | |
| + // This forces the SDK to reject via commitment mismatch after the | |
| + // message_hash check passes (since the hash is computed from the | |
| + // same wrong commitment the mock returns). | |
| + const WRONG_COMMITMENT = BigInt( | |
| + "7777777777777777777777777777777777777777777777777777", | |
| + ); | |
| + const wrongHCommitments = poseidonHash([WRONG_COMMITMENT]); | |
| + const hInputs = poseidonHash([111n, 222n]); | |
| + const hNullifiers = poseidonHash([333n]); | |
| + const wrongMsgHash = poseidonHash([ | |
| + hInputs, | |
| + hNullifiers, | |
| + wrongHCommitments, | |
| + ]); | |
| + const mockFetch = vi.fn(async (input: RequestInfo | URL, _init?: RequestInit) => { | |
| + const url = input instanceof Request ? input.url : String(input); | |
| + | |
| + if (url.includes("/transactions/prepare/withdraw")) { | |
| + return new Response( | |
| + JSON.stringify({ | |
| + data: { | |
| + tx_id: "tx-uuid-evil", | |
| + status: "prepared", | |
| + signing_request: { | |
| + message_hash: wrongMsgHash.toString(), | |
| + public_inputs: { | |
| + merkle_root: "111", | |
| + context_hash: "222", | |
| + nullifiers: ["333"], | |
| + // Malicious backend substituted the wrong commitment. | |
| + all_commitments_out: [WRONG_COMMITMENT.toString()], | |
| + }, | |
| + selected_notes: [{ leaf_index: 0, amount: "2000" }], | |
| + circuit_id: "spend_10x4_v1", | |
| + }, | |
| + }, | |
| + }), | |
| + { status: 201, headers: { "Content-Type": "application/json" } }, | |
| + ); | |
| + } | |
| + | |
| + if (url.includes("/submit")) { | |
| + return new Response( | |
| + JSON.stringify({ data: { tx_id: "tx-uuid-evil", status: "accepted" } }), | |
| + { status: 200, headers: { "Content-Type": "application/json" } }, | |
| + ); | |
| + } | |
| + | |
| + return new Response("Not Found", { status: 404 }); | |
| + }); | |
| + | |
| + const client = createHttpClient(BASE_URL, API_KEY, mockFetch); | |
| + | |
| + try { | |
| + await withdraw(client, { | |
| + senderKeys: keys, | |
| + recipientEvmAddress: "0x0000000000000000000000000000000000000001", | |
| + token: "0x0000000000000000000000000000000000000002", | |
| + amount: "2000", | |
| + environment: "base-sepolia", | |
| + }); | |
| + expect.unreachable("malicious backend should have been rejected"); | |
| + } catch (err) { | |
| + expect((err as Error).message).toContain("commitment mismatch"); | |
| + } | |
| + }); | |
| + | |
| it("throws on submit failure", async () => { | |
| const keys = await getTestKeys(); | |
| const mockFetch = vi.fn( | |
| @@ -150,7 +251,7 @@ describe("withdraw", () => { | |
| merkle_root: "1", | |
| context_hash: "2", | |
| nullifiers: ["3"], | |
| - all_commitments_out: ["4"], | |
| + all_commitments_out: [SUBMIT_WITHDRAWAL_COMMITMENT.toString()], | |
| }, | |
| selected_notes: [], | |
| circuit_id: "spend_10x4_v1", | |
| @@ -175,8 +276,8 @@ describe("withdraw", () => { | |
| try { | |
| await withdraw(client, { | |
| senderKeys: keys, | |
| - recipientEvmAddress: "0xRecipient", | |
| - token: "0xToken", | |
| + recipientEvmAddress: "0x0000000000000000000000000000000000000003", | |
| + token: "0x0000000000000000000000000000000000000004", | |
| amount: "100", | |
| environment: "base-sepolia", | |
| }); | |
| @@ -187,4 +288,4 @@ describe("withdraw", () => { | |
| expect((err as ApiError).detail).toBe("Transaction expired"); | |
| } | |
| }); | |
| -}); | |
| +}); | |
| \ No newline at end of file | |
| diff --git a/protocol/sdk/src/burner.ts b/protocol/sdk/src/burner.ts | |
| index 70fcbbc..d016f25 100644 | |
| --- a/protocol/sdk/src/burner.ts | |
| +++ b/protocol/sdk/src/burner.ts | |
| @@ -2,6 +2,7 @@ import type { Client as HttpClient } from "openapi-fetch"; | |
| import type { Address } from "viem"; | |
| import { generatePrivateKey, privateKeyToAccount } from "viem/accounts"; | |
| +import { poseidon } from "./crypto/poseidon.js"; | |
| import { ApiError } from "./errors.js"; | |
| import type { components, paths } from "./generated/types.js"; | |
| import type { AccountKeys } from "./keys/types.js"; | |
| @@ -83,6 +84,22 @@ export class BurnerWallet { | |
| } | |
| const { tx_id, signing_request } = data.data; | |
| + const { public_inputs } = signing_request; | |
| + | |
| + const burnerNpK = BigInt("0x" + this.address.slice(2).padStart(64, "0")); | |
| + const tokenFr = BigInt("0x" + params.token.slice(2).padStart(64, "0")); | |
| + const amountFr = BigInt(params.amount); | |
| + const expectedOutputCommitment = poseidon([burnerNpK, tokenFr, amountFr]); | |
| + | |
| + const commitmentsOut = public_inputs.all_commitments_out.map((v) => | |
| + BigInt(v), | |
| + ); | |
| + if (!commitmentsOut.includes(expectedOutputCommitment)) { | |
| + throw new Error( | |
| + `commitment mismatch: expected ${expectedOutputCommitment}, got ${commitmentsOut.join(", ")}`, | |
| + ); | |
| + } | |
| + | |
| return signAndSubmit(apiClient, { | |
| txId: tx_id, | |
| messageHash: signing_request.message_hash, | |
| diff --git a/protocol/sdk/src/transactions/transfer.ts b/protocol/sdk/src/transactions/transfer.ts | |
| index 1f36efe..4c42539 100644 | |
| --- a/protocol/sdk/src/transactions/transfer.ts | |
| +++ b/protocol/sdk/src/transactions/transfer.ts | |
| @@ -1,5 +1,6 @@ | |
| import type { Client as HttpClient } from "openapi-fetch"; | |
| +import { poseidon } from "../crypto/poseidon.js"; | |
| import { ApiError } from "../errors.js"; | |
| import type { paths } from "../generated/types.js"; | |
| import type { AccountKeys } from "../keys/types.js"; | |
| @@ -46,6 +47,27 @@ export async function transfer( | |
| } | |
| const { tx_id, signing_request } = prepareData.data; | |
| + const { public_inputs } = signing_request; | |
| + | |
| + const expectedCommitments = params.transfers.map((t) => | |
| + poseidon([ | |
| + BigInt("0x" + t.recipientAddress.slice(2).padStart(64, "0")), | |
| + BigInt("0x" + t.token.slice(2).padStart(64, "0")), | |
| + BigInt(t.amount), | |
| + ]), | |
| + ); | |
| + | |
| + const commitmentsOut = public_inputs.all_commitments_out.map((v) => | |
| + BigInt(v), | |
| + ); | |
| + for (const expected of expectedCommitments) { | |
| + if (!commitmentsOut.includes(expected)) { | |
| + throw new Error( | |
| + `commitment mismatch: expected ${expected}, got ${commitmentsOut.join(", ")}`, | |
| + ); | |
| + } | |
| + } | |
| + | |
| return signAndSubmit(client, { | |
| txId: tx_id, | |
| messageHash: signing_request.message_hash, | |
| diff --git a/protocol/sdk/src/transactions/withdraw.ts b/protocol/sdk/src/transactions/withdraw.ts | |
| index 4d1ec78..e0d5d0e 100644 | |
| --- a/protocol/sdk/src/transactions/withdraw.ts | |
| +++ b/protocol/sdk/src/transactions/withdraw.ts | |
| @@ -1,5 +1,7 @@ | |
| import type { Client as HttpClient } from "openapi-fetch"; | |
| +import { toBytesBE } from "../crypto/field.js"; | |
| +import { poseidon } from "../crypto/poseidon.js"; | |
| import { ApiError } from "../errors.js"; | |
| import type { paths } from "../generated/types.js"; | |
| import type { AccountKeys } from "../keys/types.js"; | |
| @@ -48,6 +50,22 @@ export async function withdraw( | |
| } | |
| const { tx_id, signing_request } = prepareData.data; | |
| + const { public_inputs } = signing_request; | |
| + | |
| + const npk = bytesToNpk(params.recipientEvmAddress); | |
| + const tokenFr = hexToField(params.token); | |
| + const amountFr = BigInt(params.amount); | |
| + const expectedOutputCommitment = poseidon([npk, tokenFr, amountFr]); | |
| + | |
| + const commitmentsOut = public_inputs.all_commitments_out.map((v) => | |
| + BigInt(v), | |
| + ); | |
| + if (!commitmentsOut.includes(expectedOutputCommitment)) { | |
| + throw new Error( | |
| + `commitment mismatch: expected ${expectedOutputCommitment}, got ${commitmentsOut.join(", ")}`, | |
| + ); | |
| + } | |
| + | |
| return signAndSubmit(client, { | |
| txId: tx_id, | |
| messageHash: signing_request.message_hash, | |
| @@ -56,3 +74,14 @@ export async function withdraw( | |
| operationLabel: "Withdraw", | |
| }); | |
| } | |
| + | |
| +function bytesToNpk(evmAddress: string): bigint { | |
| + const hex = evmAddress.startsWith("0x") ? evmAddress.slice(2) : evmAddress; | |
| + const padded = hex.padStart(64, "0"); | |
| + return BigInt("0x" + padded); | |
| +} | |
| + | |
| +function hexToField(hex: string): bigint { | |
| + const clean = hex.startsWith("0x") ? hex.slice(2) : hex; | |
| + return BigInt("0x" + clean.padStart(64, "0")); | |
| +} | |
| diff --git a/protocol/zk/circuits/spend.circom b/protocol/zk/circuits/spend.circom | |
| index b60104d..fa1e054 100644 | |
| --- a/protocol/zk/circuits/spend.circom | |
| +++ b/protocol/zk/circuits/spend.circom | |
| @@ -139,6 +139,16 @@ template Spend(nIn, nOut, MERKLE_DEPTH) { | |
| totalIn += valueIn[i]; | |
| } | |
| + component equal[nIn][nIn]; | |
| + for (var i = 0; i < nIn; i++) { | |
| + for (var j = i + 1; j < nIn; j++) { | |
| + equal[i][j] = IsEqual(); | |
| + equal[i][j].in[0] <== nullifiers[i]; | |
| + equal[i][j].in[1] <== nullifiers[j]; | |
| + equal[i][j].out * (nullifierIsZero[i].out + nullifierIsZero[j].out) === 0; | |
| + } | |
| + } | |
| + | |
| // -- Output notes: per-slot sentinel-zero constraints + accumulate values -- | |
| signal rangeBits[nOut][120]; | |
| component outHash[nOut]; | |
| diff --git a/protocol/zk/package.json b/protocol/zk/package.json | |
| index d9149cd..ce8e278 100644 | |
| --- a/protocol/zk/package.json | |
| +++ b/protocol/zk/package.json | |
| @@ -3,7 +3,7 @@ | |
| "version": "0.1.0", | |
| "private": true, | |
| "scripts": { | |
| - "pretest": "bash scripts/codegen.sh circuits-json", | |
| + "pretest": "bash scripts/codegen.sh circuits-json && bash scripts/preflight-test-zk.sh", | |
| "test": "vitest run --no-file-parallelism", | |
| "test:watch": "vitest", | |
| "typecheck": "tsc --noEmit", | |
| @@ -22,5 +22,8 @@ | |
| "poseidon-lite": "^0.3.0", | |
| "tsx": "^4.0.0", | |
| "vitest": "^4.0.1" | |
| + }, | |
| + "overrides": { | |
| + "snarkjs": "^0.7.6" | |
| } | |
| } | |
| diff --git a/protocol/zk/scripts/download-artifacts.sh b/protocol/zk/scripts/download-artifacts.sh | |
| index cf4c045..1321448 100755 | |
| --- a/protocol/zk/scripts/download-artifacts.sh | |
| +++ b/protocol/zk/scripts/download-artifacts.sh | |
| @@ -55,13 +55,133 @@ SENTINEL="$ARTIFACTS_DIR/.bootstrap-complete-${VERSION}" | |
| BASE_URL="${ARTIFACTS_URL}/${VERSION}" | |
| -# Fast-path: if a prior run for this exact version completed successfully, skip | |
| -# the remote manifest fetch entirely. Keeps warm boots tolerant to R2 outages | |
| -# (entrypoint would otherwise crash-loop the machine on transient R2 errors | |
| -# even when every artifact is already SHA-verified on disk). | |
| +# --- Build local circuit allowlist --- | |
| +# Remote circuit names must be in this set to prevent path traversal. | |
| +ZK_DIR="$REPO_ROOT/protocol/zk" | |
| +LOCAL_ALLOWLIST=() | |
| +if [ -f "$ZK_DIR/circuits.json" ]; then | |
| + while IFS= read -r variant; do | |
| + LOCAL_ALLOWLIST+=("$variant") | |
| + done < <(jq -r 'keys[]' "$ZK_DIR/circuits.json" 2>/dev/null | sort) | |
| +fi | |
| + | |
| +CIRCUIT_NAME_REGEX='^[A-Za-z0-9_-]+$' | |
| + | |
| +validate_circuit_name() { | |
| + local name="$1" | |
| + if [[ ! "$name" =~ $CIRCUIT_NAME_REGEX ]]; then | |
| + echo "Error: circuit name '$name' contains invalid characters (allowed: A-Za-z0-9_-)" >&2 | |
| + return 1 | |
| + fi | |
| + if [ ${#LOCAL_ALLOWLIST[@]} -gt 0 ]; then | |
| + local found=0 | |
| + for allowed in "${LOCAL_ALLOWLIST[@]}"; do | |
| + if [ "$name" = "$allowed" ]; then | |
| + found=1 | |
| + break | |
| + fi | |
| + done | |
| + if [ "$found" -eq 0 ]; then | |
| + echo "Error: circuit name '$name' is not in the local circuit allowlist" >&2 | |
| + echo "Allowed circuits: ${LOCAL_ALLOWLIST[*]}" >&2 | |
| + return 1 | |
| + fi | |
| + fi | |
| + return 0 | |
| +} | |
| + | |
| +canonicalize_and_validate() { | |
| + local rel_path="$1" | |
| + local canonical base_dir | |
| + | |
| + # Canonicalize the full destination path (resolves ../ but not symlinks) | |
| + canonical="$(realpath -m "$ARTIFACTS_DIR/$rel_path" 2>/dev/null || echo "")" | |
| + if [ -z "$canonical" ]; then | |
| + echo "Error: could not canonicalize path '$rel_path'" >&2 | |
| + return 1 | |
| + fi | |
| + | |
| + # Canonicalize the base directory | |
| + base_dir="$(realpath -m "$ARTIFACTS_DIR" 2>/dev/null || echo "")" | |
| + if [ -z "$base_dir" ]; then | |
| + echo "Error: could not canonicalize artifact directory '$ARTIFACTS_DIR'" >&2 | |
| + return 1 | |
| + fi | |
| + | |
| + # Ensure the canonical path is under the artifact directory | |
| + case "$canonical" in | |
| + "$base_dir"*) ;; | |
| + *) | |
| + echo "Error: path traversal attempt detected: '$rel_path' resolves to '$canonical'" >&2 | |
| + return 1 | |
| + ;; | |
| + esac | |
| + | |
| + echo "$canonical" | |
| + return 0 | |
| +} | |
| + | |
| +# Fast-path: if a prior run for this exact version completed successfully, verify | |
| +# local artifacts before skipping the remote fetch. | |
| if [ "$SKIP_IF_COMPLETE" = "true" ] && [ -f "$SENTINEL" ]; then | |
| - echo "==> Bootstrap sentinel present for version $VERSION — skipping download." | |
| - exit 0 | |
| + echo "==> Bootstrap sentinel present for version $VERSION — verifying local artifacts..." | |
| + | |
| + # Verify all expected artifacts exist and match checksums before skipping. | |
| + # This ensures the sentinel only bypasses R2 when local integrity is proven. | |
| + MANIFEST_FILE="$ARTIFACTS_DIR/manifest.json" | |
| + if [ ! -f "$MANIFEST_FILE" ]; then | |
| + echo "Warning: sentinel present but manifest missing — re-running full bootstrap" >&2 | |
| + else | |
| + manifest_ok=true | |
| + while IFS= read -r circuit; do | |
| + if ! validate_circuit_name "$circuit"; then | |
| + manifest_ok=false | |
| + break | |
| + fi | |
| + | |
| + checksums_file="$ARTIFACTS_DIR/checksums/${circuit}.json" | |
| + if [ ! -f "$checksums_file" ]; then | |
| + echo "Warning: missing checksums for '$circuit' — re-running bootstrap" >&2 | |
| + manifest_ok=false | |
| + break | |
| + fi | |
| + | |
| + # Verify each artifact in the checksums file | |
| + while IFS= read -r filename; do | |
| + [ -n "$filename" ] || continue | |
| + | |
| + expected="$(jq -r --arg f "$filename" '.files[$f].sha256 // empty' "$checksums_file")" | |
| + [ -z "$expected" ] && continue | |
| + | |
| + # Determine local path based on file extension | |
| + case "$filename" in | |
| + *.zkey) local_path="$CIRCUITS_DIR/$filename" ;; | |
| + *.wasm) local_path="$WASM_DIR/$circuit/$filename" ;; | |
| + *.bin) local_path="$WASM_DIR/$circuit/$filename" ;; | |
| + witness_calculator.js) local_path="$WASM_DIR/$circuit/$filename" ;; | |
| + *) local_path="$ARTIFACTS_DIR/$filename" ;; | |
| + esac | |
| + | |
| + if [ ! -f "$local_path" ]; then | |
| + echo "Warning: missing '$local_path' — re-running bootstrap" >&2 | |
| + manifest_ok=false | |
| + break 2 | |
| + fi | |
| + | |
| + actual="$(sha256_cmd "$local_path")" | |
| + if [ "$actual" != "$expected" ]; then | |
| + echo "Warning: checksum mismatch for '$local_path' — re-running bootstrap" >&2 | |
| + manifest_ok=false | |
| + break 2 | |
| + fi | |
| + done < <(jq -r '.files | keys[]' "$checksums_file") | |
| + done < <(jq -r '.circuits[]' "$MANIFEST_FILE") | |
| + | |
| + if [ "$manifest_ok" = "true" ]; then | |
| + echo " All local artifacts verified. Skipping download. ✓" | |
| + exit 0 | |
| + fi | |
| + fi | |
| fi | |
| # --- Helpers --- | |
| @@ -121,6 +241,16 @@ fi | |
| echo " Circuits: $circuit_count" | |
| +# --- Step 1b: Validate all remote circuit names --- | |
| +echo "==> Validating circuit names against allowlist..." | |
| +for circuit in "${circuits[@]}"; do | |
| + if ! validate_circuit_name "$circuit"; then | |
| + echo "Error: invalid circuit name '$circuit' in manifest — aborting download" >&2 | |
| + exit 1 | |
| + fi | |
| +done | |
| +echo " All circuit names validated. ✓" | |
| + | |
| # --- Step 2: Fetch checksums --- | |
| echo "==> Fetching checksums..." | |
| @@ -145,10 +275,29 @@ pids=() | |
| download_and_verify() { | |
| local circuit="$1" filename="$2" rel_path="$3" | |
| + local canonicalized | |
| local dest="$ARTIFACTS_DIR/$rel_path" | |
| local url="$BASE_URL/$rel_path" | |
| local checksums_file="$CHECKSUMS_DIR/${circuit}.json" | |
| + # Validate circuit name before any path operations | |
| + if ! validate_circuit_name "$circuit"; then | |
| + echo "Error: invalid circuit name '$circuit'" >&2 | |
| + return 2 | |
| + fi | |
| + | |
| + # Validate path doesn't escape artifact directory | |
| + if ! canonicalized="$(canonicalize_and_validate "$rel_path")"; then | |
| + echo "Error: path validation failed for '$rel_path'" >&2 | |
| + return 2 | |
| + fi | |
| + | |
| + # Validate checksum file key before using it | |
| + if ! echo "$filename" | grep -qE "$CIRCUIT_NAME_REGEX"; then | |
| + echo "Error: invalid filename '$filename' in checksums" >&2 | |
| + return 2 | |
| + fi | |
| + | |
| # Get expected checksum | |
| local expected | |
| expected="$(jq -r --arg f "$filename" '.files[$f].sha256 // empty' "$checksums_file")" | |
| @@ -234,6 +383,13 @@ echo "==> Verifying all artifacts..." | |
| verify_errors=0 | |
| for circuit in "${circuits[@]}"; do | |
| + # Validate circuit name | |
| + if ! validate_circuit_name "$circuit"; then | |
| + echo " ✗ Invalid circuit name: $circuit" >&2 | |
| + verify_errors=$((verify_errors + 1)) | |
| + continue | |
| + fi | |
| + | |
| checksums_file="$CHECKSUMS_DIR/${circuit}.json" | |
| while IFS= read -r filename; do | |
| diff --git a/protocol/zk/scripts/harden_verifier.py b/protocol/zk/scripts/harden_verifier.py | |
| index 6175ed5..219471a 100755 | |
| --- a/protocol/zk/scripts/harden_verifier.py | |
| +++ b/protocol/zk/scripts/harden_verifier.py | |
| @@ -28,31 +28,24 @@ G1_MUL_NEW = " if iszero(and(success, eq(returndatasize(), 0x40)) | |
| # 2. checkPairing's staticcall hits precompile 8 (BN254 pairing) returning 0x20 | |
| # bytes. Inline the size check in the success-AND-result combinator. | |
| PAIR_OLD = " isOk := and(success, mload(_pPairing))" | |
| -PAIR_NEW = ( | |
| - " isOk := and(and(success, eq(returndatasize(), 0x20)), mload(_pPairing))" | |
| -) | |
| +PAIR_NEW = " isOk := and(and(success, eq(returndatasize(), 0x20)), mload(_pPairing))" | |
| # 3. checkFieldQ: companion to existing checkField but validates against the | |
| # base-field modulus q (declared at file scope as `uint256 constant q`). | |
| -CHECKFIELD_ANCHOR = """ function checkField(v) { | |
| - if iszero(lt(v, r)) { | |
| - mstore(0, 0) | |
| - return(0, 0x20) | |
| - } | |
| - }""" | |
| -CHECKFIELDQ_ADDED = ( | |
| - CHECKFIELD_ANCHOR | |
| - + """ | |
| +# ROBUST ANCHOR: Use function signature instead of full multi-line block to | |
| +# avoid breakage from whitespace/indentation changes in snarkjs templates. | |
| +CHECKFIELD_FUNC_SIGNATURE = "function checkField(v)" | |
| +CHECKFIELDQ_ADDED = """ | |
| function checkFieldQ(v) { | |
| if iszero(lt(v, q)) { | |
| mstore(0, 0) | |
| return(0, 0x20) | |
| } | |
| }""" | |
| -) | |
| # 4. Insert proof-coord Fq checks right before the existing pubsignal Fr checks. | |
| -COORD_CHECKS_ANCHOR = " // Validate that all evaluations ∈ F" | |
| +# ROBUST ANCHOR: Use unique function signature instead of multi-line comment. | |
| +COORD_CHECKS_ANCHOR_SIGNATURE = "// Validate that all evaluations" | |
| COORD_CHECKS_BLOCK = """ // Validate that proof points lie in the BN254 base field Fq | |
| checkFieldQ(calldataload(_pA)) | |
| checkFieldQ(calldataload(add(_pA, 32))) | |
| @@ -67,8 +60,29 @@ COORD_CHECKS_BLOCK = """ // Validate that proof points lie in the BN2 | |
| def harden(src: str) -> str: | |
| - if G1_MUL_NEW in src: | |
| - return src | |
| + # STRENGTHENED: Check ALL expected hardening markers before returning unchanged. | |
| + # Partial hardening (missing Fq checks but present returndata checks) must | |
| + # not be silently treated as complete. | |
| + has_g1_check = G1_MUL_NEW in src | |
| + has_pair_check = "eq(returndatasize(), 0x20)" in src and "isOk :=" in src | |
| + has_checkfieldq = "function checkFieldQ(v)" in src | |
| + has_coord_checks = ( | |
| + src.count("checkFieldQ(calldataload(") >= 6 | |
| + ) # At least 6 of 8 coords | |
| + | |
| + if has_g1_check and has_pair_check and has_checkfieldq and has_coord_checks: | |
| + return src # All hardening present - idempotent | |
| + | |
| + # Check for partial hardening state | |
| + partial_present = has_g1_check or has_pair_check or has_checkfieldq | |
| + if partial_present: | |
| + raise SystemExit( | |
| + "ERROR: Partial hardening detected. Expected all four hardening layers " | |
| + "(G1 returndata check, pairing returndata check, checkFieldQ helper, " | |
| + "and 8 proof-coordinate Fq checks), but not all are present. " | |
| + f"G1: {has_g1_check}, Pair: {has_pair_check}, " | |
| + f"checkFieldQ: {has_checkfieldq}, Coord checks: {has_coord_checks}" | |
| + ) | |
| if src.count(G1_MUL_OLD) != 2: | |
| raise SystemExit( | |
| @@ -82,13 +96,15 @@ def harden(src: str) -> str: | |
| ) | |
| out = out.replace(PAIR_OLD, PAIR_NEW) | |
| - if out.count(CHECKFIELD_ANCHOR) != 1: | |
| + if CHECKFIELD_FUNC_SIGNATURE not in out: | |
| raise SystemExit("expected 1 checkField definition anchor") | |
| - out = out.replace(CHECKFIELD_ANCHOR, CHECKFIELDQ_ADDED) | |
| + out = out.replace( | |
| + CHECKFIELD_FUNC_SIGNATURE, CHECKFIELDQ_ADDED + CHECKFIELD_FUNC_SIGNATURE | |
| + ) | |
| - if out.count(COORD_CHECKS_ANCHOR) != 1: | |
| + if COORD_CHECKS_ANCHOR_SIGNATURE not in out: | |
| raise SystemExit("expected 1 'Validate that all evaluations' anchor") | |
| - out = out.replace(COORD_CHECKS_ANCHOR, COORD_CHECKS_BLOCK) | |
| + out = out.replace(COORD_CHECKS_ANCHOR_SIGNATURE, COORD_CHECKS_BLOCK) | |
| return out | |
| diff --git a/protocol/zk/scripts/preflight-test-zk.sh b/protocol/zk/scripts/preflight-test-zk.sh | |
| index a2c66c7..b17c341 100644 | |
| --- a/protocol/zk/scripts/preflight-test-zk.sh | |
| +++ b/protocol/zk/scripts/preflight-test-zk.sh | |
| @@ -11,9 +11,26 @@ ARTIFACTS_DIR="$ZK_DIR/artifacts" | |
| # circuit enters the audited artifact set. | |
| VARIANT="spend_10x4_v1" | |
| +sha256_cmd() { | |
| + if command -v sha256sum &>/dev/null; then | |
| + sha256sum "$1" | awk '{print $1}' | |
| + else | |
| + shasum -a 256 "$1" | awk '{print $1}' | |
| + fi | |
| +} | |
| + | |
| +# Verify artifact integrity against checksums before staging symlinks. | |
| +echo "==> Verifying ZK artifact integrity..." | |
| +CHECKSUMS_FILE="$ARTIFACTS_DIR/checksums/$VARIANT.json" | |
| +if [ ! -f "$CHECKSUMS_FILE" ]; then | |
| + echo "ERROR: missing checksums file for $VARIANT." >&2 | |
| + echo "Run: just fetch-zk-artifacts latest" >&2 | |
| + exit 1 | |
| +fi | |
| + | |
| required=( | |
| "$ARTIFACTS_DIR/manifest.json" | |
| - "$ARTIFACTS_DIR/checksums/$VARIANT.json" | |
| + "$CHECKSUMS_FILE" | |
| "$ARTIFACTS_DIR/circuits/$VARIANT.zkey" | |
| "$ARTIFACTS_DIR/wasm/$VARIANT/$VARIANT.wasm" | |
| "$ARTIFACTS_DIR/wasm/$VARIANT/witness_calculator.js" | |
| @@ -38,6 +55,41 @@ if [ "${#missing[@]}" -gt 0 ]; then | |
| exit 1 | |
| fi | |
| +# Hash verify all artifacts against the downloaded checksums manifest. | |
| +# This prevents test-zk from silently passing with corrupted/stale artifacts. | |
| +echo " Verifying artifact checksums..." | |
| +verify_errors=0 | |
| +while IFS= read -r filename; do | |
| + [ -n "$filename" ] || continue | |
| + | |
| + expected="$(jq -r --arg f "$filename" '.files[$f].sha256 // empty' "$CHECKSUMS_FILE")" | |
| + [ -z "$expected" ] && continue | |
| + | |
| + case "$filename" in | |
| + *.zkey) local_path="$ARTIFACTS_DIR/circuits/$filename" ;; | |
| + *.wasm) local_path="$ARTIFACTS_DIR/wasm/$VARIANT/$filename" ;; | |
| + *.bin) local_path="$ARTIFACTS_DIR/wasm/$VARIANT/$filename" ;; | |
| + witness_calculator.js) local_path="$ARTIFACTS_DIR/wasm/$VARIANT/$filename" ;; | |
| + *) local_path="$ARTIFACTS_DIR/$filename" ;; | |
| + esac | |
| + | |
| + if [ ! -f "$local_path" ]; then | |
| + echo " ✗ Missing: $local_path" >&2 | |
| + verify_errors=$((verify_errors + 1)) | |
| + elif [ "$(sha256_cmd "$local_path")" != "$expected" ]; then | |
| + echo " ✗ Checksum mismatch: $local_path" >&2 | |
| + verify_errors=$((verify_errors + 1)) | |
| + fi | |
| +done < <(jq -r '.files | keys[]' "$CHECKSUMS_FILE") | |
| + | |
| +if [ "$verify_errors" -gt 0 ]; then | |
| + echo "" >&2 | |
| + echo "Error: $verify_errors files failed checksum verification." >&2 | |
| + echo "Artifact integrity check failed — run 'just fetch-zk-artifacts latest' to re-download." >&2 | |
| + exit 1 | |
| +fi | |
| +echo " All artifacts verified. ✓" | |
| + | |
| # CircomKit's ProofTester reads from protocol/zk/build/<variant>/..., while | |
| # fetched prover artifacts live under protocol/zk/artifacts/. Stage symlinks so | |
| # `just test-zk` uses the downloaded artifacts instead of re-running setup. | |
| diff --git a/protocol/zk/scripts/test-witness.sh b/protocol/zk/scripts/test-witness.sh | |
| index b6cc80b..8b6d3db 100644 | |
| --- a/protocol/zk/scripts/test-witness.sh | |
| +++ b/protocol/zk/scripts/test-witness.sh | |
| @@ -48,8 +48,13 @@ fi | |
| PASS_COUNT=0 | |
| FAIL_COUNT=0 | |
| +SKIP_COUNT=0 | |
| FAILED_VARIANTS=() | |
| +# Default skip mode: disabled. rapidsnark proof failures fail the test. | |
| +# Enable with RAPIDSNARK_ALLOW_SKIP=1 for local Apple/Rosetta development. | |
| +ALLOW_SKIP="${RAPIDSNARK_ALLOW_SKIP:-0}" | |
| + | |
| for VARIANT in "${VARIANTS[@]}"; do | |
| INPUT="$(witness_input_for "$VARIANT")" | |
| WORKDIR="$(mktemp -d)" | |
| @@ -144,7 +149,20 @@ wc(buffer).then(async (calc) => { | |
| echo "[5/6] Proving with rapidsnark (native witness)..." | |
| PROOF_NATIVE="$WORKDIR/proof_native.json" | |
| PUBLIC_NATIVE="$WORKDIR/public_native.json" | |
| - if rapidsnark "$ZKEY" "$WTNS_NATIVE" "$PROOF_NATIVE" "$PUBLIC_NATIVE" 2>&1; then | |
| + | |
| + # Check if rapidsnark is available | |
| + if ! command -v rapidsnark &>/dev/null; then | |
| + if [ "$ALLOW_SKIP" = "1" ]; then | |
| + echo " SKIP: rapidsnark not found (RAPIDSNARK_ALLOW_SKIP=1)" | |
| + SKIP_COUNT=$((SKIP_COUNT + 1)) | |
| + else | |
| + echo " FAIL: rapidsnark not found. Install it or set RAPIDSNARK_ALLOW_SKIP=1 for local dev." >&2 | |
| + FAIL_COUNT=$((FAIL_COUNT + 1)) | |
| + FAILED_VARIANTS+=("$VARIANT") | |
| + rm -rf "$WORKDIR" | |
| + continue | |
| + fi | |
| + elif rapidsnark "$ZKEY" "$WTNS_NATIVE" "$PROOF_NATIVE" "$PUBLIC_NATIVE" 2>&1; then | |
| echo " PASS: proof generated" | |
| echo "[6/6] Proving with rapidsnark (WASM witness)..." | |
| @@ -171,8 +189,18 @@ wc(buffer).then(async (calc) => { | |
| continue | |
| fi | |
| else | |
| - echo " SKIP: rapidsnark unavailable (likely arm64/Rosetta — AVX not supported)" | |
| - echo " Witnesses are identical; proving must be verified on x86_64 hardware." | |
| + # rapidsnark failed - this is a failure unless ALLOW_SKIP is set | |
| + if [ "$ALLOW_SKIP" = "1" ]; then | |
| + echo " SKIP: rapidsnark proof failed (RAPIDSNARK_ALLOW_SKIP=1)" | |
| + echo " Witnesses are identical; proving must be verified on x86_64 hardware." | |
| + SKIP_COUNT=$((SKIP_COUNT + 1)) | |
| + else | |
| + echo " FAIL: rapidsnark proof generation failed" >&2 | |
| + FAIL_COUNT=$((FAIL_COUNT + 1)) | |
| + FAILED_VARIANTS+=("$VARIANT") | |
| + rm -rf "$WORKDIR" | |
| + continue | |
| + fi | |
| fi | |
| PASS_COUNT=$((PASS_COUNT + 1)) | |
| @@ -183,7 +211,7 @@ done | |
| # ---- Summary ---- | |
| echo "" | |
| echo "================================================================" | |
| -echo "=== RESULTS: $PASS_COUNT passed, $FAIL_COUNT failed ===" | |
| +echo "=== RESULTS: $PASS_COUNT passed, $FAIL_COUNT failed, $SKIP_COUNT skipped ===" | |
| echo "================================================================" | |
| if [ "$FAIL_COUNT" -gt 0 ]; then | |
| @@ -192,4 +220,8 @@ if [ "$FAIL_COUNT" -gt 0 ]; then | |
| fi | |
| echo "" | |
| +if [ "$SKIP_COUNT" -gt 0 ]; then | |
| + echo "Warning: $SKIP_COUNT variants skipped due to RAPIDSNARK_ALLOW_SKIP=1." | |
| + echo "Set ALLOW_SKIP=0 and run on x86_64 to verify full proof generation." | |
| +fi | |
| echo "All variants passed. The .bin files from this build-circuit (circom v2.2.3) are safe to deploy." | |
| diff --git a/protocol/zk/scripts/upload-artifacts.sh b/protocol/zk/scripts/upload-artifacts.sh | |
| index 2d8c81e..004412d 100755 | |
| --- a/protocol/zk/scripts/upload-artifacts.sh | |
| +++ b/protocol/zk/scripts/upload-artifacts.sh | |
| @@ -53,21 +53,57 @@ file_size() { | |
| stat -f%z "$1" 2>/dev/null || stat -c%s "$1" | |
| } | |
| -# --- Step 1: Compute content-hash version --- | |
| +# --- Step 1: Collect all artifact files for version computation --- | |
| echo "==> Computing artifact version..." | |
| tmp_input="$(mktemp)" | |
| trap 'rm -f "$tmp_input"' EXIT | |
| -# circuits.json is generated and gitignored; regenerate if missing | |
| +# Include circuit source in version | |
| [ -f "$ZK_DIR/circuits.json" ] || bash "$SCRIPT_DIR/codegen.sh" circuits-json | |
| cat "$ZK_DIR/circuits.json" > "$tmp_input" | |
| cat "$ZK_DIR/circomkit.json" >> "$tmp_input" | |
| while IFS= read -r circom_file; do | |
| cat "$circom_file" >> "$tmp_input" | |
| -done < <(find "$ZK_DIR/circuits" -type f -name '*.circom' | sort) | |
| +done < <(find "$ZK_DIR/circuits" -type f -name '*.circom' | sort | grep -v '/main/' | grep -v '/test/') | |
| + | |
| +# Now add all artifact checksums to the version | |
| +# This binds the version to the complete uploaded bundle, not just the source. | |
| +for variant in $(jq -r 'keys[]' "$ZK_DIR/circuits.json" 2>/dev/null | sort); do | |
| + zkey="$CIRCUITS_DIR/${variant}.zkey" | |
| + wasm="$WASM_DIR/${variant}/${variant}.wasm" | |
| + bin="$WASM_DIR/${variant}/${variant}.bin" | |
| + wc_js="$WASM_DIR/${variant}/witness_calculator.js" | |
| + | |
| + # Add artifact hashes to version input | |
| + if [ -f "$zkey" ]; then | |
| + sha256_cmd "$zkey" >> "$tmp_input" | |
| + fi | |
| + if [ -f "$wasm" ]; then | |
| + sha256_cmd "$wasm" >> "$tmp_input" | |
| + fi | |
| + if [ -f "$bin" ]; then | |
| + sha256_cmd "$bin" >> "$tmp_input" | |
| + fi | |
| + if [ -f "$wc_js" ]; then | |
| + sha256_cmd "$wc_js" >> "$tmp_input" | |
| + fi | |
| +done | |
| + | |
| +# Also include the committed verifier constants (proof-of-bind to on-chain) | |
| +verifier_src="$REPO_ROOT/protocol/contracts/src/verifiers/spend_10x4_v1_verifier.sol" | |
| +if [ -f "$verifier_src" ]; then | |
| + # Extract the verification key hash (last 64 hex chars in the powersoftau file) | |
| + if grep -q "verification_key" "$verifier_src"; then | |
| + grep "verification_key" "$verifier_src" | tail -1 >> "$tmp_input" | |
| + fi | |
| +fi | |
| + | |
| +# Include git SHA for reproducibility context | |
| +git_sha="$(git -C "$REPO_ROOT" rev-parse --short HEAD 2>/dev/null || echo "unknown")" | |
| +echo "$git_sha" >> "$tmp_input" | |
| VERSION="v$(sha256_cmd "$tmp_input" | head -c 16)" | |
| echo " Version: $VERSION" | |
| @@ -133,6 +169,7 @@ for variant in "${variants[@]}"; do | |
| zkey="$CIRCUITS_DIR/${variant}.zkey" | |
| wasm="$WASM_DIR/${variant}/${variant}.wasm" | |
| bin="$WASM_DIR/${variant}/${variant}.bin" | |
| + wc_js="$WASM_DIR/${variant}/witness_calculator.js" | |
| if [ ! -f "$zkey" ]; then | |
| echo " Missing: $zkey" >&2 | |
| @@ -146,6 +183,11 @@ for variant in "${variants[@]}"; do | |
| echo " Missing: $bin" >&2 | |
| missing=$((missing + 1)) | |
| fi | |
| + # witness_calculator.js is now required (was previously optional) | |
| + if [ ! -f "$wc_js" ]; then | |
| + echo " Missing: $wc_js (required for ZK test gates)" >&2 | |
| + missing=$((missing + 1)) | |
| + fi | |
| done | |
| if [ "$missing" -gt 0 ]; then | |
| -- | |
| 2.43.0 | |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment