feat: McpToken — HTTP-mode mcplocal, CLI verbs, audit plumbing #50
@@ -12,4 +12,3 @@ dist
|
||||
.env.*
|
||||
deploy/docker-compose.yml
|
||||
src/cli
|
||||
src/mcplocal
|
||||
|
||||
@@ -5,7 +5,7 @@ _mcpctl() {
|
||||
local cur prev words cword
|
||||
_init_completion || return
|
||||
|
||||
local commands="status login logout config get describe delete logs create edit apply patch backup approve console cache"
|
||||
local commands="status login logout config get describe delete logs create edit apply patch backup approve console cache test"
|
||||
local project_commands="get describe delete logs create edit attach-server detach-server"
|
||||
local global_opts="-v --version --daemon-url --direct -p --project -h --help"
|
||||
local resources="servers instances secrets templates projects users groups rbac prompts promptrequests serverattachments proxymodels all"
|
||||
@@ -175,7 +175,7 @@ _mcpctl() {
|
||||
create)
|
||||
local create_sub=$(_mcpctl_get_subcmd $subcmd_pos)
|
||||
if [[ -z "$create_sub" ]]; then
|
||||
COMPREPLY=($(compgen -W "server secret project user group rbac prompt serverattachment promptrequest help" -- "$cur"))
|
||||
COMPREPLY=($(compgen -W "server secret project user group rbac mcptoken prompt serverattachment promptrequest help" -- "$cur"))
|
||||
else
|
||||
case "$create_sub" in
|
||||
server)
|
||||
@@ -194,7 +194,10 @@ _mcpctl() {
|
||||
COMPREPLY=($(compgen -W "--description --member --force -h --help" -- "$cur"))
|
||||
;;
|
||||
rbac)
|
||||
COMPREPLY=($(compgen -W "--subject --binding --operation --force -h --help" -- "$cur"))
|
||||
COMPREPLY=($(compgen -W "--subject --roleBindings --force -h --help" -- "$cur"))
|
||||
;;
|
||||
mcptoken)
|
||||
COMPREPLY=($(compgen -W "-p --project --rbac --bind --ttl --description --force -h --help" -- "$cur"))
|
||||
;;
|
||||
prompt)
|
||||
COMPREPLY=($(compgen -W "-p --project --content --content-file --priority --link -h --help" -- "$cur"))
|
||||
@@ -311,6 +314,21 @@ _mcpctl() {
|
||||
esac
|
||||
fi
|
||||
return ;;
|
||||
test)
|
||||
local test_sub=$(_mcpctl_get_subcmd $subcmd_pos)
|
||||
if [[ -z "$test_sub" ]]; then
|
||||
COMPREPLY=($(compgen -W "mcp help" -- "$cur"))
|
||||
else
|
||||
case "$test_sub" in
|
||||
mcp)
|
||||
COMPREPLY=($(compgen -W "--token --tool --args --expect-tools --timeout -o --output --no-health -h --help" -- "$cur"))
|
||||
;;
|
||||
*)
|
||||
COMPREPLY=($(compgen -W "-h --help" -- "$cur"))
|
||||
;;
|
||||
esac
|
||||
fi
|
||||
return ;;
|
||||
help)
|
||||
COMPREPLY=($(compgen -W "$commands" -- "$cur"))
|
||||
return ;;
|
||||
|
||||
@@ -4,7 +4,7 @@
|
||||
# Erase any stale completions from previous versions
|
||||
complete -c mcpctl -e
|
||||
|
||||
set -l commands status login logout config get describe delete logs create edit apply patch backup approve console cache
|
||||
set -l commands status login logout config get describe delete logs create edit apply patch backup approve console cache test
|
||||
set -l project_commands get describe delete logs create edit attach-server detach-server
|
||||
|
||||
# Disable file completions by default
|
||||
@@ -231,6 +231,7 @@ complete -c mcpctl -n "not __mcpctl_has_project; and not __fish_seen_subcommand_
|
||||
complete -c mcpctl -n "not __mcpctl_has_project; and not __fish_seen_subcommand_from $commands" -a approve -d 'Approve a pending prompt request (atomic: delete request, create prompt)'
|
||||
complete -c mcpctl -n "not __mcpctl_has_project; and not __fish_seen_subcommand_from $commands" -a console -d 'Interactive MCP console — unified timeline with tools, provenance, and lab replay'
|
||||
complete -c mcpctl -n "not __mcpctl_has_project; and not __fish_seen_subcommand_from $commands" -a cache -d 'Manage ProxyModel pipeline cache'
|
||||
complete -c mcpctl -n "not __mcpctl_has_project; and not __fish_seen_subcommand_from $commands" -a test -d 'Utilities for testing MCP endpoints and config'
|
||||
|
||||
# Project-scoped commands (with --project)
|
||||
complete -c mcpctl -n "__mcpctl_has_project; and not __fish_seen_subcommand_from $project_commands" -a get -d 'List resources (servers, projects, instances, all)'
|
||||
@@ -280,13 +281,14 @@ complete -c mcpctl -n "__mcpctl_subcmd_active config claude-generate" -l stdout
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active config impersonate" -l quit -d 'Stop impersonating and return to original identity'
|
||||
|
||||
# create subcommands
|
||||
set -l create_cmds server secret project user group rbac prompt serverattachment promptrequest
|
||||
set -l create_cmds server secret project user group rbac mcptoken prompt serverattachment promptrequest
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from create; and not __fish_seen_subcommand_from $create_cmds" -a server -d 'Create an MCP server definition'
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from create; and not __fish_seen_subcommand_from $create_cmds" -a secret -d 'Create a secret'
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from create; and not __fish_seen_subcommand_from $create_cmds" -a project -d 'Create a project'
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from create; and not __fish_seen_subcommand_from $create_cmds" -a user -d 'Create a user'
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from create; and not __fish_seen_subcommand_from $create_cmds" -a group -d 'Create a group'
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from create; and not __fish_seen_subcommand_from $create_cmds" -a rbac -d 'Create an RBAC binding definition'
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from create; and not __fish_seen_subcommand_from $create_cmds" -a mcptoken -d 'Create a project-scoped API token for HTTP-mode mcplocal. The raw token is printed once.'
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from create; and not __fish_seen_subcommand_from $create_cmds" -a prompt -d 'Create an approved prompt'
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from create; and not __fish_seen_subcommand_from $create_cmds" -a serverattachment -d 'Attach a server to a project'
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from create; and not __fish_seen_subcommand_from $create_cmds" -a promptrequest -d 'Create a prompt request (pending proposal that needs approval)'
|
||||
@@ -332,10 +334,17 @@ complete -c mcpctl -n "__mcpctl_subcmd_active create group" -l force -d 'Update
|
||||
|
||||
# create rbac options
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active create rbac" -l subject -d 'Subject as Kind:name (repeat for multiple)' -x
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active create rbac" -l binding -d 'Role binding as role:resource (e.g. edit:servers, run:projects)' -x
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active create rbac" -l operation -d 'Operation binding (e.g. logs, backup)' -x
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active create rbac" -l roleBindings -d 'Role binding as key:value pairs, e.g. "role:view,resource:servers" or "role:view,resource:servers,name:my-ha" or "action:logs" (repeat for multiple)' -x
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active create rbac" -l force -d 'Update if already exists'
|
||||
|
||||
# create mcptoken options
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active create mcptoken" -s p -l project -d 'Project this token is bound to' -xa '(__mcpctl_project_names)'
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active create mcptoken" -l rbac -d 'Base RBAC: \'empty\' (default, no bindings) or \'clone\' (snapshot creator\'s perms)' -x
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active create mcptoken" -l bind -d 'Additional role binding as key:value pairs, e.g. "role:view,resource:servers" or "action:logs" (repeat for multiple). Creator perms are the ceiling.' -x
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active create mcptoken" -l ttl -d 'Expiry: \'30d\', \'12h\', \'never\', or an ISO8601 datetime' -x
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active create mcptoken" -l description -d 'Freeform description' -x
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active create mcptoken" -l force -d 'Revoke any existing active token with this name, then create a new one'
|
||||
|
||||
# create prompt options
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active create prompt" -s p -l project -d 'Project name to scope the prompt to' -xa '(__mcpctl_project_names)'
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active create prompt" -l content -d 'Prompt content text' -x
|
||||
@@ -369,6 +378,19 @@ complete -c mcpctl -n "__fish_seen_subcommand_from cache; and not __fish_seen_su
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active cache clear" -l older-than -d 'Clear entries older than N days' -x
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active cache clear" -s y -l yes -d 'Skip confirmation'
|
||||
|
||||
# test subcommands
|
||||
set -l test_cmds mcp
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from test; and not __fish_seen_subcommand_from $test_cmds" -a mcp -d 'Verify a Streamable-HTTP MCP endpoint: health, initialize, tools/list, optionally call a tool.'
|
||||
|
||||
# test mcp options
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active test mcp" -l token -d 'Bearer token (also reads $MCPCTL_TOKEN)' -x
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active test mcp" -l tool -d 'Invoke a specific tool after listing' -x
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active test mcp" -l args -d 'JSON-encoded arguments for --tool' -x
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active test mcp" -l expect-tools -d 'Comma-separated tool names that MUST appear; fails otherwise' -x
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active test mcp" -l timeout -d 'Per-request timeout in seconds' -x
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active test mcp" -s o -l output -d 'Output format: text or json' -x
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active test mcp" -l no-health -d 'Skip the /healthz preflight check'
|
||||
|
||||
# status options
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from status" -s o -l output -d 'output format (table, json, yaml)' -x
|
||||
|
||||
|
||||
60
deploy/Dockerfile.mcplocal
Normal file
60
deploy/Dockerfile.mcplocal
Normal file
@@ -0,0 +1,60 @@
|
||||
# HTTP-only mcplocal for k8s deploy (Service `mcp`, Ingress `mcp.ad.itaz.eu`).
|
||||
# Container CMD runs the `serve.ts` entry which — unlike the systemd/STDIO
|
||||
# entry — has no stdin/stdout MCP client and bootstraps exclusively from env.
|
||||
|
||||
# Stage 1: Build TypeScript
|
||||
FROM node:20-alpine AS builder
|
||||
|
||||
RUN corepack enable && corepack prepare pnpm@9.15.0 --activate
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
# Copy workspace config and package manifests
|
||||
COPY pnpm-workspace.yaml pnpm-lock.yaml package.json tsconfig.base.json ./
|
||||
COPY src/mcplocal/package.json src/mcplocal/tsconfig.json src/mcplocal/
|
||||
COPY src/shared/package.json src/shared/tsconfig.json src/shared/
|
||||
COPY src/db/package.json src/db/tsconfig.json src/db/
|
||||
|
||||
# Install all dependencies
|
||||
RUN pnpm install --frozen-lockfile
|
||||
|
||||
# Copy source
|
||||
COPY src/mcplocal/src/ src/mcplocal/src/
|
||||
COPY src/shared/src/ src/shared/src/
|
||||
COPY src/db/src/ src/db/src/
|
||||
COPY src/db/prisma/ src/db/prisma/
|
||||
|
||||
# Build shared first, then mcplocal (which depends on it). mcplocal has no
|
||||
# runtime dependency on db — the prisma client is only used by mcpd.
|
||||
RUN pnpm -F @mcpctl/shared build && pnpm -F @mcpctl/mcplocal build
|
||||
|
||||
# Stage 2: Production runtime
|
||||
FROM node:20-alpine
|
||||
|
||||
RUN corepack enable && corepack prepare pnpm@9.15.0 --activate
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
# Copy workspace config, manifests, and lockfile
|
||||
COPY pnpm-workspace.yaml pnpm-lock.yaml package.json ./
|
||||
COPY src/mcplocal/package.json src/mcplocal/
|
||||
COPY src/shared/package.json src/shared/
|
||||
|
||||
# Install deps — no db / prisma here. NOTE(review): no --prod flag, so dev deps are installed too; confirm that is intended.
|
||||
RUN pnpm install --frozen-lockfile
|
||||
|
||||
# Copy built output
|
||||
COPY --from=builder /app/src/shared/dist/ src/shared/dist/
|
||||
COPY --from=builder /app/src/mcplocal/dist/ src/mcplocal/dist/
|
||||
|
||||
EXPOSE 3200
|
||||
|
||||
# Cache directory — expected to be mounted as a PVC in k8s.
|
||||
VOLUME /var/lib/mcplocal/cache
|
||||
|
||||
HEALTHCHECK --interval=10s --timeout=5s --retries=3 --start-period=10s \
|
||||
CMD wget -q --spider http://localhost:3200/healthz || exit 1
|
||||
|
||||
# MCPLOCAL_MCPD_URL is required (in-cluster mcpd URL). No MCPLOCAL_MCPD_TOKEN
|
||||
# is needed: inbound bearer tokens are forwarded to mcpd. Others default sensibly.
|
||||
CMD ["node", "src/mcplocal/dist/serve.js"]
|
||||
174
docs/mcptoken-implementation.md
Normal file
174
docs/mcptoken-implementation.md
Normal file
@@ -0,0 +1,174 @@
|
||||
# mcptoken + HTTP-mode mcplocal — implementation log
|
||||
|
||||
Companion to the approved plan at `/home/michal/.claude/plans/lets-discuss-something-i-bright-lovelace.md`.
|
||||
This file is updated as each milestone lands, so you can review what was actually done vs. what was planned.
|
||||
|
||||
## Context (why)
|
||||
|
||||
You're running your own vLLM inference outside Claude Code and want it to consume mcpctl over MCP with the same UX Claude gets: project-scoped server discovery, proxy models, the pipeline cache. Today `mcplocal` is systemd-only and serves STDIO — unreachable from off-host and unauthenticated. This work adds:
|
||||
|
||||
1. A containerized, network-accessible `mcplocal` serving Streamable HTTP.
|
||||
2. A new `McpToken` resource (CLI: `mcpctl get/create/delete mcptoken`) — project-scoped bearer tokens with the same RBAC stack as users. Hashed at rest; raw value shown once.
|
||||
3. Tokens as a first-class RBAC subject kind (`McpToken:<sha>`), with a creator-permission ceiling so non-admins cannot mint escalated tokens.
|
||||
4. k8s deploy (Service `mcp`, Ingress `mcp.ad.itaz.eu`, PVC-backed `FileCache`).
|
||||
5. A CLI breaking change: `mcpctl create rbac --binding edit:servers` → `--roleBindings role:edit,resource:servers`. You explicitly asked for this; only one command uses it.
|
||||
6. A product-grade `mcpctl test mcp <url>` verb for validating any Streamable-HTTP MCP endpoint, reused by smoke tests.
|
||||
|
||||
## Branch
|
||||
|
||||
All work lives on `feat/mcptoken` (off `main` at `3149ea3`).
|
||||
|
||||
## Pre-work committed to main (outside this branch)
|
||||
|
||||
Before starting the feature, we flushed your in-flight changes to main so they wouldn't travel with the branch:
|
||||
|
||||
- **`3149ea3 fix: MCP proxy resilience — discovery cache, default liveness probes`** — per-server `tools/list` cache in `McpRouter` with positive+negative TTL so dead upstreams only stall the first call; default liveness probe (tools/list through the real production path) applied to any RUNNING instance without an explicit healthCheck. Already pushed to origin.
|
||||
|
||||
## Status legend
|
||||
|
||||
- ✅ done
|
||||
- 🚧 in progress
|
||||
- ⬜ not started
|
||||
|
||||
## PR 1 — Schema + token helpers + mcpd CRUD routes ✅
|
||||
|
||||
| # | Step | Status |
|
||||
|---|---|---|
|
||||
| 1 | `McpToken` Prisma model + Project/User reverse relations; `AuditEvent.tokenName` / `tokenSha` + index | ✅ |
|
||||
| 2 | `src/shared/src/tokens/index.ts` — `generateToken`, `hashToken`, `isMcpToken`, `timingSafeEqualHex`, `TOKEN_PREFIX` | ✅ |
|
||||
| 3 | `src/mcpd/src/repositories/mcp-token.repository.ts` + new interfaces in `repositories/interfaces.ts` | ✅ |
|
||||
| 4 | `src/mcpd/src/services/mcp-token.service.ts` — creator-ceiling via `rbacService.canAccess`/`canRunOperation`, raw token returned only once, auto-creates an `RbacDefinition` with subject `McpToken:<sha>` when bindings are non-empty | ✅ |
|
||||
| 5 | `src/mcpd/src/routes/mcp-tokens.ts` — POST / GET / GET:id / DELETE:id + POST:id/revoke + GET /introspect | ✅ |
|
||||
| 6 | Wired into `main.ts` — repo/service constructed, routes registered, `mcptokens` added to URL→permission map + name resolver; `/mcptokens/introspect` added to auth-skip list so mcplocal can call it with a raw McpToken bearer | ✅ |
|
||||
| 7 | RBAC extensions: new subject kind `McpToken` in `rbac-definition.schema.ts`; `mcptokens` added to `RBAC_RESOURCES` and `RESOURCE_ALIASES`; `rbac.service.ts` threads optional `mcpTokenSha` through `canAccess`, `canRunOperation`, `getAllowedScope`, `getPermissions`; resolver matches `{kind:'McpToken', name: sha}` | ✅ |
|
||||
| 8 | Unit tests — `tests/mcp-token-service.test.ts` covering: empty/clone modes, ceiling rejection, RbacDefinition auto-create with correct `McpToken:<sha>` subject, duplicate-name conflict, introspect valid/revoked/expired/unknown, revoke deletes the RbacDefinition. 11/11 green. Full mcpd suite still 648/648. | ✅ |
|
||||
|
||||
### What this PR does NOT do yet (coming in PR 3)
|
||||
|
||||
- The mcpd **auth middleware** does not yet dispatch on the token prefix. A raw `mcpctl_pat_…` bearer sent to any `/api/v1/*` endpoint (other than `/introspect`) is still rejected as an invalid session. That's intentional — PR 3 extends `middleware/auth.ts` to recognize both session bearers and McpToken bearers.
|
||||
- No CLI yet. Tokens can be created only via `POST /api/v1/mcptokens` for now.
|
||||
|
||||
## PR 2 — RBAC CLI migration ✅
|
||||
|
||||
Migrated `mcpctl create rbac` from positional flag syntax to the key=value form you asked for.
|
||||
|
||||
Before:
|
||||
```
|
||||
mcpctl create rbac developers \
|
||||
--subject User:alice@test.com \
|
||||
--binding edit:servers \
|
||||
--binding view:servers:my-ha \
|
||||
--operation logs
|
||||
```
|
||||
After:
|
||||
```
|
||||
mcpctl create rbac developers \
|
||||
--subject User:alice@test.com \
|
||||
--roleBindings role:edit,resource:servers \
|
||||
--roleBindings role:view,resource:servers,name:my-ha \
|
||||
--roleBindings action:logs
|
||||
```
|
||||
|
||||
| # | Step | Status |
|
||||
|---|---|---|
|
||||
| 1 | New shared parser at `src/cli/src/commands/rbac-bindings.ts` exporting `parseRoleBinding(entry)` | ✅ |
|
||||
| 2 | `src/cli/src/commands/create.ts` — old `--binding`/`--operation` flags replaced with one repeatable `--roleBindings <kv>`. Uses the new parser. | ✅ |
|
||||
| 3 | Tests in `src/cli/tests/commands/create.test.ts` rewritten to the new form (8 RBAC tests updated) | ✅ |
|
||||
| 4 | New dedicated unit test `src/cli/tests/commands/rbac-bindings.test.ts` — 9 cases covering unscoped / name-scoped / action / trim / empty-value / unknown-key / action-conflict / missing-role rejections | ✅ |
|
||||
| 5 | Shell completions regenerated via `pnpm completions:generate` — both `completions/mcpctl.{bash,fish}` now offer `--roleBindings`, no longer `--binding`/`--operation` | ✅ |
|
||||
| 6 | Nothing in `docs/` or `README.md` referenced the old flags | ✅ |
|
||||
|
||||
Full CLI suite still 406/406 green. On-disk YAML shape (`roleBindings: [...]`) is unchanged, so backups and existing `apply -f` files keep working.
|
||||
|
||||
The extracted `parseRoleBinding` helper is what PR 3's `mcpctl create mcptoken --bind <kv>` flag will reuse.
|
||||
|
||||
## PR 3 — CLI mcptoken verbs + mcpd auth dispatch + audit ✅
|
||||
|
||||
| # | Step | Status |
|
||||
|---|---|---|
|
||||
| 1 | `src/mcpd/src/middleware/auth.ts` — dispatch on the bearer prefix. `mcpctl_pat_…` → new `findMcpToken(hash)` dep → populates `request.mcpToken` + `request.userId = ownerId`. Other bearers → existing `findSession` path. Returns 401 for revoked, expired, or unknown tokens. Fastify module augmentation adds `request.mcpToken?: McpTokenPrincipal`. | ✅ |
|
||||
| 2 | `src/mcpd/src/main.ts` — wires `findMcpToken: mcpTokenRepo.findByHash`. Threads `mcpTokenSha` into `canAccess` / `canRunOperation` / `getAllowedScope`. Adds a second project-scope check: `McpToken` principals can only reach resources inside their bound project (additional guard on top of the route handler checks). | ✅ |
|
||||
| 3 | New auth tests (`tests/auth.test.ts`) — 3 McpToken dispatch cases: happy path sets userId + mcpToken, revoked → 401, no findMcpToken wired → 401. Session path unchanged. | ✅ |
|
||||
| 4 | `mcpctl create mcptoken <name> -p <proj> [--rbac empty\|clone] [--bind …] [--ttl …]` — new subcommand. Reuses `parseRoleBinding` from PR 2. `parseTtl` helper accepts `30d`/`12h`/`never`/ISO8601. `--force` revokes the existing active token and creates a new one. Raw token is printed once with a "copy now" banner. | ✅ |
|
||||
| 5 | `mcpctl get mcptokens` + `mcpctl get mcptoken <name> -p <proj>` + `mcpctl describe mcptoken <name> -p <proj>` + `mcpctl delete mcptoken <name> -p <proj>`. Names are project-scoped, so all verbs require `-p` unless a CUID is passed. Table columns: NAME / PROJECT / PREFIX / CREATED / LAST USED / EXPIRES / STATUS. Describe surfaces the auto-created RbacDefinition's bindings (matched by `mcptoken-<id>` name convention). | ✅ |
|
||||
| 6 | `mcpctl apply -f` — added `McpTokenSpecSchema`, `McpToken: 'mcptokens'` in `KIND_TO_RESOURCE`, and an applier that creates if missing or logs "already active — skipped" (tokens are immutable). Raw token printed on create. | ✅ |
|
||||
| 7 | Resource aliases — `mcptoken`/`mcptokens`/`token`/`tokens` all resolve to `mcptokens`. `stripInternalFields` scrubs the secret and derived fields and promotes `projectName` → `project` for YAML round-trip. | ✅ |
|
||||
| 8 | Audit pipeline — `src/mcplocal/src/audit/types.ts` gains `tokenName?`/`tokenSha?`; collector gets `setSessionMcpToken(sessionId, {tokenName, tokenSha})` alongside `setSessionUserName`, both merged into a per-session principal map. `src/mcpd/src/services/audit-event.service.ts` accepts `tokenName` and `tokenSha` query params (repo already extended in PR 1). `console/audit-types.ts` carries the new optional fields so the TUI can surface them in a follow-up. | ✅ |
|
||||
| 9 | Shell completions regenerated — `mcpctl create mcptoken` flags (`--project`, `--rbac`, `--bind`, `--ttl`, `--description`, `--force`) and the new resource alias land in both bash and fish completions. `completions.test.ts` freshness check passes. | ✅ |
|
||||
|
||||
### What this PR does NOT do yet (coming in PR 4)
|
||||
|
||||
- No HTTP-mode mcplocal binary yet. Tokens can be used to hit mcpd directly via `/api/v1/…` with `Authorization: Bearer mcpctl_pat_…`, but the containerized `/projects/<p>/mcp` endpoint and its token-auth preHandler don't exist yet.
|
||||
- The audit-console TUI still shows only `userName` columns; adding a `TOKEN` column is a UI polish follow-up.
|
||||
|
||||
### Test stats
|
||||
|
||||
- 1764/1764 tests pass workspace-wide (up from ~1750 before PR 3).
|
||||
- Build clean across all 5 packages.
|
||||
- Completions freshness check green.
|
||||
|
||||
## PR 4 — HTTP-mode mcplocal + container + `mcpctl test mcp` + smoke ✅
|
||||
|
||||
| # | Step | Status |
|
||||
|---|---|---|
|
||||
| 1 | **Shared HTTP MCP client** — `src/shared/src/mcp-http/index.ts`. `McpHttpSession(url, {bearer?, headers?, timeoutMs?})` with `initialize / listTools / callTool / close / send / sendNotification`. Handles http + https, multiplexed SSE bodies, JSON-RPC id correlation. Distinct `McpProtocolError` / `McpTransportError` classes for contract-vs-transport failures. Plus `deriveBaseUrl(url)` + `mcpHealthCheck(base)`. Exported from `@mcpctl/shared`. | ✅ |
|
||||
| 2 | **`mcpctl test mcp <url>`** — new CLI verb under `src/cli/src/commands/test-mcp.ts`. Flags: `--token` (also reads `$MCPCTL_TOKEN`), `--tool`, `--args` (JSON), `--expect-tools`, `--timeout`, `-o text\|json`, `--no-health`. Exit codes: 0 PASS, 1 TRANSPORT/AUTH FAIL, 2 CONTRACT FAIL (e.g. missing tool or `isError=true`). | ✅ |
|
||||
| 3 | **Unit tests** for the verb — `src/cli/tests/commands/test-mcp.test.ts`. 9 cases: happy path, health preflight failure, `--expect-tools` miss / hit, transport throw, `--tool` + `isError` → exit 2, `-o json` report, `$MCPCTL_TOKEN` env fallback, invalid `--args`. All green. | ✅ |
|
||||
| 4 | **`src/mcplocal/src/serve.ts`** — new HTTP-only entry. Drops `StdioProxyServer` and `--upstream`; forces host/port from `MCPLOCAL_HTTP_HOST`/`MCPLOCAL_HTTP_PORT`; requires `MCPLOCAL_MCPD_URL`. Registers a Fastify preHandler that runs the new `token-auth` middleware on `/projects/*` and `/mcp`. Preserves LLM provider loading + proxymodel hot-reload watchers. | ✅ |
|
||||
| 5 | **`src/mcplocal/src/http/token-auth.ts`** — Fastify preHandler that validates `mcpctl_pat_…` bearers by calling `GET <mcpd>/api/v1/mcptokens/introspect`. Cache: 30s positive / 5s negative TTL keyed on `hashToken(raw)`. Rejects non-Bearer, non-`mcpctl_pat_`, revoked, expired, and wrong-project (403 when path `projectName` ≠ token's bound project). Sets `request.mcpToken = { tokenName, tokenSha, projectName }` for the audit collector. | ✅ |
|
||||
| 6 | **FileCache PVC plumbing** — `src/mcplocal/src/http/project-mcp-endpoint.ts` now honours `process.env.MCPLOCAL_CACHE_DIR` at both `FileCache` construction sites (gated + dynamic). No constructor change needed — `FileCache` already accepted a `dir` config; we just wire the env-derived value through. | ✅ |
|
||||
| 7 | **Audit collector integration** — when `request.mcpToken` is set, the `onsessioninitialized` handler in `project-mcp-endpoint.ts` now also calls `collector.setSessionMcpToken(id, {tokenName, tokenSha})` alongside the existing `setSessionUserName`. Session map from PR 3 merges both principals. | ✅ |
|
||||
| 8 | **Container image** — `deploy/Dockerfile.mcplocal` mirrors `Dockerfile.mcpd` shape: multi-stage Node 20 Alpine, pnpm workspace build of `@mcpctl/shared` + `@mcpctl/mcplocal`, runtime `CMD node src/mcplocal/dist/serve.js`, `EXPOSE 3200`, `VOLUME /var/lib/mcplocal/cache`, `HEALTHCHECK` on `/healthz`. | ✅ |
|
||||
| 9 | **Build + push script** — `scripts/build-mcplocal.sh` (executable, 755) mirrors `build-mcpd.sh`. Pushes to `10.0.0.194:3012/michal/mcplocal:latest`. | ✅ |
|
||||
| 10 | **`fulldeploy.sh`** — now a 4-step pipeline: (1) build + push mcpd, (2) build + push mcplocal, (3) rollout both deployments on k8s (mcplocal gated behind a `kubectl get deployment/mcplocal` check so the script stays green before the Pulumi stack lands), (4) RPM release. Smoke suite runs at the end as before. | ✅ |
|
||||
| 11 | **`mcpctl test mcp` + new create flags in completions** — bash + fish regenerated. `src/mcplocal/package.json` gains a `serve` script for convenience. | ✅ |
|
||||
| 12 | **Smoke test** — `src/mcplocal/tests/smoke/mcptoken.smoke.test.ts`. Gated on `healthz($MCPGW_URL)`; skipped with a clear warning if the gateway is unreachable. Scenarios: happy path via `mcpctl test mcp` → exit 0; cross-project → exit 1 with a 403 message; `--expect-tools __nonexistent__` → exit 2; delete-then-retry after the 5s negative-cache window → exit 1 with 401. Cleans up both projects at the end. | ✅ |
|
||||
|
||||
### Deploy-time steps still owed (outside this repo)
|
||||
|
||||
- **Pulumi (`../kubernetes-deployment`, stack `homelab`)** — add a `Deployment` named `mcplocal` in ns `mcpctl` pointing at `10.0.0.194:3012/michal/mcplocal:latest` (internal registry), a `Service` named `mcp` (port 3200→80, ClusterIP), an `Ingress` for `mcp.ad.itaz.eu` with TLS via the existing cluster-issuer, a PVC `mcplocal-cache` (10Gi RWO, mounted `/var/lib/mcplocal/cache`), and a NetworkPolicy mirroring mcpd's. Required env: **just `MCPLOCAL_MCPD_URL`** (point at `http://mcpd.mcpctl.svc.cluster.local:3100`). Optionally `MCPLOCAL_TOKEN_POSITIVE_TTL_MS` / `MCPLOCAL_TOKEN_NEGATIVE_TTL_MS` for stricter revocation. `fulldeploy.sh` already runs `pulumi preview` first and halts on drift.
|
||||
- **No pod-level secret required** (revised from earlier draft) — the pod has no persistent identity to mcpd. Every inbound `Authorization: Bearer mcpctl_pat_…` is forwarded verbatim to mcpd, and mcpd's auth middleware resolves the McpToken principal. This eliminates the original `MCPLOCAL_MCPD_TOKEN` secret and its rotation story. Trade-off: a token with `--rbac=empty` can't read `/api/v1/projects/:name/servers`, but it also can't meaningfully serve MCP, so this is the right failure mode. See `src/mcplocal/src/serve.ts` header comment.
|
||||
- **LLM provider config** — if any project served by this pod is `gated: true`, mount your `~/.mcpctl/config.json` as a ConfigMap at `/root/.mcpctl/config.json`. Ungated projects (proxyModel `content-pipeline` or no LLM-driven stages) need nothing.
|
||||
|
||||
### Test stats
|
||||
|
||||
- 1773/1773 workspace tests pass (up from 1764 before PR 4).
|
||||
- All five packages build clean.
|
||||
- Shell completions fresh.
|
||||
- `mcpctl test mcp --help` and `mcpctl create mcptoken --help` render expected surfaces.
|
||||
|
||||
## End-to-end verification (manual, after Pulumi resources land)
|
||||
|
||||
```bash
|
||||
# From a workstation outside the k8s cluster:
|
||||
mcpctl create project vllm --force
|
||||
TOK=$(mcpctl create mcptoken vllm-token --project vllm --rbac clone | grep -o 'mcpctl_pat_[A-Za-z0-9]*')
|
||||
export MCPCTL_TOKEN="$TOK"
|
||||
|
||||
# Probe the public gateway
|
||||
mcpctl test mcp https://mcp.ad.itaz.eu/projects/vllm/mcp --expect-tools begin_session
|
||||
|
||||
# Negative: wrong project → exit 1
|
||||
mcpctl test mcp https://mcp.ad.itaz.eu/projects/other/mcp
|
||||
echo $? # 1
|
||||
|
||||
# Audit — the call should be tagged with tokenName=vllm-token
|
||||
mcpctl console --audit # look for the TOKEN column once the TUI patch lands
|
||||
```
|
||||
|
||||
## Design decisions recap (so you don't have to re-read the plan)
|
||||
|
||||
| Decision | Choice |
|
||||
|---|---|
|
||||
| Transport | Streamable HTTP only |
|
||||
| Binary shape | Same `@mcpctl/mcplocal` package, two entry files (`main.ts` STDIO, `serve.ts` HTTP) |
|
||||
| Container runtime | Node (not bun-compiled) — mirrors mcpd |
|
||||
| Cache | PVC at `/var/lib/mcplocal/cache` |
|
||||
| Hostname | k8s Service `mcp`, Ingress `mcp.ad.itaz.eu` |
|
||||
| Token format | `mcpctl_pat_<32-byte base62>`, stored as SHA-256, shown-once at create |
|
||||
| Resource | `McpToken`, CLI noun `mcptoken`, one-project-per-token, FK cascade |
|
||||
| Subject kind | New `McpToken:<sha>` |
|
||||
| TTL | No default. Optional `--ttl 30d` / `never` / ISO date |
|
||||
| Default bindings | `--rbac=empty` (default), `--rbac=clone`, `--bind <kv>` — creator ceiling enforced server-side |
|
||||
| Binding CLI | `--roleBindings role:view,resource:servers[,name:foo]` or `--roleBindings action:logs` |
|
||||
| Project enforcement | Endpoint visibility only (no strict create-time check) — same mechanism Claude uses |
|
||||
@@ -53,18 +53,30 @@ else
|
||||
fi
|
||||
|
||||
echo ""
|
||||
echo ">>> Step 1/3: Build & push mcpd Docker image"
|
||||
echo ">>> Step 1/4: Build & push mcpd Docker image"
|
||||
echo ""
|
||||
bash scripts/build-mcpd.sh "$@"
|
||||
|
||||
echo ""
|
||||
echo ">>> Step 2/3: Roll out mcpd on k8s ($KUBE_CONTEXT / $KUBE_NAMESPACE)"
|
||||
echo ">>> Step 2/4: Build & push mcplocal (HTTP-mode) Docker image"
|
||||
echo ""
|
||||
bash scripts/build-mcplocal.sh "$@"
|
||||
|
||||
echo ""
|
||||
echo ">>> Step 3/4: Roll out mcpd + mcplocal on k8s ($KUBE_CONTEXT / $KUBE_NAMESPACE)"
|
||||
echo ""
|
||||
kubectl --context "$KUBE_CONTEXT" -n "$KUBE_NAMESPACE" rollout restart "deployment/$KUBE_DEPLOYMENT"
|
||||
kubectl --context "$KUBE_CONTEXT" -n "$KUBE_NAMESPACE" rollout status "deployment/$KUBE_DEPLOYMENT" --timeout=3m
|
||||
if kubectl --context "$KUBE_CONTEXT" -n "$KUBE_NAMESPACE" get deployment/mcplocal >/dev/null 2>&1; then
|
||||
kubectl --context "$KUBE_CONTEXT" -n "$KUBE_NAMESPACE" rollout restart deployment/mcplocal
|
||||
kubectl --context "$KUBE_CONTEXT" -n "$KUBE_NAMESPACE" rollout status deployment/mcplocal --timeout=3m
|
||||
else
|
||||
echo " NOTE: deployment/mcplocal does not exist in the cluster yet — skipping rollout."
|
||||
echo " Apply the Pulumi stack in ../kubernetes-deployment to create it."
|
||||
fi
|
||||
|
||||
echo ""
|
||||
echo ">>> Step 3/3: Build, publish & install RPM"
|
||||
echo ">>> Step 4/4: Build, publish & install RPM"
|
||||
echo ""
|
||||
bash scripts/release.sh
|
||||
|
||||
|
||||
83
scripts/build-mcplocal.sh
Executable file
83
scripts/build-mcplocal.sh
Executable file
@@ -0,0 +1,83 @@
|
||||
#!/bin/bash
# Build the mcplocal (HTTP-only) Docker image and push it to the Gitea
# container registry.
#
# Usage:
#   ./build-mcplocal.sh [tag]                        # Build for native arch
#   ./build-mcplocal.sh [tag] --platform linux/amd64
#   ./build-mcplocal.sh [tag] --multi-arch           # linux/amd64 + linux/arm64
#
# Requires GITEA_TOKEN, from the environment or the project's .env file.
# -e: abort on first failing command; pipefail: a failure anywhere in a
# pipeline fails the pipeline (plain `set -e` misses that).
set -eo pipefail

SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
PROJECT_ROOT="$(dirname "$SCRIPT_DIR")"
cd "$PROJECT_ROOT"

# Load .env for GITEA_TOKEN
if [ -f .env ]; then
  set -a; source .env; set +a
fi

# Push directly to internal address (external proxy has body size limit)
REGISTRY="10.0.0.194:3012"
IMAGE="mcplocal"
TAG="${1:-latest}"

# Fail early with a clear message instead of attempting a login with an
# empty password later on.
if [ -z "${GITEA_TOKEN:-}" ]; then
  echo "ERROR: GITEA_TOKEN is not set (export it or add it to .env)" >&2
  exit 1
fi

PLATFORM=""
MULTI_ARCH=false
shift 2>/dev/null || true   # drop the tag argument if one was given
while [[ $# -gt 0 ]]; do
  case "$1" in
    --platform)
      PLATFORM="$2"
      shift 2
      ;;
    --multi-arch)
      MULTI_ARCH=true
      shift
      ;;
    *)
      shift
      ;;
  esac
done

# Log in feeding the token via stdin so it never appears in `ps` output
# (a `-p <token>` argument is visible to every user on the host).
registry_login() {
  echo "==> Logging in to $REGISTRY..."
  printf '%s' "$GITEA_TOKEN" | podman login --tls-verify=false -u michal --password-stdin "$REGISTRY"
}

if [ "$MULTI_ARCH" = true ]; then
  echo "==> Building multi-arch $IMAGE image (linux/amd64 + linux/arm64)..."
  podman build --platform linux/amd64,linux/arm64 \
    --manifest "$IMAGE:$TAG" -f deploy/Dockerfile.mcplocal .

  echo "==> Tagging manifest as $REGISTRY/michal/$IMAGE:$TAG..."
  podman tag "$IMAGE:$TAG" "$REGISTRY/michal/$IMAGE:$TAG"

  registry_login

  echo "==> Pushing manifest to $REGISTRY/michal/$IMAGE:$TAG..."
  podman manifest push --tls-verify=false --all \
    "$REGISTRY/michal/$IMAGE:$TAG" "docker://$REGISTRY/michal/$IMAGE:$TAG"
else
  PLATFORM_FLAG=""
  if [ -n "$PLATFORM" ]; then
    PLATFORM_FLAG="--platform $PLATFORM"
    echo "==> Building $IMAGE image for $PLATFORM..."
  else
    echo "==> Building $IMAGE image (native arch)..."
  fi

  # $PLATFORM_FLAG is intentionally unquoted so it expands to nothing when empty.
  podman build $PLATFORM_FLAG -t "$IMAGE:$TAG" -f deploy/Dockerfile.mcplocal .

  echo "==> Tagging as $REGISTRY/michal/$IMAGE:$TAG..."
  podman tag "$IMAGE:$TAG" "$REGISTRY/michal/$IMAGE:$TAG"

  registry_login

  echo "==> Pushing to $REGISTRY/michal/$IMAGE:$TAG..."
  podman push --tls-verify=false "$REGISTRY/michal/$IMAGE:$TAG"
fi

# Ensure package is linked to the repository
source "$SCRIPT_DIR/link-package.sh"
link_package "container" "$IMAGE"

echo "==> Done!"
echo " Image: $REGISTRY/michal/$IMAGE:$TAG"
|
||||
169
scripts/demo-mcp-call.py
Executable file
169
scripts/demo-mcp-call.py
Executable file
@@ -0,0 +1,169 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Demo: make an MCP request against mcplocal using an McpToken bearer.
|
||||
|
||||
This is the standalone counterpart to `mcpctl test mcp` — intended to show
|
||||
exactly what a non-Claude client (e.g. a vLLM-driven agent) would do.
|
||||
|
||||
Usage:
|
||||
# Default: localhost mcplocal, sre project, token from $MCPCTL_TOKEN
|
||||
export MCPCTL_TOKEN=mcpctl_pat_...
|
||||
python3 scripts/demo-mcp-call.py
|
||||
|
||||
# Custom URL/project/tool
|
||||
python3 scripts/demo-mcp-call.py \\
|
||||
--url https://mcp.ad.itaz.eu \\
|
||||
--project sre \\
|
||||
--token "$MCPCTL_TOKEN" \\
|
||||
--tool begin_session \\
|
||||
--args '{"description":"hello"}'
|
||||
|
||||
No third-party deps — pure stdlib. Mirrors the protocol that
|
||||
src/shared/src/mcp-http/index.ts implements on the TypeScript side.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
import urllib.error
|
||||
import urllib.request
|
||||
from typing import Any
|
||||
|
||||
|
||||
def _parse_sse(body: str) -> list[dict[str, Any]]:
|
||||
"""Parse a text/event-stream body into a list of JSON-RPC messages."""
|
||||
out: list[dict[str, Any]] = []
|
||||
for line in body.splitlines():
|
||||
if line.startswith("data: "):
|
||||
try:
|
||||
out.append(json.loads(line[6:]))
|
||||
except json.JSONDecodeError:
|
||||
pass
|
||||
return out
|
||||
|
||||
|
||||
class McpSession:
    """Minimal MCP-over-HTTP client session.

    Sends JSON-RPC 2.0 requests as HTTP POSTs to a single endpoint,
    attaching an optional Bearer token, and tracks the ``mcp-session-id``
    header that the server returns on the first successful response.
    Responses may arrive as plain JSON or as a text/event-stream body.
    All failures terminate the script via SystemExit (this is a demo CLI).
    """

    def __init__(self, url: str, bearer: str | None = None, timeout: float = 30.0):
        # url: full MCP endpoint (e.g. .../projects/<name>/mcp)
        # bearer: raw mcpctl_pat_* token, or None for unauthenticated calls
        # timeout: per-request socket timeout in seconds
        self.url = url
        self.bearer = bearer
        self.timeout = timeout
        # Populated from the server's mcp-session-id response header after
        # the first successful request; echoed back on every later request.
        self.session_id: str | None = None
        # Monotonically increasing JSON-RPC request id.
        self._next_id = 1

    def _headers(self) -> dict[str, str]:
        """Build request headers: content negotiation + auth + session id."""
        h = {
            "Content-Type": "application/json",
            "Accept": "application/json, text/event-stream",
        }
        if self.bearer:
            h["Authorization"] = f"Bearer {self.bearer}"
        if self.session_id:
            h["mcp-session-id"] = self.session_id
        return h

    def send(self, method: str, params: dict[str, Any] | None = None) -> Any:
        """POST one JSON-RPC request and return its ``result``.

        Raises SystemExit on HTTP errors, transport errors, a missing
        response for our request id, or a JSON-RPC ``error`` member.
        """
        rid = self._next_id
        self._next_id += 1
        payload = {"jsonrpc": "2.0", "id": rid, "method": method, "params": params or {}}
        req = urllib.request.Request(
            self.url,
            data=json.dumps(payload).encode("utf-8"),
            headers=self._headers(),
            method="POST",
        )
        try:
            with urllib.request.urlopen(req, timeout=self.timeout) as resp:
                body = resp.read().decode("utf-8")
                content_type = resp.headers.get("content-type", "")
                # First successful response carries the session id.
                if self.session_id is None:
                    sid = resp.headers.get("mcp-session-id")
                    if sid:
                        self.session_id = sid
                # SSE bodies can carry several JSON-RPC messages; a plain
                # JSON body is exactly one.
                messages: list[dict[str, Any]] = (
                    _parse_sse(body) if "text/event-stream" in content_type else [json.loads(body)]
                )
        except urllib.error.HTTPError as e:
            err_body = e.read().decode("utf-8", errors="replace")
            raise SystemExit(f"HTTP {e.code} from {self.url}: {err_body}") from None
        except urllib.error.URLError as e:
            raise SystemExit(f"transport error reaching {self.url}: {e.reason}") from None

        # Pick the response matching our id; fall back to first message.
        matched = next((m for m in messages if m.get("id") == rid), messages[0] if messages else None)
        if matched is None:
            raise SystemExit(f"no response for {method}")
        if "error" in matched:
            err = matched["error"]
            raise SystemExit(f"MCP error {err.get('code')}: {err.get('message')}")
        return matched.get("result")

    def initialize(self) -> dict[str, Any]:
        """Perform the MCP ``initialize`` handshake; returns the server's result."""
        return self.send(
            "initialize",
            {
                "protocolVersion": "2024-11-05",
                "capabilities": {},
                "clientInfo": {"name": "demo-mcp-call.py", "version": "1.0.0"},
            },
        )

    def list_tools(self) -> list[dict[str, Any]]:
        """Return the server's tool list (empty list if the result is malformed)."""
        result = self.send("tools/list")
        return result.get("tools", []) if isinstance(result, dict) else []

    def call_tool(self, name: str, args: dict[str, Any]) -> Any:
        """Invoke tool ``name`` with ``args`` and return its result."""
        return self.send("tools/call", {"name": name, "arguments": args})
|
||||
|
||||
|
||||
def main() -> int:
    """CLI entry point: initialize an MCP session, list tools, optionally call one.

    Returns 0 on success. Exits via SystemExit on argparse errors, transport
    or HTTP failures (raised inside McpSession), or invalid --args JSON.
    """
    ap = argparse.ArgumentParser(description="Demo MCP request via McpToken bearer.")
    ap.add_argument("--url", default=os.environ.get("MCPGW_URL", "http://localhost:3200"),
                    help="Base URL of mcplocal (default: $MCPGW_URL or http://localhost:3200)")
    ap.add_argument("--project", default="sre",
                    help="Project name (default: sre). Must match the token's bound project.")
    ap.add_argument("--token", default=os.environ.get("MCPCTL_TOKEN"),
                    help="Raw mcpctl_pat_* bearer (default: $MCPCTL_TOKEN)")
    ap.add_argument("--tool", help="Optionally call a tool after tools/list")
    ap.add_argument("--args", default="{}", help="JSON-encoded arguments for --tool")
    ap.add_argument("--timeout", type=float, default=30.0)
    opts = ap.parse_args()

    # A bearer is mandatory; the env-var default may have left it unset.
    if not opts.token:
        ap.error("--token or $MCPCTL_TOKEN required")

    endpoint = f"{opts.url.rstrip('/')}/projects/{opts.project}/mcp"
    print(f"→ POST {endpoint}")
    # Only echo a short prefix — never print the full secret.
    print(f"  Bearer: {opts.token[:16]}…")
    print()

    sess = McpSession(endpoint, bearer=opts.token, timeout=opts.timeout)

    info = sess.initialize()
    server_info = info.get("serverInfo", {}) if isinstance(info, dict) else {}
    print(f"initialize: protocol={info.get('protocolVersion') if isinstance(info, dict) else '?'} "
          f"server={server_info.get('name', '?')}/{server_info.get('version', '?')} "
          f"sessionId={sess.session_id}")

    tools = sess.list_tools()
    print(f"tools/list: {len(tools)} tool(s)")
    for t in tools:
        # Use partition, not splitlines(): "".splitlines() is [] and the
        # old [0] index raised IndexError for tools with an empty description.
        desc = (t.get("description") or "").partition("\n")[0][:80]
        print(f"  - {t['name']} {desc}")

    if opts.tool:
        try:
            args = json.loads(opts.args)
        except json.JSONDecodeError as e:
            raise SystemExit(f"--args must be valid JSON: {e}")
        print(f"\ntools/call: {opts.tool} {args}")
        result = sess.call_tool(opts.tool, args)
        # Cap the dump so a huge tool result doesn't flood the terminal.
        print(json.dumps(result, indent=2)[:2000])

    return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main())
|
||||
@@ -1,4 +1,5 @@
|
||||
import http from 'node:http';
|
||||
import https from 'node:https';
|
||||
|
||||
export interface ApiClientOptions {
|
||||
baseUrl: string;
|
||||
@@ -31,16 +32,18 @@ function request<T>(method: string, url: string, timeout: number, body?: unknown
|
||||
if (token) {
|
||||
headers['Authorization'] = `Bearer ${token}`;
|
||||
}
|
||||
const isHttps = parsed.protocol === 'https:';
|
||||
const opts: http.RequestOptions = {
|
||||
hostname: parsed.hostname,
|
||||
port: parsed.port,
|
||||
port: parsed.port || (isHttps ? 443 : 80),
|
||||
path: parsed.pathname + parsed.search,
|
||||
method,
|
||||
timeout,
|
||||
headers,
|
||||
};
|
||||
|
||||
const req = http.request(opts, (res) => {
|
||||
const driver = isHttps ? https : http;
|
||||
const req = driver.request(opts, (res) => {
|
||||
const chunks: Buffer[] = [];
|
||||
res.on('data', (chunk: Buffer) => chunks.push(chunk));
|
||||
res.on('end', () => {
|
||||
|
||||
@@ -132,6 +132,15 @@ const ProjectSpecSchema = z.object({
|
||||
servers: z.array(z.string()).default([]),
|
||||
});
|
||||
|
||||
// Declarative spec for an McpToken in `mcpctl apply` YAML. Mirrors the
// POST /api/v1/mcptokens body; the apply loop treats tokens as immutable
// because the raw secret is minted exactly once.
const McpTokenSpecSchema = z.object({
  // Unique within a project; lowercase letters, digits, and hyphens only.
  name: z.string().min(1).max(100).regex(/^[a-z0-9-]+$/),
  // Project the token is bound to, by name (resolved to an id at apply time).
  project: z.string().min(1),
  description: z.string().default(''),
  // ISO8601 datetime, explicit null (no expiry), or omitted entirely.
  expiresAt: z.union([z.string().datetime(), z.null()]).optional(),
  // 'empty' = start with no bindings; 'clone' = snapshot the creator's perms.
  rbacMode: z.enum(['empty', 'clone']).default('empty'),
  // Additional role bindings layered on top of rbacMode.
  bindings: z.array(RbacRoleBindingSchema).default([]),
});
|
||||
|
||||
const ApplyConfigSchema = z.object({
|
||||
secrets: z.array(SecretSpecSchema).default([]),
|
||||
servers: z.array(ServerSpecSchema).default([]),
|
||||
@@ -143,6 +152,7 @@ const ApplyConfigSchema = z.object({
|
||||
rbacBindings: z.array(RbacBindingSpecSchema).default([]),
|
||||
rbac: z.array(RbacBindingSpecSchema).default([]),
|
||||
prompts: z.array(PromptSpecSchema).default([]),
|
||||
mcptokens: z.array(McpTokenSpecSchema).default([]),
|
||||
}).transform((data) => ({
|
||||
...data,
|
||||
// Merge rbac into rbacBindings so both keys work
|
||||
@@ -182,6 +192,7 @@ export function createApplyCommand(deps: ApplyCommandDeps): Command {
|
||||
if (config.serverattachments.length > 0) log(` ${config.serverattachments.length} serverattachment(s)`);
|
||||
if (config.rbacBindings.length > 0) log(` ${config.rbacBindings.length} rbacBinding(s)`);
|
||||
if (config.prompts.length > 0) log(` ${config.prompts.length} prompt(s)`);
|
||||
if (config.mcptokens.length > 0) log(` ${config.mcptokens.length} mcptoken(s)`);
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -217,6 +228,7 @@ const KIND_TO_RESOURCE: Record<string, string> = {
|
||||
prompt: 'prompts',
|
||||
promptrequest: 'promptrequests',
|
||||
serverattachment: 'serverattachments',
|
||||
mcptoken: 'mcptokens',
|
||||
};
|
||||
|
||||
/**
|
||||
@@ -529,6 +541,46 @@ async function applyConfig(client: ApiClient, config: ApplyConfig, log: (...args
|
||||
log(`Error applying prompt '${prompt.name}': ${err instanceof Error ? err.message : err}`);
|
||||
}
|
||||
}
|
||||
|
||||
// --- McpTokens ---
|
||||
// Apply semantics: tokens are immutable (their secret is minted once). If an
|
||||
// active token with the same name+project already exists we skip, logging the
|
||||
// state. Otherwise we create and log the raw token (shown exactly once).
|
||||
for (const tok of config.mcptokens) {
|
||||
try {
|
||||
const proj = await cachedFindByName('projects', tok.project);
|
||||
if (!proj) {
|
||||
log(`Error applying mcptoken '${tok.name}': project '${tok.project}' not found`);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Check if an active one already exists
|
||||
const existing = await client
|
||||
.get<Array<{ id: string; name: string; status: string }>>(`/api/v1/mcptokens?projectName=${encodeURIComponent(tok.project)}`)
|
||||
.catch(() => []);
|
||||
const active = existing.find((t) => t.name === tok.name && t.status === 'active');
|
||||
if (active) {
|
||||
log(`mcptoken '${tok.name}' already active in project '${tok.project}' — skipped (tokens are immutable)`);
|
||||
continue;
|
||||
}
|
||||
|
||||
const body: Record<string, unknown> = {
|
||||
name: tok.name,
|
||||
projectId: proj.id,
|
||||
description: tok.description,
|
||||
rbacMode: tok.rbacMode,
|
||||
bindings: tok.bindings,
|
||||
};
|
||||
if (tok.expiresAt !== undefined) body.expiresAt = tok.expiresAt;
|
||||
|
||||
const created = await withRetry(() => client.post<{ id: string; name: string; token: string }>('/api/v1/mcptokens', body));
|
||||
log(`Created mcptoken: ${tok.name} (project: ${tok.project})`);
|
||||
log(` token: ${created.token}`);
|
||||
log(' (raw token shown once — copy it now)');
|
||||
} catch (err) {
|
||||
log(`Error applying mcptoken '${tok.name}': ${err instanceof Error ? err.message : err}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async function findByField<T extends string>(client: ApiClient, resource: string, field: T, value: string): Promise<unknown | null> {
|
||||
|
||||
@@ -23,6 +23,9 @@ export interface AuditEvent {
|
||||
serverName: string | null;
|
||||
correlationId: string | null;
|
||||
parentEventId: string | null;
|
||||
userName?: string | null;
|
||||
tokenName?: string | null;
|
||||
tokenSha?: string | null;
|
||||
payload: Record<string, unknown>;
|
||||
}
|
||||
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { Command } from 'commander';
|
||||
import { type ApiClient, ApiError } from '../api-client.js';
|
||||
import { resolveNameOrId } from './shared.js';
|
||||
import { parseRoleBinding } from './rbac-bindings.js';
|
||||
export interface CreateCommandDeps {
|
||||
client: ApiClient;
|
||||
log: (...args: unknown[]) => void;
|
||||
@@ -10,6 +11,37 @@ function collect(value: string, prev: string[]): string[] {
|
||||
return [...prev, value];
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse a `--ttl` value.
|
||||
*
|
||||
* - `"never"` → null (no expiry)
|
||||
* - `"30d"`, `"12h"`, `"2w"`, `"90m"`, `"60s"` → ISO8601 string relative to now
|
||||
* - An ISO8601 datetime → returned as-is
|
||||
*/
|
||||
function parseTtl(value: string): string | null {
|
||||
const trimmed = value.trim();
|
||||
if (trimmed.toLowerCase() === 'never') return null;
|
||||
const match = trimmed.match(/^(\d+)([smhdw])$/i);
|
||||
if (match) {
|
||||
const amount = Number(match[1]);
|
||||
const unit = match[2]!.toLowerCase();
|
||||
const multipliers: Record<string, number> = {
|
||||
s: 1000,
|
||||
m: 60 * 1000,
|
||||
h: 3600 * 1000,
|
||||
d: 86400 * 1000,
|
||||
w: 7 * 86400 * 1000,
|
||||
};
|
||||
return new Date(Date.now() + amount * multipliers[unit]!).toISOString();
|
||||
}
|
||||
// Try to parse as ISO8601
|
||||
const parsed = new Date(trimmed);
|
||||
if (isNaN(parsed.getTime())) {
|
||||
throw new Error(`Invalid --ttl '${value}'. Expected 'never', a duration like '30d' / '12h', or an ISO8601 datetime.`);
|
||||
}
|
||||
return parsed.toISOString();
|
||||
}
|
||||
|
||||
interface ServerEnvEntry {
|
||||
name: string;
|
||||
value?: string;
|
||||
@@ -331,8 +363,12 @@ export function createCreateCommand(deps: CreateCommandDeps): Command {
|
||||
.description('Create an RBAC binding definition')
|
||||
.argument('<name>', 'RBAC binding name')
|
||||
.option('--subject <entry>', 'Subject as Kind:name (repeat for multiple)', collect, [])
|
||||
.option('--binding <entry>', 'Role binding as role:resource (e.g. edit:servers, run:projects)', collect, [])
|
||||
.option('--operation <action>', 'Operation binding (e.g. logs, backup)', collect, [])
|
||||
.option(
|
||||
'--roleBindings <entry>',
|
||||
'Role binding as key:value pairs, e.g. "role:view,resource:servers" or "role:view,resource:servers,name:my-ha" or "action:logs" (repeat for multiple)',
|
||||
collect,
|
||||
[],
|
||||
)
|
||||
.option('--force', 'Update if already exists')
|
||||
.action(async (name: string, opts) => {
|
||||
const subjects = (opts.subject as string[]).map((entry: string) => {
|
||||
@@ -343,24 +379,7 @@ export function createCreateCommand(deps: CreateCommandDeps): Command {
|
||||
return { kind: entry.slice(0, colonIdx), name: entry.slice(colonIdx + 1) };
|
||||
});
|
||||
|
||||
const roleBindings: Array<Record<string, string>> = [];
|
||||
|
||||
// Resource bindings from --binding flag (role:resource or role:resource:name)
|
||||
for (const entry of opts.binding as string[]) {
|
||||
const parts = entry.split(':');
|
||||
if (parts.length === 2) {
|
||||
roleBindings.push({ role: parts[0]!, resource: parts[1]! });
|
||||
} else if (parts.length === 3) {
|
||||
roleBindings.push({ role: parts[0]!, resource: parts[1]!, name: parts[2]! });
|
||||
} else {
|
||||
throw new Error(`Invalid binding format '${entry}'. Expected role:resource or role:resource:name (e.g. edit:servers, view:servers:my-ha)`);
|
||||
}
|
||||
}
|
||||
|
||||
// Operation bindings from --operation flag
|
||||
for (const action of opts.operation as string[]) {
|
||||
roleBindings.push({ role: 'run', action });
|
||||
}
|
||||
const roleBindings = (opts.roleBindings as string[]).map((entry: string) => parseRoleBinding(entry));
|
||||
|
||||
const body: Record<string, unknown> = {
|
||||
name,
|
||||
@@ -384,6 +403,83 @@ export function createCreateCommand(deps: CreateCommandDeps): Command {
|
||||
}
|
||||
});
|
||||
|
||||
// --- create mcptoken ---
|
||||
cmd.command('mcptoken')
|
||||
.description('Create a project-scoped API token for HTTP-mode mcplocal. The raw token is printed once.')
|
||||
.argument('<name>', 'Token name (unique within a project)')
|
||||
.requiredOption('-p, --project <name>', 'Project this token is bound to')
|
||||
.option('--rbac <mode>', "Base RBAC: 'empty' (default, no bindings) or 'clone' (snapshot creator's perms)", 'empty')
|
||||
.option(
|
||||
'--bind <entry>',
|
||||
'Additional role binding as key:value pairs, e.g. "role:view,resource:servers" or "action:logs" (repeat for multiple). Creator perms are the ceiling.',
|
||||
collect,
|
||||
[],
|
||||
)
|
||||
.option('--ttl <duration>', "Expiry: '30d', '12h', 'never', or an ISO8601 datetime")
|
||||
.option('--description <text>', 'Freeform description')
|
||||
.option('--force', 'Revoke any existing active token with this name, then create a new one')
|
||||
.action(async (name: string, opts) => {
|
||||
// Resolve project name → id (mcpd's create route accepts either, but resolve client-side for clearer errors)
|
||||
const projectId = await resolveNameOrId(client, 'projects', opts.project as string);
|
||||
|
||||
const bindings = (opts.bind as string[]).map((entry: string) => parseRoleBinding(entry));
|
||||
|
||||
const rbacMode = (opts.rbac as string).toLowerCase();
|
||||
if (rbacMode !== 'empty' && rbacMode !== 'clone') {
|
||||
throw new Error(`--rbac must be 'empty' or 'clone' (got '${opts.rbac as string}')`);
|
||||
}
|
||||
|
||||
let expiresAt: string | null | undefined;
|
||||
if (opts.ttl !== undefined) {
|
||||
expiresAt = parseTtl(opts.ttl as string);
|
||||
}
|
||||
|
||||
const body: Record<string, unknown> = {
|
||||
name,
|
||||
projectId,
|
||||
rbacMode,
|
||||
bindings,
|
||||
};
|
||||
if (expiresAt !== undefined) body.expiresAt = expiresAt;
|
||||
if (opts.description !== undefined) body.description = opts.description;
|
||||
|
||||
type Created = {
|
||||
id: string;
|
||||
name: string;
|
||||
projectName: string;
|
||||
tokenPrefix: string;
|
||||
token: string;
|
||||
expiresAt: string | null;
|
||||
};
|
||||
|
||||
const doCreate = async (): Promise<Created> => client.post<Created>('/api/v1/mcptokens', body);
|
||||
|
||||
let created: Created;
|
||||
try {
|
||||
created = await doCreate();
|
||||
} catch (err) {
|
||||
if (err instanceof ApiError && err.status === 409 && opts.force) {
|
||||
// Find the existing active token by name+project and revoke it, then retry.
|
||||
const existing = (await client.get<Array<{ id: string; name: string }>>(
|
||||
`/api/v1/mcptokens?projectName=${encodeURIComponent(opts.project as string)}`,
|
||||
)).find((r) => r.name === name);
|
||||
if (!existing) throw err;
|
||||
await client.post(`/api/v1/mcptokens/${existing.id}/revoke`, {});
|
||||
created = await doCreate();
|
||||
} else {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
log(`mcptoken '${created.name}' created (project: ${created.projectName}, id: ${created.id})`);
|
||||
log('');
|
||||
log('Copy this token now — it will NOT be shown again:');
|
||||
log('');
|
||||
log(` ${created.token}`);
|
||||
log('');
|
||||
log(`Export it with: export MCPCTL_TOKEN=${created.token}`);
|
||||
});
|
||||
|
||||
// --- create prompt ---
|
||||
cmd.command('prompt')
|
||||
.description('Create an approved prompt')
|
||||
|
||||
@@ -29,6 +29,27 @@ export function createDeleteCommand(deps: DeleteCommandDeps): Command {
|
||||
return;
|
||||
}
|
||||
|
||||
// Mcptokens: names are scoped to a project, so require --project unless the caller passes a CUID
|
||||
if (resource === 'mcptokens') {
|
||||
let tokenId: string;
|
||||
if (/^c[a-z0-9]{24}/.test(idOrName)) {
|
||||
tokenId = idOrName;
|
||||
} else {
|
||||
if (!opts.project) {
|
||||
throw new Error('--project is required to delete an mcptoken by name (or pass the id).');
|
||||
}
|
||||
const items = await client.get<Array<{ id: string; name: string }>>(
|
||||
`/api/v1/mcptokens?projectName=${encodeURIComponent(opts.project)}`,
|
||||
);
|
||||
const match = items.find((i) => i.name === idOrName);
|
||||
if (!match) throw new Error(`mcptoken '${idOrName}' not found in project '${opts.project}'`);
|
||||
tokenId = match.id;
|
||||
}
|
||||
await client.delete(`/api/v1/mcptokens/${tokenId}`);
|
||||
log(`mcptoken '${idOrName}' deleted.`);
|
||||
return;
|
||||
}
|
||||
|
||||
// Resolve name → ID for any resource type
|
||||
let id: string;
|
||||
try {
|
||||
|
||||
@@ -503,6 +503,42 @@ function formatRbacDetail(rbac: Record<string, unknown>): string {
|
||||
return lines.join('\n');
|
||||
}
|
||||
|
||||
function formatMcpTokenDetail(token: Record<string, unknown>, allRbac: RbacDef[]): string {
|
||||
const lines: string[] = [];
|
||||
lines.push(`=== McpToken: ${token.name} ===`);
|
||||
lines.push(`${pad('Name:')}${token.name}`);
|
||||
lines.push(`${pad('Project:')}${token.projectName ?? token.projectId ?? '-'}`);
|
||||
lines.push(`${pad('Status:')}${token.status ?? '-'}`);
|
||||
lines.push(`${pad('Prefix:')}${token.tokenPrefix ?? '-'}`);
|
||||
if (token.description) lines.push(`${pad('Description:')}${token.description}`);
|
||||
lines.push(`${pad('Owner:')}${token.ownerEmail ?? token.ownerId ?? '-'}`);
|
||||
lines.push(`${pad('Created:')}${token.createdAt ?? '-'}`);
|
||||
lines.push(`${pad('Last Used:')}${token.lastUsedAt ?? 'never'}`);
|
||||
lines.push(`${pad('Expires:')}${token.expiresAt ?? 'never'}`);
|
||||
if (token.revokedAt) lines.push(`${pad('Revoked At:')}${token.revokedAt}`);
|
||||
|
||||
// Find the auto-created RbacDefinition (subject McpToken:<sha>) to surface bindings.
|
||||
// We don't know the sha from the describe response — match by convention: name 'mcptoken-<id>'.
|
||||
const rbacDef = allRbac.find((r) => r.name === `mcptoken-${token.id as string}`);
|
||||
if (rbacDef && Array.isArray(rbacDef.roleBindings) && rbacDef.roleBindings.length > 0) {
|
||||
lines.push('');
|
||||
lines.push('Bindings:');
|
||||
for (const b of rbacDef.roleBindings as Array<{ role: string; resource?: string; action?: string; name?: string }>) {
|
||||
if (b.action !== undefined) {
|
||||
lines.push(` run ${b.action}`);
|
||||
} else if (b.resource !== undefined) {
|
||||
lines.push(` ${b.role} ${b.resource}${b.name !== undefined ? `/${b.name}` : ''}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
lines.push('');
|
||||
lines.push('Metadata:');
|
||||
lines.push(` ${pad('ID:', 12)}${token.id}`);
|
||||
|
||||
return lines.join('\n');
|
||||
}
|
||||
|
||||
async function formatPromptDetail(prompt: Record<string, unknown>, client?: ApiClient): Promise<string> {
|
||||
const lines: string[] = [];
|
||||
lines.push(`=== Prompt: ${prompt.name} ===`);
|
||||
@@ -801,6 +837,14 @@ export function createDescribeCommand(deps: DescribeCommandDeps): Command {
|
||||
case 'prompts':
|
||||
deps.log(await formatPromptDetail(item, deps.client));
|
||||
break;
|
||||
case 'mcptokens': {
|
||||
// Fetch the auto-created RbacDefinition (if any) so bindings are visible in describe.
|
||||
const rbacForToken = await deps.client
|
||||
.get<RbacDef[]>('/api/v1/rbac')
|
||||
.catch(() => [] as RbacDef[]);
|
||||
deps.log(formatMcpTokenDetail(item, rbacForToken));
|
||||
break;
|
||||
}
|
||||
default:
|
||||
deps.log(formatGenericDetail(item));
|
||||
}
|
||||
|
||||
@@ -119,6 +119,27 @@ const rbacColumns: Column<RbacRow>[] = [
|
||||
{ header: 'ID', key: 'id' },
|
||||
];
|
||||
|
||||
// One row of `mcpctl get mcptokens`, as returned by GET /api/v1/mcptokens.
interface McpTokenRow {
  id: string;
  name: string;
  projectName: string;
  // Short identifying prefix of the token; the raw secret is only shown
  // once, at create time.
  tokenPrefix: string;
  createdAt: string;
  lastUsedAt: string | null; // null = never used
  expiresAt: string | null; // null = never expires
  status: 'active' | 'revoked' | 'expired';
}

// Column layout for the mcptokens table view. Timestamps render in the
// caller's locale; nullable fields fall back to '-' / 'never'.
const mcpTokenColumns: Column<McpTokenRow>[] = [
  { header: 'NAME', key: 'name', width: 24 },
  { header: 'PROJECT', key: 'projectName', width: 20 },
  { header: 'PREFIX', key: 'tokenPrefix', width: 18 },
  { header: 'CREATED', key: (r) => new Date(r.createdAt).toLocaleString(), width: 20 },
  { header: 'LAST USED', key: (r) => r.lastUsedAt ? new Date(r.lastUsedAt).toLocaleString() : '-', width: 20 },
  { header: 'EXPIRES', key: (r) => r.expiresAt ? new Date(r.expiresAt).toLocaleString() : 'never', width: 20 },
  { header: 'STATUS', key: 'status', width: 10 },
];
|
||||
|
||||
const secretColumns: Column<SecretRow>[] = [
|
||||
{ header: 'NAME', key: 'name' },
|
||||
{ header: 'KEYS', key: (r) => Object.keys(r.data).join(', ') || '-', width: 40 },
|
||||
@@ -242,6 +263,8 @@ function getColumnsForResource(resource: string): Column<Record<string, unknown>
|
||||
return serverAttachmentColumns as unknown as Column<Record<string, unknown>>[];
|
||||
case 'proxymodels':
|
||||
return proxymodelColumns as unknown as Column<Record<string, unknown>>[];
|
||||
case 'mcptokens':
|
||||
return mcpTokenColumns as unknown as Column<Record<string, unknown>>[];
|
||||
default:
|
||||
return [
|
||||
{ header: 'ID', key: 'id' as keyof Record<string, unknown> },
|
||||
@@ -263,6 +286,7 @@ const RESOURCE_KIND: Record<string, string> = {
|
||||
prompts: 'prompt',
|
||||
promptrequests: 'promptrequest',
|
||||
serverattachments: 'serverattachment',
|
||||
mcptokens: 'mcptoken',
|
||||
};
|
||||
|
||||
/**
|
||||
|
||||
49
src/cli/src/commands/rbac-bindings.ts
Normal file
49
src/cli/src/commands/rbac-bindings.ts
Normal file
@@ -0,0 +1,49 @@
|
||||
/**
|
||||
* Parse one `--roleBindings <kv>` entry into a role-binding object the API accepts.
|
||||
*
|
||||
* Accepted forms:
|
||||
* role:view,resource:servers → resource binding (unscoped)
|
||||
* role:view,resource:servers,name:my-ha → resource binding (name-scoped)
|
||||
* action:logs → operation binding (role:run is implied)
|
||||
*
|
||||
* Whitespace around keys/values is trimmed. Keys must be one of: role, resource, name, action.
|
||||
*/
|
||||
export type RoleBindingEntry =
|
||||
| { role: string; resource: string; name?: string }
|
||||
| { role: 'run'; action: string };
|
||||
|
||||
export function parseRoleBinding(entry: string): RoleBindingEntry {
|
||||
const pairs: Record<string, string> = {};
|
||||
for (const part of entry.split(',')) {
|
||||
const colonIdx = part.indexOf(':');
|
||||
if (colonIdx === -1) {
|
||||
throw new Error(`Invalid roleBindings entry '${entry}': expected key:value pairs separated by commas`);
|
||||
}
|
||||
const key = part.slice(0, colonIdx).trim();
|
||||
const value = part.slice(colonIdx + 1).trim();
|
||||
if (!key || !value) {
|
||||
throw new Error(`Invalid roleBindings entry '${entry}': empty key or value`);
|
||||
}
|
||||
if (!['role', 'resource', 'name', 'action'].includes(key)) {
|
||||
throw new Error(`Invalid roleBindings key '${key}' in '${entry}': expected one of role, resource, name, action`);
|
||||
}
|
||||
pairs[key] = value;
|
||||
}
|
||||
|
||||
// Operation binding: presence of `action:` implies role:run
|
||||
if (pairs['action'] !== undefined) {
|
||||
if (pairs['resource'] !== undefined || pairs['name'] !== undefined) {
|
||||
throw new Error(`Invalid roleBindings entry '${entry}': 'action' cannot be combined with 'resource' or 'name'`);
|
||||
}
|
||||
return { role: 'run', action: pairs['action'] };
|
||||
}
|
||||
|
||||
// Resource binding
|
||||
if (pairs['role'] === undefined || pairs['resource'] === undefined) {
|
||||
throw new Error(`Invalid roleBindings entry '${entry}': need either 'action:…' or both 'role:…,resource:…'`);
|
||||
}
|
||||
if (pairs['name'] !== undefined) {
|
||||
return { role: pairs['role'], resource: pairs['resource'], name: pairs['name'] };
|
||||
}
|
||||
return { role: pairs['role'], resource: pairs['resource'] };
|
||||
}
|
||||
@@ -27,6 +27,10 @@ export const RESOURCE_ALIASES: Record<string, string> = {
|
||||
proxymodel: 'proxymodels',
|
||||
proxymodels: 'proxymodels',
|
||||
pm: 'proxymodels',
|
||||
mcptoken: 'mcptokens',
|
||||
mcptokens: 'mcptokens',
|
||||
token: 'mcptokens',
|
||||
tokens: 'mcptokens',
|
||||
all: 'all',
|
||||
};
|
||||
|
||||
@@ -72,6 +76,21 @@ export function stripInternalFields(obj: Record<string, unknown>): Record<string
|
||||
delete result[key];
|
||||
}
|
||||
|
||||
// McpToken-specific: promote projectName → project; drop secret/derived fields
|
||||
if ('tokenHash' in result || 'tokenPrefix' in result) {
|
||||
delete result.tokenHash;
|
||||
delete result.tokenPrefix;
|
||||
delete result.lastUsedAt;
|
||||
delete result.revokedAt;
|
||||
delete result.status;
|
||||
delete result.ownerEmail;
|
||||
if (typeof result.projectName === 'string') {
|
||||
result.project = result.projectName;
|
||||
delete result.projectName;
|
||||
delete result.projectId;
|
||||
}
|
||||
}
|
||||
|
||||
// Rename linkTarget → link for cleaner YAML
|
||||
if ('linkTarget' in result) {
|
||||
result.link = result.linkTarget;
|
||||
|
||||
@@ -1,5 +1,11 @@
|
||||
import { Command } from 'commander';
|
||||
import http from 'node:http';
|
||||
import https from 'node:https';
|
||||
|
||||
/** Pick the http or https driver based on the URL scheme. */
|
||||
function httpDriverFor(url: string): typeof http | typeof https {
|
||||
return new URL(url).protocol === 'https:' ? https : http;
|
||||
}
|
||||
import { loadConfig } from '../config/index.js';
|
||||
import type { ConfigLoaderDeps } from '../config/index.js';
|
||||
import { loadCredentials } from '../auth/index.js';
|
||||
@@ -45,10 +51,16 @@ export interface StatusCommandDeps {
|
||||
|
||||
function defaultCheckHealth(url: string): Promise<boolean> {
|
||||
return new Promise((resolve) => {
|
||||
const req = http.get(`${url}/health`, { timeout: 3000 }, (res) => {
|
||||
let req: http.ClientRequest;
|
||||
try {
|
||||
req = httpDriverFor(url).get(`${url}/health`, { timeout: 3000 }, (res) => {
|
||||
resolve(res.statusCode !== undefined && res.statusCode >= 200 && res.statusCode < 400);
|
||||
res.resume();
|
||||
});
|
||||
} catch {
|
||||
resolve(false);
|
||||
return;
|
||||
}
|
||||
req.on('error', () => resolve(false));
|
||||
req.on('timeout', () => {
|
||||
req.destroy();
|
||||
@@ -63,7 +75,9 @@ function defaultCheckHealth(url: string): Promise<boolean> {
|
||||
*/
|
||||
function defaultCheckLlm(mcplocalUrl: string): Promise<string> {
|
||||
return new Promise((resolve) => {
|
||||
const req = http.get(`${mcplocalUrl}/llm/health`, { timeout: 45000 }, (res) => {
|
||||
let req: http.ClientRequest;
|
||||
try {
|
||||
req = httpDriverFor(mcplocalUrl).get(`${mcplocalUrl}/llm/health`, { timeout: 45000 }, (res) => {
|
||||
const chunks: Buffer[] = [];
|
||||
res.on('data', (chunk: Buffer) => chunks.push(chunk));
|
||||
res.on('end', () => {
|
||||
@@ -83,6 +97,10 @@ function defaultCheckLlm(mcplocalUrl: string): Promise<string> {
|
||||
}
|
||||
});
|
||||
});
|
||||
} catch {
|
||||
resolve('mcplocal unreachable');
|
||||
return;
|
||||
}
|
||||
req.on('error', () => resolve('mcplocal unreachable'));
|
||||
req.on('timeout', () => { req.destroy(); resolve('timeout'); });
|
||||
});
|
||||
@@ -90,7 +108,9 @@ function defaultCheckLlm(mcplocalUrl: string): Promise<string> {
|
||||
|
||||
function defaultFetchModels(mcplocalUrl: string): Promise<string[]> {
|
||||
return new Promise((resolve) => {
|
||||
const req = http.get(`${mcplocalUrl}/llm/models`, { timeout: 5000 }, (res) => {
|
||||
let req: http.ClientRequest;
|
||||
try {
|
||||
req = httpDriverFor(mcplocalUrl).get(`${mcplocalUrl}/llm/models`, { timeout: 5000 }, (res) => {
|
||||
const chunks: Buffer[] = [];
|
||||
res.on('data', (chunk: Buffer) => chunks.push(chunk));
|
||||
res.on('end', () => {
|
||||
@@ -102,6 +122,10 @@ function defaultFetchModels(mcplocalUrl: string): Promise<string[]> {
|
||||
}
|
||||
});
|
||||
});
|
||||
} catch {
|
||||
resolve([]);
|
||||
return;
|
||||
}
|
||||
req.on('error', () => resolve([]));
|
||||
req.on('timeout', () => { req.destroy(); resolve([]); });
|
||||
});
|
||||
@@ -109,7 +133,9 @@ function defaultFetchModels(mcplocalUrl: string): Promise<string[]> {
|
||||
|
||||
function defaultFetchProviders(mcplocalUrl: string): Promise<ProvidersInfo | null> {
|
||||
return new Promise((resolve) => {
|
||||
const req = http.get(`${mcplocalUrl}/llm/providers`, { timeout: 5000 }, (res) => {
|
||||
let req: http.ClientRequest;
|
||||
try {
|
||||
req = httpDriverFor(mcplocalUrl).get(`${mcplocalUrl}/llm/providers`, { timeout: 5000 }, (res) => {
|
||||
const chunks: Buffer[] = [];
|
||||
res.on('data', (chunk: Buffer) => chunks.push(chunk));
|
||||
res.on('end', () => {
|
||||
@@ -121,6 +147,10 @@ function defaultFetchProviders(mcplocalUrl: string): Promise<ProvidersInfo | nul
|
||||
}
|
||||
});
|
||||
});
|
||||
} catch {
|
||||
resolve(null);
|
||||
return;
|
||||
}
|
||||
req.on('error', () => resolve(null));
|
||||
req.on('timeout', () => { req.destroy(); resolve(null); });
|
||||
});
|
||||
|
||||
176
src/cli/src/commands/test-mcp.ts
Normal file
176
src/cli/src/commands/test-mcp.ts
Normal file
@@ -0,0 +1,176 @@
|
||||
import { Command } from 'commander';
|
||||
import { McpHttpSession, McpProtocolError, McpTransportError, deriveBaseUrl, mcpHealthCheck } from '@mcpctl/shared';
|
||||
|
||||
/** Injectable dependencies for the `mcpctl test` command. */
export interface TestMcpCommandDeps {
  /** Output sink; production wiring passes `console.log`. */
  log: (...args: unknown[]) => void;
  /**
   * Inject a session factory for testing. The default creates a real `McpHttpSession`.
   */
  createSession?: (url: string, opts: { bearer?: string; timeoutMs?: number }) => {
    initialize(): Promise<unknown>;
    listTools(): Promise<Array<{ name: string }>>;
    callTool(name: string, args: Record<string, unknown>): Promise<unknown>;
    close(): Promise<void>;
  };
  /** Preflight probe against the endpoint's base URL; default is `mcpHealthCheck`. */
  healthCheck?: (baseUrl: string) => Promise<boolean>;
}

/**
 * Process exit code of `mcpctl test mcp`:
 * 0 = PASS, 1 = transport/auth failure, 2 = contract failure
 * (missing expected tools or a tool call returning isError).
 */
export type TestMcpExitCode = 0 | 1 | 2;

/** Structured result of one `test mcp` run; emitted as text or JSON. */
export interface TestMcpReport {
  /** The MCP endpoint URL that was tested. */
  url: string;
  /** 'skipped' when --no-health was given. */
  health: 'ok' | 'fail' | 'skipped';
  initialize: 'ok' | 'fail';
  /** Tool names from tools/list, or null if listing was never reached. */
  tools: string[] | null;
  /** Present only when --tool was invoked. */
  toolCall?: { name: string; result: unknown; isError?: boolean };
  /** Present only when --expect-tools named tools that were absent. */
  missingTools?: string[];
  exitCode: TestMcpExitCode;
  /** Human-readable failure summary, absent on success. */
  error?: string;
}
|
||||
|
||||
export function createTestCommand(deps: TestMcpCommandDeps): Command {
|
||||
const { log } = deps;
|
||||
const createSession = deps.createSession ?? ((url, opts) => new McpHttpSession(url, opts));
|
||||
const healthCheck = deps.healthCheck ?? mcpHealthCheck;
|
||||
|
||||
const test = new Command('test').description('Utilities for testing MCP endpoints and config');
|
||||
|
||||
test
|
||||
.command('mcp')
|
||||
.description('Verify a Streamable-HTTP MCP endpoint: health, initialize, tools/list, optionally call a tool.')
|
||||
.argument('<url>', 'Full URL of the MCP endpoint (e.g. https://mcp.example.com/projects/foo/mcp)')
|
||||
.option('--token <bearer>', 'Bearer token (also reads $MCPCTL_TOKEN)')
|
||||
.option('--tool <name>', 'Invoke a specific tool after listing')
|
||||
.option('--args <json>', 'JSON-encoded arguments for --tool', '{}')
|
||||
.option('--expect-tools <list>', 'Comma-separated tool names that MUST appear; fails otherwise')
|
||||
.option('--timeout <seconds>', 'Per-request timeout in seconds', '10')
|
||||
.option('-o, --output <format>', 'Output format: text or json', 'text')
|
||||
.option('--no-health', 'Skip the /healthz preflight check')
|
||||
.action(async (url: string, opts: {
|
||||
token?: string;
|
||||
tool?: string;
|
||||
args: string;
|
||||
expectTools?: string;
|
||||
timeout: string;
|
||||
output: string;
|
||||
health: boolean;
|
||||
}) => {
|
||||
const bearer = opts.token ?? process.env.MCPCTL_TOKEN;
|
||||
const timeoutMs = Number(opts.timeout) * 1000;
|
||||
if (!Number.isFinite(timeoutMs) || timeoutMs <= 0) {
|
||||
throw new Error(`--timeout must be a positive number of seconds (got '${opts.timeout}')`);
|
||||
}
|
||||
|
||||
const report: TestMcpReport = {
|
||||
url,
|
||||
health: 'skipped',
|
||||
initialize: 'fail',
|
||||
tools: null,
|
||||
exitCode: 1,
|
||||
};
|
||||
|
||||
// 1. Health preflight
|
||||
if (opts.health !== false) {
|
||||
const baseUrl = deriveBaseUrl(url);
|
||||
const ok = await healthCheck(baseUrl);
|
||||
report.health = ok ? 'ok' : 'fail';
|
||||
if (!ok) {
|
||||
report.error = `healthz preflight failed at ${baseUrl}/healthz`;
|
||||
return emit(report, opts.output, log);
|
||||
}
|
||||
}
|
||||
|
||||
const sessionOpts: { bearer?: string; timeoutMs: number } = { timeoutMs };
|
||||
if (bearer !== undefined) sessionOpts.bearer = bearer;
|
||||
const session = createSession(url, sessionOpts);
|
||||
|
||||
try {
|
||||
// 2. Initialize
|
||||
await session.initialize();
|
||||
report.initialize = 'ok';
|
||||
|
||||
// 3. tools/list
|
||||
const tools = await session.listTools();
|
||||
report.tools = tools.map((t) => t.name);
|
||||
|
||||
// 4. --expect-tools check
|
||||
if (opts.expectTools !== undefined && opts.expectTools.trim() !== '') {
|
||||
const expected = opts.expectTools.split(',').map((s) => s.trim()).filter(Boolean);
|
||||
const missing = expected.filter((name) => !report.tools!.includes(name));
|
||||
if (missing.length > 0) {
|
||||
report.missingTools = missing;
|
||||
report.exitCode = 2;
|
||||
report.error = `Missing tools: ${missing.join(', ')}`;
|
||||
return emit(report, opts.output, log);
|
||||
}
|
||||
}
|
||||
|
||||
// 5. Optional --tool call
|
||||
if (opts.tool !== undefined) {
|
||||
let parsedArgs: Record<string, unknown> = {};
|
||||
try {
|
||||
parsedArgs = JSON.parse(opts.args) as Record<string, unknown>;
|
||||
} catch {
|
||||
throw new Error(`--args must be valid JSON (got '${opts.args}')`);
|
||||
}
|
||||
const result = await session.callTool(opts.tool, parsedArgs);
|
||||
const toolCall: TestMcpReport['toolCall'] = { name: opts.tool, result };
|
||||
if (typeof result === 'object' && result !== null && 'isError' in result) {
|
||||
toolCall.isError = Boolean((result as { isError?: boolean }).isError);
|
||||
}
|
||||
report.toolCall = toolCall;
|
||||
if (toolCall.isError) {
|
||||
report.exitCode = 2;
|
||||
report.error = `Tool '${opts.tool}' returned isError=true`;
|
||||
return emit(report, opts.output, log);
|
||||
}
|
||||
}
|
||||
|
||||
report.exitCode = 0;
|
||||
} catch (err) {
|
||||
if (err instanceof McpProtocolError) {
|
||||
report.exitCode = 1;
|
||||
report.error = `protocol error ${err.code}: ${err.message}`;
|
||||
} else if (err instanceof McpTransportError) {
|
||||
report.exitCode = 1;
|
||||
report.error = `transport error (HTTP ${err.status}): ${err.message}`;
|
||||
} else {
|
||||
report.exitCode = 1;
|
||||
report.error = err instanceof Error ? err.message : String(err);
|
||||
}
|
||||
} finally {
|
||||
await session.close().catch(() => { /* best-effort */ });
|
||||
}
|
||||
|
||||
return emit(report, opts.output, log);
|
||||
});
|
||||
|
||||
return test;
|
||||
}
|
||||
|
||||
function emit(report: TestMcpReport, output: string, log: (...args: unknown[]) => void): void {
|
||||
if (output === 'json') {
|
||||
log(JSON.stringify(report, null, 2));
|
||||
} else {
|
||||
log(`URL: ${report.url}`);
|
||||
log(`Health: ${report.health}`);
|
||||
log(`Initialize: ${report.initialize}`);
|
||||
if (report.tools !== null) {
|
||||
log(`Tools (${report.tools.length}): ${report.tools.slice(0, 10).join(', ')}${report.tools.length > 10 ? `, …(+${report.tools.length - 10})` : ''}`);
|
||||
}
|
||||
if (report.missingTools !== undefined) {
|
||||
log(`Missing: ${report.missingTools.join(', ')}`);
|
||||
}
|
||||
if (report.toolCall !== undefined) {
|
||||
log(`Tool call: ${report.toolCall.name} → ${report.toolCall.isError ? 'ERROR' : 'ok'}`);
|
||||
}
|
||||
if (report.error !== undefined) {
|
||||
log(`Error: ${report.error}`);
|
||||
}
|
||||
log(`Result: ${report.exitCode === 0 ? 'PASS' : report.exitCode === 2 ? 'CONTRACT FAIL' : 'TRANSPORT/AUTH FAIL'}`);
|
||||
}
|
||||
|
||||
if (report.exitCode !== 0) {
|
||||
process.exitCode = report.exitCode;
|
||||
}
|
||||
}
|
||||
@@ -8,6 +8,7 @@ import { createDescribeCommand } from './commands/describe.js';
|
||||
import { createDeleteCommand } from './commands/delete.js';
|
||||
import { createLogsCommand } from './commands/logs.js';
|
||||
import { createApplyCommand } from './commands/apply.js';
|
||||
import { createTestCommand } from './commands/test-mcp.js';
|
||||
import { createCreateCommand } from './commands/create.js';
|
||||
import { createEditCommand } from './commands/edit.js';
|
||||
import { createBackupCommand } from './commands/backup.js';
|
||||
@@ -99,6 +100,25 @@ export function createProgram(): Command {
|
||||
}
|
||||
}
|
||||
|
||||
// --project scoping for mcptokens
|
||||
if (!nameOrId && resource === 'mcptokens' && projectName) {
|
||||
return client.get<unknown[]>(`/api/v1/mcptokens?projectName=${encodeURIComponent(projectName)}`);
|
||||
}
|
||||
|
||||
// Name-based lookup for mcptokens: names are unique only within a project
|
||||
if (nameOrId && resource === 'mcptokens' && !/^c[a-z0-9]{24}/.test(nameOrId)) {
|
||||
if (!projectName) {
|
||||
throw new Error('mcptoken names are scoped to a project — pass --project <name> or use the token id (cuid)');
|
||||
}
|
||||
const items = await client.get<Array<{ id: string; name: string }>>(
|
||||
`/api/v1/mcptokens?projectName=${encodeURIComponent(projectName)}`,
|
||||
);
|
||||
const match = items.find((i) => i.name === nameOrId);
|
||||
if (!match) throw new Error(`mcptoken '${nameOrId}' not found in project '${projectName}'`);
|
||||
const item = await client.get(`/api/v1/mcptokens/${match.id}`);
|
||||
return [item];
|
||||
}
|
||||
|
||||
if (nameOrId) {
|
||||
// Glob pattern — use query param filtering
|
||||
if (nameOrId.includes('*')) {
|
||||
@@ -132,6 +152,19 @@ export function createProgram(): Command {
|
||||
return client.get(`/api/v1/${resource}/${match.id as string}`);
|
||||
}
|
||||
|
||||
// Mcptokens: names are project-scoped. CUIDs pass straight through.
|
||||
if (resource === 'mcptokens' && !/^c[a-z0-9]{24}/.test(nameOrId)) {
|
||||
if (!projectName) {
|
||||
throw new Error('mcptoken names are scoped to a project — pass --project <name> or use the token id (cuid)');
|
||||
}
|
||||
const items = await client.get<Array<Record<string, unknown>>>(
|
||||
`/api/v1/mcptokens?projectName=${encodeURIComponent(projectName)}`,
|
||||
);
|
||||
const match = items.find((item) => item.name === nameOrId);
|
||||
if (!match) throw new Error(`mcptoken '${nameOrId}' not found in project '${projectName}'`);
|
||||
return client.get(`/api/v1/mcptokens/${match.id as string}`);
|
||||
}
|
||||
|
||||
let id: string;
|
||||
try {
|
||||
id = await resolveNameOrId(client, resource, nameOrId);
|
||||
@@ -212,6 +245,10 @@ export function createProgram(): Command {
|
||||
mcplocalUrl: config.mcplocalUrl,
|
||||
}));
|
||||
|
||||
program.addCommand(createTestCommand({
|
||||
log: (...args) => console.log(...args),
|
||||
}));
|
||||
|
||||
return program;
|
||||
}
|
||||
|
||||
|
||||
@@ -318,8 +318,8 @@ describe('create command', () => {
|
||||
'rbac', 'developers',
|
||||
'--subject', 'User:alice@test.com',
|
||||
'--subject', 'Group:dev-team',
|
||||
'--binding', 'edit:servers',
|
||||
'--binding', 'view:instances',
|
||||
'--roleBindings', 'role:edit,resource:servers',
|
||||
'--roleBindings', 'role:view,resource:instances',
|
||||
], { from: 'user' });
|
||||
|
||||
expect(client.post).toHaveBeenCalledWith('/api/v1/rbac', {
|
||||
@@ -342,7 +342,7 @@ describe('create command', () => {
|
||||
await cmd.parseAsync([
|
||||
'rbac', 'admins',
|
||||
'--subject', 'User:admin@test.com',
|
||||
'--binding', 'edit:*',
|
||||
'--roleBindings', 'role:edit,resource:*',
|
||||
], { from: 'user' });
|
||||
|
||||
expect(client.post).toHaveBeenCalledWith('/api/v1/rbac', {
|
||||
@@ -371,18 +371,18 @@ describe('create command', () => {
|
||||
).rejects.toThrow('Invalid subject format');
|
||||
});
|
||||
|
||||
it('throws on invalid binding format', async () => {
|
||||
it('throws on invalid roleBindings format', async () => {
|
||||
const cmd = createCreateCommand({ client, log });
|
||||
await expect(
|
||||
cmd.parseAsync(['rbac', 'bad', '--binding', 'no-colon'], { from: 'user' }),
|
||||
).rejects.toThrow('Invalid binding format');
|
||||
cmd.parseAsync(['rbac', 'bad', '--roleBindings', 'no-colon'], { from: 'user' }),
|
||||
).rejects.toThrow(/Invalid roleBindings/);
|
||||
});
|
||||
|
||||
it('throws on 409 without --force', async () => {
|
||||
vi.mocked(client.post).mockRejectedValueOnce(new ApiError(409, '{"error":"RBAC already exists"}'));
|
||||
const cmd = createCreateCommand({ client, log });
|
||||
await expect(
|
||||
cmd.parseAsync(['rbac', 'developers', '--subject', 'User:a@b.com', '--binding', 'edit:servers'], { from: 'user' }),
|
||||
cmd.parseAsync(['rbac', 'developers', '--subject', 'User:a@b.com', '--roleBindings', 'role:edit,resource:servers'], { from: 'user' }),
|
||||
).rejects.toThrow('API error 409');
|
||||
});
|
||||
|
||||
@@ -393,7 +393,7 @@ describe('create command', () => {
|
||||
await cmd.parseAsync([
|
||||
'rbac', 'developers',
|
||||
'--subject', 'User:new@test.com',
|
||||
'--binding', 'edit:*',
|
||||
'--roleBindings', 'role:edit,resource:*',
|
||||
'--force',
|
||||
], { from: 'user' });
|
||||
|
||||
@@ -404,15 +404,15 @@ describe('create command', () => {
|
||||
expect(output.join('\n')).toContain("rbac 'developers' updated");
|
||||
});
|
||||
|
||||
it('creates an RBAC definition with operation bindings', async () => {
|
||||
it('creates an RBAC definition with operation bindings (action:… shorthand)', async () => {
|
||||
vi.mocked(client.post).mockResolvedValueOnce({ id: 'rbac-1', name: 'ops' });
|
||||
const cmd = createCreateCommand({ client, log });
|
||||
await cmd.parseAsync([
|
||||
'rbac', 'ops',
|
||||
'--subject', 'Group:ops-team',
|
||||
'--binding', 'edit:servers',
|
||||
'--operation', 'logs',
|
||||
'--operation', 'backup',
|
||||
'--roleBindings', 'role:edit,resource:servers',
|
||||
'--roleBindings', 'action:logs',
|
||||
'--roleBindings', 'action:backup',
|
||||
], { from: 'user' });
|
||||
|
||||
expect(client.post).toHaveBeenCalledWith('/api/v1/rbac', {
|
||||
@@ -433,7 +433,7 @@ describe('create command', () => {
|
||||
await cmd.parseAsync([
|
||||
'rbac', 'ha-viewer',
|
||||
'--subject', 'User:alice@test.com',
|
||||
'--binding', 'view:servers:my-ha',
|
||||
'--roleBindings', 'role:view,resource:servers,name:my-ha',
|
||||
], { from: 'user' });
|
||||
|
||||
expect(client.post).toHaveBeenCalledWith('/api/v1/rbac', {
|
||||
|
||||
54
src/cli/tests/commands/rbac-bindings.test.ts
Normal file
54
src/cli/tests/commands/rbac-bindings.test.ts
Normal file
@@ -0,0 +1,54 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { parseRoleBinding } from '../../src/commands/rbac-bindings.js';
|
||||
|
||||
// Unit tests for the `--roleBindings` parser: entries are comma-separated
// `key:value` pairs. Valid keys (per the rejection tests below) are
// role/resource/name/action; `action:` is a shorthand that expands to
// role 'run', and is mutually exclusive with resource/name.
describe('parseRoleBinding', () => {
  it('parses an unscoped resource binding', () => {
    expect(parseRoleBinding('role:view,resource:servers')).toEqual({
      role: 'view',
      resource: 'servers',
    });
  });

  it('parses a name-scoped resource binding', () => {
    expect(parseRoleBinding('role:view,resource:servers,name:my-ha')).toEqual({
      role: 'view',
      resource: 'servers',
      name: 'my-ha',
    });
  });

  it('parses an operation binding via the action shorthand', () => {
    // `action:logs` implies role 'run' — no explicit role/resource needed.
    expect(parseRoleBinding('action:logs')).toEqual({
      role: 'run',
      action: 'logs',
    });
  });

  it('trims whitespace around keys and values', () => {
    expect(parseRoleBinding('role: edit , resource: * ')).toEqual({
      role: 'edit',
      resource: '*',
    });
  });

  it('rejects a pair with no colon', () => {
    // '=' is not the separator; every pair must be `key:value`.
    expect(() => parseRoleBinding('role=view')).toThrow(/key:value pairs/);
  });

  it('rejects an unknown key', () => {
    expect(() => parseRoleBinding('role:view,resource:servers,scope:project')).toThrow(/Invalid roleBindings key 'scope'/);
  });

  it('rejects an empty value', () => {
    expect(() => parseRoleBinding('role:view,resource:')).toThrow(/empty key or value/);
  });

  it('rejects action combined with resource/name', () => {
    expect(() => parseRoleBinding('action:logs,resource:servers')).toThrow(/cannot be combined/);
  });

  it('requires both role and resource when action is absent', () => {
    expect(() => parseRoleBinding('role:view')).toThrow(/need either 'action/);
    expect(() => parseRoleBinding('resource:servers')).toThrow(/need either 'action/);
  });
});
|
||||
168
src/cli/tests/commands/test-mcp.test.ts
Normal file
168
src/cli/tests/commands/test-mcp.test.ts
Normal file
@@ -0,0 +1,168 @@
|
||||
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
|
||||
import { createTestCommand } from '../../src/commands/test-mcp.js';
|
||||
|
||||
// Build a fake MCP session whose methods succeed by default; individual
// methods can be overridden per test (e.g. a failing initialize or a
// tool call that reports isError).
function makeSession(overrides: Partial<{
  initialize: () => Promise<unknown>;
  listTools: () => Promise<Array<{ name: string }>>;
  callTool: (name: string, args: Record<string, unknown>) => Promise<unknown>;
  close: () => Promise<void>;
}> = {}) {
  return {
    initialize: overrides.initialize ?? vi.fn(async () => ({ protocolVersion: '2024-11-05' })),
    listTools: overrides.listTools ?? vi.fn(async () => [{ name: 'echo' }, { name: 'search' }]),
    callTool: overrides.callTool ?? vi.fn(async () => ({ content: [{ type: 'text', text: 'hi' }] })),
    close: overrides.close ?? vi.fn(async () => { /* no-op */ }),
  };
}

// End-to-end tests of `mcpctl test mcp` through commander, with the session
// factory and health check injected. The exit-code contract under test:
// 0 = pass, 1 = transport/auth failure, 2 = contract failure.
describe('mcpctl test mcp', () => {
  const output: string[] = [];
  const log = (...args: unknown[]) => {
    output.push(args.map(String).join(' '));
  };

  beforeEach(() => {
    // The command communicates via process.exitCode — reset it between tests.
    output.length = 0;
    process.exitCode = 0;
  });

  afterEach(() => {
    process.exitCode = 0;
  });

  it('exits 0 on happy path (health + initialize + tools/list)', async () => {
    const session = makeSession();
    const cmd = createTestCommand({
      log,
      createSession: () => session,
      healthCheck: async () => true,
    });
    await cmd.parseAsync(['mcp', 'https://mcp.example.com/projects/foo/mcp'], { from: 'user' });
    expect(process.exitCode).toBe(0);
    expect(session.initialize).toHaveBeenCalled();
    expect(session.listTools).toHaveBeenCalled();
    expect(output.join('\n')).toContain('Result: PASS');
  });

  it('exits 1 when the /healthz preflight fails', async () => {
    const cmd = createTestCommand({
      log,
      createSession: () => makeSession(),
      healthCheck: async () => false,
    });
    await cmd.parseAsync(['mcp', 'https://mcp.example.com/projects/foo/mcp'], { from: 'user' });
    expect(process.exitCode).toBe(1);
    expect(output.join('\n')).toContain('healthz preflight failed');
  });

  it('exits 2 (contract fail) when --expect-tools are missing', async () => {
    const cmd = createTestCommand({
      log,
      createSession: () => makeSession({
        listTools: async () => [{ name: 'echo' }],
      }),
      healthCheck: async () => true,
    });
    await cmd.parseAsync(
      ['mcp', 'https://mcp.example.com/projects/foo/mcp', '--expect-tools', 'echo,search'],
      { from: 'user' },
    );
    expect(process.exitCode).toBe(2);
    expect(output.join('\n')).toContain('Missing: search');
    expect(output.join('\n')).toContain('CONTRACT FAIL');
  });

  it('exits 0 when --expect-tools all match', async () => {
    // Extra tools beyond the expected set are fine — only missing ones fail.
    const cmd = createTestCommand({
      log,
      createSession: () => makeSession({
        listTools: async () => [{ name: 'echo' }, { name: 'search' }, { name: 'x' }],
      }),
      healthCheck: async () => true,
    });
    await cmd.parseAsync(
      ['mcp', 'https://mcp.example.com/projects/foo/mcp', '--expect-tools', 'echo,search'],
      { from: 'user' },
    );
    expect(process.exitCode).toBe(0);
  });

  it('exits 1 on transport/auth failure (initialize throws)', async () => {
    const cmd = createTestCommand({
      log,
      createSession: () => makeSession({
        initialize: async () => { throw new Error('HTTP 401: unauthorized'); },
      }),
      healthCheck: async () => true,
    });
    await cmd.parseAsync(['mcp', 'https://mcp.example.com/projects/foo/mcp'], { from: 'user' });
    expect(process.exitCode).toBe(1);
    expect(output.join('\n')).toContain('Error:');
    expect(output.join('\n')).toContain('TRANSPORT/AUTH FAIL');
  });

  it('invokes --tool with --args and reports isError', async () => {
    // A tool-level error (isError: true) is a contract failure → exit 2.
    const callTool = vi.fn(async () => ({ content: [{ type: 'text', text: 'oops' }], isError: true }));
    const cmd = createTestCommand({
      log,
      createSession: () => makeSession({ callTool }),
      healthCheck: async () => true,
    });
    await cmd.parseAsync(
      ['mcp', 'https://mcp.example.com/projects/foo/mcp', '--tool', 'echo', '--args', '{"msg":"hi"}'],
      { from: 'user' },
    );
    expect(callTool).toHaveBeenCalledWith('echo', { msg: 'hi' });
    expect(process.exitCode).toBe(2);
  });

  it('outputs a JSON report with -o json', async () => {
    const cmd = createTestCommand({
      log,
      createSession: () => makeSession(),
      healthCheck: async () => true,
    });
    await cmd.parseAsync(
      ['mcp', 'https://mcp.example.com/projects/foo/mcp', '-o', 'json'],
      { from: 'user' },
    );
    const parsed = JSON.parse(output.join('\n')) as { exitCode: number; tools: string[] };
    expect(parsed.exitCode).toBe(0);
    expect(parsed.tools).toEqual(['echo', 'search']);
  });

  it('reads $MCPCTL_TOKEN when --token is not given', async () => {
    let observedBearer: string | undefined;
    const cmd = createTestCommand({
      log,
      createSession: (_url, opts) => {
        observedBearer = opts.bearer;
        return makeSession();
      },
      healthCheck: async () => true,
    });
    // Save/restore the env var so this test cannot leak into others.
    const prev = process.env.MCPCTL_TOKEN;
    process.env.MCPCTL_TOKEN = 'mcpctl_pat_fromenv';
    try {
      await cmd.parseAsync(['mcp', 'https://mcp.example.com/projects/foo/mcp'], { from: 'user' });
    } finally {
      if (prev === undefined) delete process.env.MCPCTL_TOKEN;
      else process.env.MCPCTL_TOKEN = prev;
    }
    expect(observedBearer).toBe('mcpctl_pat_fromenv');
  });

  it('rejects invalid --args as JSON', async () => {
    // Invalid JSON is folded into the report (exit 1), not an unhandled throw.
    const cmd = createTestCommand({
      log,
      createSession: () => makeSession(),
      healthCheck: async () => true,
    });
    await cmd.parseAsync(
      ['mcp', 'https://mcp.example.com/projects/foo/mcp', '--tool', 'echo', '--args', 'not-json'],
      { from: 'user' },
    );
    expect(process.exitCode).toBe(1);
    expect(output.join('\n')).toContain('must be valid JSON');
  });
});
|
||||
@@ -25,6 +25,7 @@ model User {
|
||||
auditLogs AuditLog[]
|
||||
ownedProjects Project[]
|
||||
groupMemberships GroupMember[]
|
||||
mcpTokens McpToken[]
|
||||
|
||||
@@index([email])
|
||||
}
|
||||
@@ -187,6 +188,7 @@ model Project {
|
||||
servers ProjectServer[]
|
||||
prompts Prompt[]
|
||||
promptRequests PromptRequest[]
|
||||
mcpTokens McpToken[]
|
||||
|
||||
@@index([name])
|
||||
@@index([ownerId])
|
||||
@@ -204,6 +206,36 @@ model ProjectServer {
|
||||
@@unique([projectId, serverId])
|
||||
}
|
||||
|
||||
// ── MCP Tokens (bearer credentials for HTTP-mode mcplocal) ──
//
// Raw value format: `mcpctl_pat_<32 base62 chars>`. The raw value is shown
// exactly once at create time; only the SHA-256 hash is persisted. Tokens are
// scoped to exactly one project — they're only valid at
// `/projects/<that-project>/mcp`. Creator's RBAC is the ceiling; the service
// rejects bindings that exceed what the creator themselves can do.

model McpToken {
  id          String    @id @default(cuid())
  // Unique only within a project — see @@unique([name, projectId]) below.
  name        String
  projectId   String
  // SHA-256 of the raw token; the only persisted secret material.
  tokenHash   String    @unique
  // Short non-secret prefix of the raw value, kept for display/identification.
  tokenPrefix String
  ownerId     String
  description String    @default("")
  createdAt   DateTime  @default(now())
  // Optional expiry / usage / revocation timestamps (nullable when unset).
  expiresAt   DateTime?
  lastUsedAt  DateTime?
  revokedAt   DateTime?

  // Deleting the project or the owning user cascades to their tokens.
  project Project @relation(fields: [projectId], references: [id], onDelete: Cascade)
  owner   User    @relation(fields: [ownerId], references: [id], onDelete: Cascade)

  @@unique([name, projectId])
  // NOTE(review): @unique on tokenHash already creates an index in Prisma;
  // this @@index([tokenHash]) looks redundant — confirm before removing, as
  // dropping it changes the generated migration.
  @@index([tokenHash])
  @@index([projectId])
  @@index([ownerId])
}
|
||||
|
||||
// ── MCP Instances (running containers) ──
|
||||
|
||||
model McpInstance {
|
||||
@@ -288,6 +320,8 @@ model AuditEvent {
|
||||
correlationId String?
|
||||
parentEventId String?
|
||||
userName String?
|
||||
tokenName String?
|
||||
tokenSha String?
|
||||
payload Json
|
||||
createdAt DateTime @default(now())
|
||||
|
||||
@@ -297,6 +331,7 @@ model AuditEvent {
|
||||
@@index([timestamp])
|
||||
@@index([eventKind])
|
||||
@@index([userName])
|
||||
@@index([tokenSha])
|
||||
}
|
||||
|
||||
// ── Backup Pending Queue ──
|
||||
|
||||
@@ -18,6 +18,7 @@ import {
|
||||
UserRepository,
|
||||
GroupRepository,
|
||||
AuditEventRepository,
|
||||
McpTokenRepository,
|
||||
} from './repositories/index.js';
|
||||
import { PromptRepository } from './repositories/prompt.repository.js';
|
||||
import { PromptRequestRepository } from './repositories/prompt-request.repository.js';
|
||||
@@ -43,6 +44,7 @@ import {
|
||||
UserService,
|
||||
GroupService,
|
||||
AuditEventService,
|
||||
McpTokenService,
|
||||
} from './services/index.js';
|
||||
import type { RbacAction } from './services/index.js';
|
||||
import type { UpdateRbacDefinitionInput } from './validation/rbac-definition.schema.js';
|
||||
@@ -62,6 +64,7 @@ import {
|
||||
registerUserRoutes,
|
||||
registerGroupRoutes,
|
||||
registerAuditEventRoutes,
|
||||
registerMcpTokenRoutes,
|
||||
} from './routes/index.js';
|
||||
import { registerPromptRoutes } from './routes/prompts.js';
|
||||
import { registerGitBackupRoutes } from './routes/git-backup.js';
|
||||
@@ -104,6 +107,7 @@ function mapUrlToPermission(method: string, url: string): PermissionCheck {
|
||||
'mcp': 'servers',
|
||||
'prompts': 'prompts',
|
||||
'promptrequests': 'promptrequests',
|
||||
'mcptokens': 'mcptokens',
|
||||
};
|
||||
|
||||
const resource = resourceMap[segment];
|
||||
@@ -116,6 +120,12 @@ function mapUrlToPermission(method: string, url: string): PermissionCheck {
|
||||
return { kind: 'resource', resource: 'promptrequests', action: 'delete', resourceName: approveMatch[1] };
|
||||
}
|
||||
|
||||
// Special case: /api/v1/mcptokens/:id/revoke → treated as 'delete' on the token.
|
||||
const revokeMatch = url.match(/^\/api\/v1\/mcptokens\/([^/?]+)\/revoke/);
|
||||
if (revokeMatch?.[1]) {
|
||||
return { kind: 'resource', resource: 'mcptokens', action: 'delete', resourceName: revokeMatch[1] };
|
||||
}
|
||||
|
||||
// Special case: /api/v1/projects/:name/prompts/visible → view prompts
|
||||
const visiblePromptsMatch = url.match(/^\/api\/v1\/projects\/([^/?]+)\/prompts\/visible/);
|
||||
if (visiblePromptsMatch?.[1]) {
|
||||
@@ -259,6 +269,7 @@ async function main(): Promise<void> {
|
||||
const rbacDefinitionRepo = new RbacDefinitionRepository(prisma);
|
||||
const userRepo = new UserRepository(prisma);
|
||||
const groupRepo = new GroupRepository(prisma);
|
||||
const mcpTokenRepo = new McpTokenRepository(prisma);
|
||||
|
||||
// CUID detection for RBAC name resolution
|
||||
const CUID_RE = /^c[^\s-]{8,}$/i;
|
||||
@@ -267,6 +278,7 @@ async function main(): Promise<void> {
|
||||
secrets: secretRepo,
|
||||
projects: projectRepo,
|
||||
groups: groupRepo,
|
||||
mcptokens: mcpTokenRepo,
|
||||
};
|
||||
|
||||
// Migrate legacy 'admin' role → granular roles
|
||||
@@ -292,6 +304,7 @@ async function main(): Promise<void> {
|
||||
const mcpProxyService = new McpProxyService(instanceRepo, serverRepo, orchestrator);
|
||||
const rbacDefinitionService = new RbacDefinitionService(rbacDefinitionRepo);
|
||||
const rbacService = new RbacService(rbacDefinitionRepo, prisma);
|
||||
const mcpTokenService = new McpTokenService(mcpTokenRepo, projectRepo, rbacDefinitionRepo, rbacService);
|
||||
const userService = new UserService(userRepo);
|
||||
const groupService = new GroupService(groupRepo, userRepo);
|
||||
const promptRepo = new PromptRepository(prisma);
|
||||
@@ -302,10 +315,28 @@ async function main(): Promise<void> {
|
||||
const backupService = new BackupService(serverRepo, projectRepo, secretRepo, userRepo, groupRepo, rbacDefinitionRepo, promptRepo, templateRepo);
|
||||
const restoreService = new RestoreService(serverRepo, projectRepo, secretRepo, userRepo, groupRepo, rbacDefinitionRepo, promptRepo, templateRepo);
|
||||
|
||||
// Auth middleware for global hooks
|
||||
const authMiddleware = createAuthMiddleware({
|
||||
findSession: (token) => authService.findSession(token),
|
||||
});
|
||||
// Shared auth dependencies. Both the global auth hook and the per-route
|
||||
// preHandler on /api/v1/mcp/proxy must know how to resolve both session
|
||||
// bearers AND mcpctl_pat_ bearers, or mcplocal→mcpd proxy calls with a
|
||||
// McpToken will 401 at the route layer even though the global hook accepts them.
|
||||
const authDeps = {
|
||||
findSession: (token: string) => authService.findSession(token),
|
||||
findMcpToken: async (tokenHash: string) => {
|
||||
const row = await mcpTokenRepo.findByHash(tokenHash);
|
||||
if (row === null) return null;
|
||||
return {
|
||||
tokenId: row.id,
|
||||
tokenName: row.name,
|
||||
tokenSha: row.tokenHash,
|
||||
projectId: row.projectId,
|
||||
projectName: row.project.name,
|
||||
ownerId: row.ownerId,
|
||||
expiresAt: row.expiresAt,
|
||||
revokedAt: row.revokedAt,
|
||||
};
|
||||
},
|
||||
};
|
||||
const authMiddleware = createAuthMiddleware(authDeps);
|
||||
|
||||
// Server
|
||||
const app = await createServer(config, {
|
||||
@@ -329,6 +360,8 @@ async function main(): Promise<void> {
|
||||
const url = request.url;
|
||||
// Skip auth for health, auth, and root
|
||||
if (url.startsWith('/api/v1/auth/') || url === '/healthz' || url === '/health') return;
|
||||
// Introspection authenticates via the McpToken bearer itself — route handles its own auth.
|
||||
if (url.startsWith('/api/v1/mcptokens/introspect')) return;
|
||||
if (!url.startsWith('/api/v1/')) return;
|
||||
|
||||
// Run auth middleware
|
||||
@@ -351,9 +384,28 @@ async function main(): Promise<void> {
|
||||
const saHeader = request.headers['x-service-account'];
|
||||
const serviceAccountName = typeof saHeader === 'string' ? saHeader : undefined;
|
||||
|
||||
// McpToken principal (set by authMiddleware when the bearer was mcpctl_pat_…)
|
||||
const mcpTokenSha = request.mcpToken?.tokenSha;
|
||||
|
||||
// Second layer of project-scope enforcement: a McpToken principal can only
|
||||
// hit resources inside its bound project.
|
||||
if (request.mcpToken !== undefined) {
|
||||
const projectMatch = url.match(/^\/api\/v1\/projects\/([^/?]+)/);
|
||||
if (projectMatch?.[1]) {
|
||||
let targetProjectName = projectMatch[1];
|
||||
if (CUID_RE.test(targetProjectName)) {
|
||||
const entity = await projectRepo.findById(targetProjectName);
|
||||
if (entity) targetProjectName = entity.name;
|
||||
}
|
||||
if (targetProjectName !== request.mcpToken.projectName) {
|
||||
return reply.code(403).send({ error: 'Token is not valid for this project' });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let allowed: boolean;
|
||||
if (check.kind === 'operation') {
|
||||
allowed = await rbacService.canRunOperation(request.userId, check.operation, serviceAccountName);
|
||||
allowed = await rbacService.canRunOperation(request.userId, check.operation, serviceAccountName, mcpTokenSha);
|
||||
} else {
|
||||
// Resolve CUID → human name for name-scoped RBAC bindings
|
||||
if (check.resourceName !== undefined && CUID_RE.test(check.resourceName)) {
|
||||
@@ -363,10 +415,10 @@ async function main(): Promise<void> {
|
||||
if (entity) check.resourceName = entity.name;
|
||||
}
|
||||
}
|
||||
allowed = await rbacService.canAccess(request.userId, check.action, check.resource, check.resourceName, serviceAccountName);
|
||||
allowed = await rbacService.canAccess(request.userId, check.action, check.resource, check.resourceName, serviceAccountName, mcpTokenSha);
|
||||
// Compute scope for list filtering (used by preSerialization hook)
|
||||
if (allowed && check.resourceName === undefined) {
|
||||
request.rbacScope = await rbacService.getAllowedScope(request.userId, check.action, check.resource, serviceAccountName);
|
||||
request.rbacScope = await rbacService.getAllowedScope(request.userId, check.action, check.resource, serviceAccountName, mcpTokenSha);
|
||||
}
|
||||
}
|
||||
if (!allowed) {
|
||||
@@ -388,11 +440,12 @@ async function main(): Promise<void> {
|
||||
registerMcpProxyRoutes(app, {
|
||||
mcpProxyService,
|
||||
auditLogService,
|
||||
authDeps: { findSession: (token) => authService.findSession(token) },
|
||||
authDeps,
|
||||
});
|
||||
registerRbacRoutes(app, rbacDefinitionService);
|
||||
registerUserRoutes(app, userService);
|
||||
registerGroupRoutes(app, groupService);
|
||||
registerMcpTokenRoutes(app, { tokenService: mcpTokenService, projectRepo });
|
||||
registerPromptRoutes(app, promptService, projectRepo);
|
||||
|
||||
// ── Git-based backup ──
|
||||
|
||||
@@ -1,13 +1,41 @@
|
||||
import type { FastifyRequest, FastifyReply } from 'fastify';
|
||||
import { isMcpToken, hashToken } from '@mcpctl/shared';
|
||||
|
||||
export interface McpTokenPrincipal {
|
||||
tokenId: string;
|
||||
tokenName: string;
|
||||
tokenSha: string;
|
||||
projectId: string;
|
||||
projectName: string;
|
||||
ownerId: string;
|
||||
}
|
||||
|
||||
export interface McpTokenLookup {
|
||||
tokenId: string;
|
||||
tokenName: string;
|
||||
tokenSha: string;
|
||||
projectId: string;
|
||||
projectName: string;
|
||||
ownerId: string;
|
||||
expiresAt: Date | null;
|
||||
revokedAt: Date | null;
|
||||
}
|
||||
|
||||
export interface AuthDeps {
|
||||
findSession: (token: string) => Promise<{ userId: string; expiresAt: Date } | null>;
|
||||
/**
|
||||
* Look up an McpToken by SHA-256 hash. Optional — when absent, Bearer tokens
|
||||
* that look like `mcpctl_pat_…` are rejected (400).
|
||||
*/
|
||||
findMcpToken?: (tokenHash: string) => Promise<McpTokenLookup | null>;
|
||||
}
|
||||
|
||||
declare module 'fastify' {
|
||||
interface FastifyRequest {
|
||||
userId?: string;
|
||||
rbacScope?: { wildcard: boolean; names: Set<string> };
|
||||
/** Set by the auth hook when the caller authenticated via a McpToken bearer (prefix `mcpctl_pat_`). */
|
||||
mcpToken?: McpTokenPrincipal;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -25,6 +53,37 @@ export function createAuthMiddleware(deps: AuthDeps) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Dispatch on the prefix: `mcpctl_pat_…` → McpToken path; anything else → session path.
|
||||
if (isMcpToken(token)) {
|
||||
if (deps.findMcpToken === undefined) {
|
||||
reply.code(401).send({ error: 'McpToken auth not enabled' });
|
||||
return;
|
||||
}
|
||||
const row = await deps.findMcpToken(hashToken(token));
|
||||
if (row === null) {
|
||||
reply.code(401).send({ error: 'Invalid token' });
|
||||
return;
|
||||
}
|
||||
if (row.revokedAt !== null) {
|
||||
reply.code(401).send({ error: 'Token revoked' });
|
||||
return;
|
||||
}
|
||||
if (row.expiresAt !== null && row.expiresAt < new Date()) {
|
||||
reply.code(401).send({ error: 'Token expired' });
|
||||
return;
|
||||
}
|
||||
request.userId = row.ownerId;
|
||||
request.mcpToken = {
|
||||
tokenId: row.tokenId,
|
||||
tokenName: row.tokenName,
|
||||
tokenSha: row.tokenSha,
|
||||
projectId: row.projectId,
|
||||
projectName: row.projectName,
|
||||
ownerId: row.ownerId,
|
||||
};
|
||||
return;
|
||||
}
|
||||
|
||||
const session = await deps.findSession(token);
|
||||
if (session === null) {
|
||||
reply.code(401).send({ error: 'Invalid token' });
|
||||
|
||||
@@ -30,6 +30,8 @@ export class AuditEventRepository implements IAuditEventRepository {
|
||||
correlationId: e.correlationId ?? null,
|
||||
parentEventId: e.parentEventId ?? null,
|
||||
userName: e.userName ?? null,
|
||||
tokenName: e.tokenName ?? null,
|
||||
tokenSha: e.tokenSha ?? null,
|
||||
payload: e.payload as Prisma.InputJsonValue,
|
||||
}));
|
||||
const result = await this.prisma.auditEvent.createMany({ data });
|
||||
@@ -132,6 +134,8 @@ function buildWhere(filter?: AuditEventFilter): Prisma.AuditEventWhereInput {
|
||||
if (filter.serverName !== undefined) where.serverName = filter.serverName;
|
||||
if (filter.correlationId !== undefined) where.correlationId = filter.correlationId;
|
||||
if (filter.userName !== undefined) where.userName = filter.userName;
|
||||
if (filter.tokenName !== undefined) where.tokenName = filter.tokenName;
|
||||
if (filter.tokenSha !== undefined) where.tokenSha = filter.tokenSha;
|
||||
|
||||
if (filter.from !== undefined || filter.to !== undefined) {
|
||||
const timestamp: Prisma.DateTimeFilter = {};
|
||||
|
||||
@@ -15,3 +15,5 @@ export type { IGroupRepository, GroupWithMembers } from './group.repository.js';
|
||||
export { GroupRepository } from './group.repository.js';
|
||||
export type { IAuditEventRepository, AuditEventFilter, AuditEventCreateInput } from './interfaces.js';
|
||||
export { AuditEventRepository } from './audit-event.repository.js';
|
||||
export type { IMcpTokenRepository, McpTokenFilter, McpTokenWithRelations, CreateMcpTokenRepoInput } from './interfaces.js';
|
||||
export { McpTokenRepository } from './mcp-token.repository.js';
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import type { McpServer, McpInstance, AuditLog, AuditEvent, Secret, InstanceStatus } from '@prisma/client';
|
||||
import type { McpServer, McpInstance, AuditLog, AuditEvent, McpToken, Secret, InstanceStatus } from '@prisma/client';
|
||||
import type { CreateMcpServerInput, UpdateMcpServerInput } from '../validation/mcp-server.schema.js';
|
||||
import type { CreateSecretInput, UpdateSecretInput } from '../validation/secret.schema.js';
|
||||
|
||||
@@ -57,6 +57,8 @@ export interface AuditEventFilter {
|
||||
serverName?: string;
|
||||
correlationId?: string;
|
||||
userName?: string;
|
||||
tokenName?: string;
|
||||
tokenSha?: string;
|
||||
from?: Date;
|
||||
to?: Date;
|
||||
limit?: number;
|
||||
@@ -74,6 +76,8 @@ export interface AuditEventCreateInput {
|
||||
correlationId?: string;
|
||||
parentEventId?: string;
|
||||
userName?: string;
|
||||
tokenName?: string;
|
||||
tokenSha?: string;
|
||||
payload: Record<string, unknown>;
|
||||
}
|
||||
|
||||
@@ -95,3 +99,37 @@ export interface IAuditEventRepository {
|
||||
listSessions(filter?: { projectName?: string; userName?: string; from?: Date; to?: Date; limit?: number; offset?: number }): Promise<AuditSessionSummary[]>;
|
||||
countSessions(filter?: { projectName?: string; userName?: string; from?: Date; to?: Date }): Promise<number>;
|
||||
}
|
||||
|
||||
// ── MCP Tokens ──

/** Filters for listing McpTokens; omitted fields do not constrain. */
export interface McpTokenFilter {
  projectId?: string;
  ownerId?: string;
  /** When true, revoked (soft-deleted) tokens are included; default is active-only. */
  includeRevoked?: boolean;
}

/** Persistence input for a new token. Only the hash and display prefix of the bearer are stored — never the raw token. */
export interface CreateMcpTokenRepoInput {
  name: string;
  projectId: string;
  ownerId: string;
  /** Hash of the raw bearer (produced by generateToken in the service layer). */
  tokenHash: string;
  /** Short displayable prefix of the bearer, safe to return in listings. */
  tokenPrefix: string;
  description?: string;
  /** null / omitted ⇒ the token never expires. */
  expiresAt?: Date | null;
}

/** Token row plus the project/owner relations eagerly selected by the repository. */
export type McpTokenWithRelations = McpToken & {
  project: { id: string; name: string };
  owner: { id: string; email: string };
};

/** Data-access contract for McpTokens. `revoke` is a soft delete; `delete` removes the row. */
export interface IMcpTokenRepository {
  findAll(filter?: McpTokenFilter): Promise<McpTokenWithRelations[]>;
  findById(id: string): Promise<McpTokenWithRelations | null>;
  /** Lookup by bearer hash — the request-authentication hot path. */
  findByHash(tokenHash: string): Promise<McpTokenWithRelations | null>;
  /** Token names are unique per project (compound key name_projectId). */
  findByNameAndProject(name: string, projectId: string): Promise<McpTokenWithRelations | null>;
  create(data: CreateMcpTokenRepoInput): Promise<McpTokenWithRelations>;
  /** Soft delete: stamps revokedAt; the row remains for auditability. */
  revoke(id: string): Promise<McpTokenWithRelations>;
  /** Best-effort lastUsedAt bump. */
  touchLastUsed(id: string): Promise<void>;
  delete(id: string): Promise<void>;
}
|
||||
|
||||
83
src/mcpd/src/repositories/mcp-token.repository.ts
Normal file
83
src/mcpd/src/repositories/mcp-token.repository.ts
Normal file
@@ -0,0 +1,83 @@
|
||||
import type { PrismaClient } from '@prisma/client';
|
||||
import type {
|
||||
IMcpTokenRepository,
|
||||
McpTokenFilter,
|
||||
McpTokenWithRelations,
|
||||
CreateMcpTokenRepoInput,
|
||||
} from './interfaces.js';
|
||||
|
||||
const INCLUDE_RELATIONS = {
|
||||
project: { select: { id: true, name: true } },
|
||||
owner: { select: { id: true, email: true } },
|
||||
} as const;
|
||||
|
||||
export class McpTokenRepository implements IMcpTokenRepository {
|
||||
constructor(private readonly prisma: PrismaClient) {}
|
||||
|
||||
async findAll(filter?: McpTokenFilter): Promise<McpTokenWithRelations[]> {
|
||||
const where: Record<string, unknown> = {};
|
||||
if (filter?.projectId !== undefined) where['projectId'] = filter.projectId;
|
||||
if (filter?.ownerId !== undefined) where['ownerId'] = filter.ownerId;
|
||||
if (!filter?.includeRevoked) where['revokedAt'] = null;
|
||||
return this.prisma.mcpToken.findMany({
|
||||
where,
|
||||
include: INCLUDE_RELATIONS,
|
||||
orderBy: { createdAt: 'desc' },
|
||||
}) as Promise<McpTokenWithRelations[]>;
|
||||
}
|
||||
|
||||
async findById(id: string): Promise<McpTokenWithRelations | null> {
|
||||
return this.prisma.mcpToken.findUnique({
|
||||
where: { id },
|
||||
include: INCLUDE_RELATIONS,
|
||||
}) as Promise<McpTokenWithRelations | null>;
|
||||
}
|
||||
|
||||
async findByHash(tokenHash: string): Promise<McpTokenWithRelations | null> {
|
||||
return this.prisma.mcpToken.findUnique({
|
||||
where: { tokenHash },
|
||||
include: INCLUDE_RELATIONS,
|
||||
}) as Promise<McpTokenWithRelations | null>;
|
||||
}
|
||||
|
||||
async findByNameAndProject(name: string, projectId: string): Promise<McpTokenWithRelations | null> {
|
||||
return this.prisma.mcpToken.findUnique({
|
||||
where: { name_projectId: { name, projectId } },
|
||||
include: INCLUDE_RELATIONS,
|
||||
}) as Promise<McpTokenWithRelations | null>;
|
||||
}
|
||||
|
||||
async create(data: CreateMcpTokenRepoInput): Promise<McpTokenWithRelations> {
|
||||
return this.prisma.mcpToken.create({
|
||||
data: {
|
||||
name: data.name,
|
||||
projectId: data.projectId,
|
||||
ownerId: data.ownerId,
|
||||
tokenHash: data.tokenHash,
|
||||
tokenPrefix: data.tokenPrefix,
|
||||
description: data.description ?? '',
|
||||
expiresAt: data.expiresAt ?? null,
|
||||
},
|
||||
include: INCLUDE_RELATIONS,
|
||||
}) as Promise<McpTokenWithRelations>;
|
||||
}
|
||||
|
||||
async revoke(id: string): Promise<McpTokenWithRelations> {
|
||||
return this.prisma.mcpToken.update({
|
||||
where: { id },
|
||||
data: { revokedAt: new Date() },
|
||||
include: INCLUDE_RELATIONS,
|
||||
}) as Promise<McpTokenWithRelations>;
|
||||
}
|
||||
|
||||
async touchLastUsed(id: string): Promise<void> {
|
||||
await this.prisma.mcpToken.update({
|
||||
where: { id },
|
||||
data: { lastUsedAt: new Date() },
|
||||
});
|
||||
}
|
||||
|
||||
async delete(id: string): Promise<void> {
|
||||
await this.prisma.mcpToken.delete({ where: { id } });
|
||||
}
|
||||
}
|
||||
@@ -18,3 +18,5 @@ export { registerRbacRoutes } from './rbac-definitions.js';
|
||||
export { registerUserRoutes } from './users.js';
|
||||
export { registerGroupRoutes } from './groups.js';
|
||||
export { registerAuditEventRoutes } from './audit-events.js';
|
||||
export { registerMcpTokenRoutes } from './mcp-tokens.js';
|
||||
export type { McpTokenRouteDeps } from './mcp-tokens.js';
|
||||
|
||||
142
src/mcpd/src/routes/mcp-tokens.ts
Normal file
142
src/mcpd/src/routes/mcp-tokens.ts
Normal file
@@ -0,0 +1,142 @@
|
||||
import type { FastifyInstance, FastifyReply, FastifyRequest } from 'fastify';
|
||||
import { isMcpToken } from '@mcpctl/shared';
|
||||
import type { McpTokenService } from '../services/mcp-token.service.js';
|
||||
import { PermissionCeilingError } from '../services/mcp-token.service.js';
|
||||
import { NotFoundError, ConflictError } from '../services/mcp-server.service.js';
|
||||
import type { IProjectRepository } from '../repositories/project.repository.js';
|
||||
|
||||
export interface McpTokenRouteDeps {
|
||||
tokenService: McpTokenService;
|
||||
projectRepo: IProjectRepository;
|
||||
}
|
||||
|
||||
export function registerMcpTokenRoutes(app: FastifyInstance, deps: McpTokenRouteDeps): void {
|
||||
const { tokenService, projectRepo } = deps;
|
||||
|
||||
// ── List ─────────────────────────────────────────────────────────────
|
||||
app.get<{ Querystring: { projectId?: string; projectName?: string; includeRevoked?: string } }>(
|
||||
'/api/v1/mcptokens',
|
||||
async (request) => {
|
||||
const { projectId, projectName, includeRevoked } = request.query;
|
||||
|
||||
// Allow filtering by project name for CLI ergonomics.
|
||||
let resolvedProjectId = projectId;
|
||||
if (resolvedProjectId === undefined && projectName !== undefined) {
|
||||
const project = await projectRepo.findByName(projectName);
|
||||
if (project === null) throw new NotFoundError(`Project not found: ${projectName}`);
|
||||
resolvedProjectId = project.id;
|
||||
}
|
||||
|
||||
const filter: { projectId?: string; includeRevoked?: boolean } = {};
|
||||
if (resolvedProjectId !== undefined) filter.projectId = resolvedProjectId;
|
||||
if (includeRevoked === 'true') filter.includeRevoked = true;
|
||||
|
||||
const rows = await tokenService.list(filter);
|
||||
return rows.map(toListResponse);
|
||||
},
|
||||
);
|
||||
|
||||
// ── Describe ─────────────────────────────────────────────────────────
|
||||
app.get<{ Params: { id: string } }>('/api/v1/mcptokens/:id', async (request) => {
|
||||
const row = await tokenService.getById(request.params.id);
|
||||
return toListResponse(row);
|
||||
});
|
||||
|
||||
// ── Create ───────────────────────────────────────────────────────────
|
||||
app.post('/api/v1/mcptokens', async (request, reply) => {
|
||||
const userId = request.userId;
|
||||
if (userId === undefined) {
|
||||
reply.code(401);
|
||||
return { error: 'Not authenticated' };
|
||||
}
|
||||
|
||||
try {
|
||||
// Accept projectName OR projectId for CLI ergonomics.
|
||||
const body = (request.body ?? {}) as Record<string, unknown>;
|
||||
if (typeof body['projectName'] === 'string' && typeof body['projectId'] !== 'string') {
|
||||
const project = await projectRepo.findByName(body['projectName']);
|
||||
if (project === null) throw new NotFoundError(`Project not found: ${body['projectName']}`);
|
||||
body['projectId'] = project.id;
|
||||
}
|
||||
|
||||
const result = await tokenService.create(userId, body);
|
||||
reply.code(201);
|
||||
return {
|
||||
...toListResponse(result.mcpToken),
|
||||
token: result.raw,
|
||||
};
|
||||
} catch (err) {
|
||||
if (err instanceof NotFoundError) {
|
||||
reply.code(404);
|
||||
return { error: err.message };
|
||||
}
|
||||
if (err instanceof ConflictError) {
|
||||
reply.code(409);
|
||||
return { error: err.message };
|
||||
}
|
||||
if (err instanceof PermissionCeilingError) {
|
||||
reply.code(403);
|
||||
return { error: err.message };
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
});
|
||||
|
||||
// ── Revoke (soft-delete) ────────────────────────────────────────────
|
||||
app.post<{ Params: { id: string } }>('/api/v1/mcptokens/:id/revoke', async (request) => {
|
||||
const row = await tokenService.revoke(request.params.id);
|
||||
return toListResponse(row);
|
||||
});
|
||||
|
||||
// ── Delete (hard) ────────────────────────────────────────────────────
|
||||
app.delete<{ Params: { id: string } }>('/api/v1/mcptokens/:id', async (request, reply) => {
|
||||
await tokenService.delete(request.params.id);
|
||||
reply.code(204);
|
||||
});
|
||||
|
||||
// ── Introspect ───────────────────────────────────────────────────────
|
||||
// Called by mcplocal's HTTP-mode auth preHandler to resolve a raw bearer
|
||||
// to principal info. Accepts a McpToken bearer directly — bypasses the
|
||||
// session-auth path.
|
||||
app.get('/api/v1/mcptokens/introspect', async (request: FastifyRequest, reply: FastifyReply) => {
|
||||
const header = request.headers.authorization;
|
||||
if (header === undefined || !header.startsWith('Bearer ')) {
|
||||
reply.code(401);
|
||||
return { ok: false, error: 'Missing Authorization' };
|
||||
}
|
||||
const token = header.slice(7);
|
||||
if (!isMcpToken(token)) {
|
||||
reply.code(401);
|
||||
return { ok: false, error: 'Not a mcptoken bearer' };
|
||||
}
|
||||
const result = await tokenService.introspectRaw(token);
|
||||
if (!result.ok) {
|
||||
reply.code(401);
|
||||
}
|
||||
return result;
|
||||
});
|
||||
}
|
||||
|
||||
function toListResponse(row: import('../repositories/interfaces.js').McpTokenWithRelations): Record<string, unknown> {
|
||||
return {
|
||||
id: row.id,
|
||||
name: row.name,
|
||||
projectId: row.projectId,
|
||||
projectName: row.project.name,
|
||||
tokenPrefix: row.tokenPrefix,
|
||||
ownerId: row.ownerId,
|
||||
ownerEmail: row.owner.email,
|
||||
description: row.description,
|
||||
createdAt: row.createdAt,
|
||||
expiresAt: row.expiresAt,
|
||||
lastUsedAt: row.lastUsedAt,
|
||||
revokedAt: row.revokedAt,
|
||||
status: statusOf(row),
|
||||
};
|
||||
}
|
||||
|
||||
function statusOf(row: import('../repositories/interfaces.js').McpTokenWithRelations): 'active' | 'revoked' | 'expired' {
|
||||
if (row.revokedAt !== null) return 'revoked';
|
||||
if (row.expiresAt !== null && row.expiresAt < new Date()) return 'expired';
|
||||
return 'active';
|
||||
}
|
||||
@@ -9,6 +9,8 @@ export interface AuditEventQueryParams {
|
||||
serverName?: string;
|
||||
correlationId?: string;
|
||||
userName?: string;
|
||||
tokenName?: string;
|
||||
tokenSha?: string;
|
||||
from?: string;
|
||||
to?: string;
|
||||
limit?: number;
|
||||
@@ -71,6 +73,8 @@ export class AuditEventService {
|
||||
if (params.serverName !== undefined) filter.serverName = params.serverName;
|
||||
if (params.correlationId !== undefined) filter.correlationId = params.correlationId;
|
||||
if (params.userName !== undefined) filter.userName = params.userName;
|
||||
if (params.tokenName !== undefined) filter.tokenName = params.tokenName;
|
||||
if (params.tokenSha !== undefined) filter.tokenSha = params.tokenSha;
|
||||
if (params.from !== undefined) filter.from = new Date(params.from);
|
||||
if (params.to !== undefined) filter.to = new Date(params.to);
|
||||
if (params.limit !== undefined) filter.limit = params.limit;
|
||||
|
||||
@@ -34,3 +34,5 @@ export { UserService } from './user.service.js';
|
||||
export { GroupService } from './group.service.js';
|
||||
export { AuditEventService } from './audit-event.service.js';
|
||||
export type { AuditEventQueryParams } from './audit-event.service.js';
|
||||
export { McpTokenService, PermissionCeilingError } from './mcp-token.service.js';
|
||||
export type { CreateMcpTokenResult, IntrospectResult } from './mcp-token.service.js';
|
||||
|
||||
222
src/mcpd/src/services/mcp-token.service.ts
Normal file
222
src/mcpd/src/services/mcp-token.service.ts
Normal file
@@ -0,0 +1,222 @@
|
||||
import { generateToken, hashToken } from '@mcpctl/shared';
|
||||
import type { McpToken } from '@prisma/client';
|
||||
import type { IMcpTokenRepository, McpTokenWithRelations, McpTokenFilter } from '../repositories/interfaces.js';
|
||||
import type { IRbacDefinitionRepository } from '../repositories/rbac-definition.repository.js';
|
||||
import type { IProjectRepository } from '../repositories/project.repository.js';
|
||||
import { CreateMcpTokenSchema } from '../validation/mcp-token.schema.js';
|
||||
import { isResourceBinding, type RbacRoleBinding, type RbacSubject } from '../validation/rbac-definition.schema.js';
|
||||
import type { RbacService, Permission } from './rbac.service.js';
|
||||
import { ROLE_ACTIONS_FOR_CEILING } from './rbac.service.js';
|
||||
import { NotFoundError, ConflictError } from './mcp-server.service.js';
|
||||
|
||||
/** Thrown when the requesting user tries to mint a token with bindings they cannot grant themselves. */
|
||||
export class PermissionCeilingError extends Error {
|
||||
constructor(message: string) {
|
||||
super(message);
|
||||
this.name = 'PermissionCeilingError';
|
||||
}
|
||||
}
|
||||
|
||||
/** Result of minting a token (McpTokenService.create). */
export interface CreateMcpTokenResult {
  /** The database row (with project/owner relations). */
  mcpToken: McpTokenWithRelations;
  /** The raw bearer token — shown exactly once. */
  raw: string;
}

/**
 * Outcome of resolving a raw bearer (McpTokenService.introspectRaw).
 * ok:true  ⇒ identity fields are populated and expired/revoked are false.
 * ok:false ⇒ either the token is unknown (all optionals absent) or known but
 * dead (id/name/sha populated, with expired/revoked explaining why).
 */
export interface IntrospectResult {
  ok: boolean;
  tokenId?: string;
  tokenName?: string;
  /** The stored token hash — also used as the token's RBAC subject name. */
  tokenSha?: string;
  projectId?: string;
  projectName?: string;
  ownerId?: string;
  expired?: boolean;
  revoked?: boolean;
}
|
||||
|
||||
/**
 * Business logic for McpTokens: minting with creator-ceiling enforcement,
 * raw-bearer introspection, revocation, and deletion. A token created with
 * RBAC bindings is materialized as an RbacDefinition whose subject name is
 * the token hash, making the token a first-class RBAC principal.
 */
export class McpTokenService {
  constructor(
    private readonly tokenRepo: IMcpTokenRepository,
    private readonly projectRepo: IProjectRepository,
    private readonly rbacRepo: IRbacDefinitionRepository,
    private readonly rbacService: RbacService,
  ) {}

  /** List tokens, optionally filtered by project/owner/includeRevoked. */
  async list(filter?: McpTokenFilter): Promise<McpTokenWithRelations[]> {
    return this.tokenRepo.findAll(filter);
  }

  /** Fetch one token by id; throws NotFoundError when unknown. */
  async getById(id: string): Promise<McpTokenWithRelations> {
    const row = await this.tokenRepo.findById(id);
    if (row === null) throw new NotFoundError(`McpToken not found: ${id}`);
    return row;
  }

  /**
   * Hash + lookup a raw bearer. Never returns null: unknown tokens yield
   * `{ ok: false }` alone; revoked/expired tokens yield ok:false plus the
   * token's identity fields and the reason flags; live tokens yield ok:true
   * with full principal info (and bump lastUsedAt in the background).
   */
  async introspectRaw(raw: string): Promise<IntrospectResult> {
    const hash = hashToken(raw);
    const row = await this.tokenRepo.findByHash(hash);
    if (row === null) return { ok: false };

    const now = new Date();
    const revoked = row.revokedAt !== null;
    const expired = row.expiresAt !== null && row.expiresAt < now;

    if (revoked || expired) {
      // Identify the dead token so callers can log/audit it, but omit
      // project/owner details — it no longer grants anything.
      return {
        ok: false,
        tokenId: row.id,
        tokenName: row.name,
        tokenSha: row.tokenHash,
        revoked,
        expired,
      };
    }

    // Best-effort last-used tracking (don't block on this).
    this.tokenRepo.touchLastUsed(row.id).catch(() => { /* ignore */ });

    return {
      ok: true,
      tokenId: row.id,
      tokenName: row.name,
      tokenSha: row.tokenHash,
      projectId: row.projectId,
      projectName: row.project.name,
      ownerId: row.ownerId,
      expired: false,
      revoked: false,
    };
  }

  /**
   * Mint a new token owned by `creatorUserId`. Input is validated against
   * CreateMcpTokenSchema.
   *
   * @throws NotFoundError when the target project does not exist.
   * @throws ConflictError when an ACTIVE token of the same name exists in the project.
   * @throws PermissionCeilingError when an explicit binding exceeds what the creator may grant.
   */
  async create(creatorUserId: string, input: unknown): Promise<CreateMcpTokenResult> {
    const data = CreateMcpTokenSchema.parse(input);

    const project = await this.projectRepo.findById(data.projectId);
    if (project === null) throw new NotFoundError(`Project not found: ${data.projectId}`);

    // Only an active duplicate counts as a conflict here.
    // NOTE(review): a revoked token with the same name passes this check, but
    // the repository's name_projectId compound unique key (see
    // findByNameAndProject) may still reject the insert at the DB layer —
    // confirm the intended name-reuse semantics.
    const existing = await this.tokenRepo.findByNameAndProject(data.name, data.projectId);
    if (existing !== null && existing.revokedAt === null) {
      throw new ConflictError(`McpToken already exists: ${data.name} in project ${project.name}`);
    }

    // Resolve the effective bindings:
    //   base = rbacMode === 'clone' ? snapshot(creator) : []
    //   effective = base + explicit bindings
    const basePerms = data.rbacMode === 'clone'
      ? await this.rbacService.getPermissions(creatorUserId)
      : [];
    const baseBindings = basePerms.map(permissionToBinding);
    const effectiveBindings: RbacRoleBinding[] = [...baseBindings, ...data.bindings];

    // Creator ceiling: every effective binding must be within what creator can do.
    // Cloned bindings are trivially satisfied; explicit ones may not be.
    for (const binding of data.bindings) {
      const violation = await this.checkCeiling(creatorUserId, binding);
      if (violation !== null) throw new PermissionCeilingError(violation);
    }

    // Generate the token (raw bearer + hash + display prefix).
    const { raw, hash, prefix } = generateToken();

    // Normalize expiresAt: the schema may deliver a string or a Date;
    // null/undefined ⇒ the token never expires.
    let expiresAt: Date | null = null;
    if (data.expiresAt !== undefined && data.expiresAt !== null) {
      expiresAt = typeof data.expiresAt === 'string' ? new Date(data.expiresAt) : data.expiresAt;
    }

    // Built explicitly so `description` is only present when provided
    // (the repository defaults it to '').
    const createArgs: {
      name: string;
      projectId: string;
      ownerId: string;
      tokenHash: string;
      tokenPrefix: string;
      description?: string;
      expiresAt: Date | null;
    } = {
      name: data.name,
      projectId: data.projectId,
      ownerId: creatorUserId,
      tokenHash: hash,
      tokenPrefix: prefix,
      expiresAt,
    };
    if (data.description !== undefined) createArgs.description = data.description;
    const row = await this.tokenRepo.create(createArgs);

    // If the token has bindings, auto-create an RbacDefinition so the token is a real RBAC principal.
    // The subject name is the token HASH — the same value the auth layer hands to RbacService.
    if (effectiveBindings.length > 0) {
      const subject: RbacSubject = { kind: 'McpToken', name: hash };
      await this.rbacRepo.create({
        name: rbacDefNameFor(row),
        subjects: [subject],
        roleBindings: effectiveBindings,
      });
    }

    return { mcpToken: row, raw };
  }

  /** Soft-revoke a token and tear down its RBAC definition so its grants die immediately. */
  async revoke(id: string): Promise<McpTokenWithRelations> {
    const existing = await this.getById(id);
    const row = await this.tokenRepo.revoke(id);
    // Remove the RBAC definition so the token's bindings stop resolving immediately.
    await this.deleteRbacDefinitionFor(existing).catch(() => { /* ignore */ });
    return row;
  }

  /** Hard-delete a token (RBAC definition first, then the row). Throws NotFoundError when unknown. */
  async delete(id: string): Promise<void> {
    const existing = await this.getById(id);
    await this.deleteRbacDefinitionFor(existing).catch(() => { /* ignore */ });
    await this.tokenRepo.delete(id);
  }

  /** Delete the auto-created RBAC definition for `row`, if one exists (no-op otherwise). */
  private async deleteRbacDefinitionFor(row: McpToken): Promise<void> {
    const name = rbacDefNameFor(row);
    const existing = await this.rbacRepo.findByName(name);
    if (existing === null) return;
    await this.rbacRepo.delete(existing.id);
  }

  /**
   * For a single requested binding, return null if the creator can grant it,
   * or a human-readable reason string if they cannot.
   */
  private async checkCeiling(creatorUserId: string, binding: RbacRoleBinding): Promise<string | null> {
    if (isResourceBinding(binding)) {
      // A role grants a fixed action set; the creator must hold EVERY one of
      // those actions on the target resource (optionally name-scoped) to delegate it.
      const grantedActions = ROLE_ACTIONS_FOR_CEILING[binding.role] ?? [];
      for (const action of grantedActions) {
        const ok = await this.rbacService.canAccess(
          creatorUserId,
          action,
          binding.resource,
          binding.name,
        );
        if (!ok) {
          return `Ceiling violation: you do not have permission '${action}' on ${binding.resource}${binding.name !== undefined ? `/${binding.name}` : ''}`;
        }
      }
      return null;
    }
    // Operation binding
    const ok = await this.rbacService.canRunOperation(creatorUserId, binding.action);
    if (!ok) return `Ceiling violation: you cannot run operation '${binding.action}'`;
    return null;
  }
}
|
||||
|
||||
function permissionToBinding(p: Permission): RbacRoleBinding {
|
||||
if ('resource' in p) {
|
||||
return p.name !== undefined
|
||||
? { role: p.role as RbacRoleBinding extends { role: infer R } ? R : never, resource: p.resource, name: p.name } as RbacRoleBinding
|
||||
: { role: p.role, resource: p.resource } as RbacRoleBinding;
|
||||
}
|
||||
return { role: 'run', action: p.action };
|
||||
}
|
||||
|
||||
function rbacDefNameFor(row: { id: string }): string {
|
||||
// Must match the regex in CreateRbacDefinitionSchema (lowercase alphanumeric with hyphens).
|
||||
return `mcptoken-${row.id.toLowerCase()}`;
|
||||
}
|
||||
@@ -38,6 +38,9 @@ const ROLE_ACTIONS: Record<string, readonly RbacAction[]> = {
|
||||
expose: ['expose', 'view'],
|
||||
};
|
||||
|
||||
/** Exported alias for permission-ceiling checks elsewhere (e.g. McpTokenService). */
|
||||
export const ROLE_ACTIONS_FOR_CEILING = ROLE_ACTIONS;
|
||||
|
||||
export class RbacService {
|
||||
constructor(
|
||||
private readonly rbacRepo: IRbacDefinitionRepository,
|
||||
@@ -50,8 +53,8 @@ export class RbacService {
|
||||
* If provided, name-scoped bindings only match when their name equals this.
|
||||
* If omitted (listing), name-scoped bindings still grant access.
|
||||
*/
|
||||
async canAccess(userId: string, action: RbacAction, resource: string, resourceName?: string, serviceAccountName?: string): Promise<boolean> {
|
||||
const permissions = await this.getPermissions(userId, serviceAccountName);
|
||||
async canAccess(userId: string, action: RbacAction, resource: string, resourceName?: string, serviceAccountName?: string, mcpTokenSha?: string): Promise<boolean> {
|
||||
const permissions = await this.getPermissions(userId, serviceAccountName, mcpTokenSha);
|
||||
const normalized = normalizeResource(resource);
|
||||
|
||||
for (const perm of permissions) {
|
||||
@@ -73,8 +76,8 @@ export class RbacService {
|
||||
* Check whether a user is allowed to perform a named operation.
|
||||
* Operations require an explicit 'run' role binding with a matching action.
|
||||
*/
|
||||
async canRunOperation(userId: string, operation: string, serviceAccountName?: string): Promise<boolean> {
|
||||
const permissions = await this.getPermissions(userId, serviceAccountName);
|
||||
async canRunOperation(userId: string, operation: string, serviceAccountName?: string, mcpTokenSha?: string): Promise<boolean> {
|
||||
const permissions = await this.getPermissions(userId, serviceAccountName, mcpTokenSha);
|
||||
|
||||
for (const perm of permissions) {
|
||||
if ('action' in perm && perm.role === 'run' && perm.action === operation) {
|
||||
@@ -90,8 +93,8 @@ export class RbacService {
|
||||
* Returns wildcard:true if any matching binding is unscoped (no name constraint).
|
||||
* Returns wildcard:false with a set of allowed names if all bindings are name-scoped.
|
||||
*/
|
||||
async getAllowedScope(userId: string, action: RbacAction, resource: string, serviceAccountName?: string): Promise<AllowedScope> {
|
||||
const permissions = await this.getPermissions(userId, serviceAccountName);
|
||||
async getAllowedScope(userId: string, action: RbacAction, resource: string, serviceAccountName?: string, mcpTokenSha?: string): Promise<AllowedScope> {
|
||||
const permissions = await this.getPermissions(userId, serviceAccountName, mcpTokenSha);
|
||||
const normalized = normalizeResource(resource);
|
||||
const names = new Set<string>();
|
||||
|
||||
@@ -113,13 +116,13 @@ export class RbacService {
|
||||
/**
|
||||
* Collect all permissions for a user across all matching RbacDefinitions.
|
||||
*/
|
||||
async getPermissions(userId: string, serviceAccountName?: string): Promise<Permission[]> {
|
||||
async getPermissions(userId: string, serviceAccountName?: string, mcpTokenSha?: string): Promise<Permission[]> {
|
||||
// 1. Resolve user email
|
||||
const user = await this.prisma.user.findUnique({
|
||||
where: { id: userId },
|
||||
select: { email: true },
|
||||
});
|
||||
if (user === null && serviceAccountName === undefined) return [];
|
||||
if (user === null && serviceAccountName === undefined && mcpTokenSha === undefined) return [];
|
||||
|
||||
// 2. Resolve group names the user belongs to
|
||||
let groupNames: string[] = [];
|
||||
@@ -142,6 +145,7 @@ export class RbacService {
|
||||
if (s.kind === 'User') return user !== null && s.name === user.email;
|
||||
if (s.kind === 'Group') return groupNames.includes(s.name);
|
||||
if (s.kind === 'ServiceAccount') return serviceAccountName !== undefined && s.name === serviceAccountName;
|
||||
if (s.kind === 'McpToken') return mcpTokenSha !== undefined && s.name === mcpTokenSha;
|
||||
return false;
|
||||
});
|
||||
|
||||
|
||||
21
src/mcpd/src/validation/mcp-token.schema.ts
Normal file
21
src/mcpd/src/validation/mcp-token.schema.ts
Normal file
@@ -0,0 +1,21 @@
|
||||
import { z } from 'zod';
|
||||
import { RbacRoleBindingSchema } from './rbac-definition.schema.js';
|
||||
|
||||
export const McpTokenRbacMode = z.enum(['empty', 'clone']);
|
||||
export type McpTokenRbacMode = z.infer<typeof McpTokenRbacMode>;
|
||||
|
||||
export const CreateMcpTokenSchema = z.object({
|
||||
name: z
|
||||
.string()
|
||||
.min(1)
|
||||
.max(100)
|
||||
.regex(/^[a-z0-9-]+$/, 'Name must be lowercase alphanumeric with hyphens'),
|
||||
projectId: z.string().min(1),
|
||||
description: z.string().optional(),
|
||||
expiresAt: z.union([z.string().datetime(), z.date(), z.null()]).optional(),
|
||||
rbacMode: McpTokenRbacMode.default('empty'),
|
||||
/** Explicit bindings, added on top of the `rbacMode` base (empty or clone). */
|
||||
bindings: z.array(RbacRoleBindingSchema).default([]),
|
||||
});
|
||||
|
||||
export type CreateMcpTokenInput = z.infer<typeof CreateMcpTokenSchema>;
|
||||
@@ -1,7 +1,7 @@
|
||||
import { z } from 'zod';
|
||||
|
||||
export const RBAC_ROLES = ['edit', 'view', 'create', 'delete', 'run', 'expose'] as const;
|
||||
export const RBAC_RESOURCES = ['*', 'servers', 'instances', 'secrets', 'projects', 'templates', 'users', 'groups', 'rbac', 'prompts', 'promptrequests'] as const;
|
||||
export const RBAC_RESOURCES = ['*', 'servers', 'instances', 'secrets', 'projects', 'templates', 'users', 'groups', 'rbac', 'prompts', 'promptrequests', 'mcptokens'] as const;
|
||||
|
||||
/** Singular→plural map for resource names. */
|
||||
const RESOURCE_ALIASES: Record<string, string> = {
|
||||
@@ -14,6 +14,7 @@ const RESOURCE_ALIASES: Record<string, string> = {
|
||||
group: 'groups',
|
||||
prompt: 'prompts',
|
||||
promptrequest: 'promptrequests',
|
||||
mcptoken: 'mcptokens',
|
||||
};
|
||||
|
||||
/** Normalize a resource name to its canonical plural form. */
|
||||
@@ -22,7 +23,7 @@ export function normalizeResource(resource: string): string {
|
||||
}
|
||||
|
||||
export const RbacSubjectSchema = z.object({
|
||||
kind: z.enum(['User', 'Group', 'ServiceAccount']),
|
||||
kind: z.enum(['User', 'Group', 'ServiceAccount', 'McpToken']),
|
||||
name: z.string().min(1),
|
||||
});
|
||||
|
||||
|
||||
@@ -99,3 +99,76 @@ describe('auth middleware', () => {
|
||||
expect(findSession).toHaveBeenCalledWith('my-token');
|
||||
});
|
||||
});
|
||||
|
||||
describe('auth middleware — McpToken dispatch', () => {
|
||||
async function setupAppWithMcpToken(deps: Parameters<typeof createAuthMiddleware>[0]) {
|
||||
app = Fastify({ logger: false });
|
||||
const authMiddleware = createAuthMiddleware(deps);
|
||||
app.addHook('preHandler', authMiddleware);
|
||||
app.get('/protected', async (request) => ({
|
||||
userId: request.userId,
|
||||
mcpToken: request.mcpToken,
|
||||
}));
|
||||
return app.ready();
|
||||
}
|
||||
|
||||
it('routes mcpctl_pat_ bearers to findMcpToken and skips findSession', async () => {
|
||||
const findSession = vi.fn(async () => null);
|
||||
const findMcpToken = vi.fn(async () => ({
|
||||
tokenId: 'ctok1',
|
||||
tokenName: 'mytok',
|
||||
tokenSha: 'deadbeef',
|
||||
projectId: 'cproj1',
|
||||
projectName: 'myproj',
|
||||
ownerId: 'cuser1',
|
||||
expiresAt: null,
|
||||
revokedAt: null,
|
||||
}));
|
||||
await setupAppWithMcpToken({ findSession, findMcpToken });
|
||||
const res = await app.inject({
|
||||
method: 'GET',
|
||||
url: '/protected',
|
||||
headers: { authorization: 'Bearer mcpctl_pat_abcdefghij' },
|
||||
});
|
||||
expect(res.statusCode).toBe(200);
|
||||
expect(findSession).not.toHaveBeenCalled();
|
||||
expect(findMcpToken).toHaveBeenCalledTimes(1);
|
||||
const body = res.json<{ userId: string; mcpToken: { tokenName: string; projectName: string } }>();
|
||||
expect(body.userId).toBe('cuser1');
|
||||
expect(body.mcpToken.tokenName).toBe('mytok');
|
||||
expect(body.mcpToken.projectName).toBe('myproj');
|
||||
});
|
||||
|
||||
it('returns 401 for a revoked McpToken', async () => {
|
||||
await setupAppWithMcpToken({
|
||||
findSession: async () => null,
|
||||
findMcpToken: async () => ({
|
||||
tokenId: 'ctok1',
|
||||
tokenName: 'mytok',
|
||||
tokenSha: 'x',
|
||||
projectId: 'p',
|
||||
projectName: 'p',
|
||||
ownerId: 'u',
|
||||
expiresAt: null,
|
||||
revokedAt: new Date(),
|
||||
}),
|
||||
});
|
||||
const res = await app.inject({
|
||||
method: 'GET',
|
||||
url: '/protected',
|
||||
headers: { authorization: 'Bearer mcpctl_pat_revoked' },
|
||||
});
|
||||
expect(res.statusCode).toBe(401);
|
||||
expect(res.json<{ error: string }>().error).toContain('revoked');
|
||||
});
|
||||
|
||||
it('returns 401 when a mcpctl_pat_ bearer arrives but findMcpToken is not configured', async () => {
|
||||
await setupAppWithMcpToken({ findSession: async () => null });
|
||||
const res = await app.inject({
|
||||
method: 'GET',
|
||||
url: '/protected',
|
||||
headers: { authorization: 'Bearer mcpctl_pat_no-lookup-wired' },
|
||||
});
|
||||
expect(res.statusCode).toBe(401);
|
||||
});
|
||||
});
|
||||
|
||||
246
src/mcpd/tests/mcp-token-service.test.ts
Normal file
246
src/mcpd/tests/mcp-token-service.test.ts
Normal file
@@ -0,0 +1,246 @@
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import { McpTokenService, PermissionCeilingError } from '../src/services/mcp-token.service.js';
|
||||
import { NotFoundError, ConflictError } from '../src/services/mcp-server.service.js';
|
||||
import type { IMcpTokenRepository, McpTokenWithRelations } from '../src/repositories/interfaces.js';
|
||||
import type { IProjectRepository } from '../src/repositories/project.repository.js';
|
||||
import type { IRbacDefinitionRepository } from '../src/repositories/rbac-definition.repository.js';
|
||||
import type { RbacService } from '../src/services/rbac.service.js';
|
||||
import { hashToken, isMcpToken, TOKEN_PREFIX } from '@mcpctl/shared';
|
||||
|
||||
const PROJECT = { id: 'cproj1', name: 'myproj' };
|
||||
|
||||
function makeRow(overrides: Partial<McpTokenWithRelations> = {}): McpTokenWithRelations {
|
||||
return {
|
||||
id: 'ctok1',
|
||||
name: 'mytok',
|
||||
projectId: PROJECT.id,
|
||||
tokenHash: 'deadbeef',
|
||||
tokenPrefix: 'mcpctl_pat_abcd',
|
||||
ownerId: 'cuser1',
|
||||
description: '',
|
||||
createdAt: new Date(),
|
||||
expiresAt: null,
|
||||
lastUsedAt: null,
|
||||
revokedAt: null,
|
||||
project: PROJECT,
|
||||
owner: { id: 'cuser1', email: 'alice@example.com' },
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
function mockTokenRepo(): IMcpTokenRepository {
|
||||
return {
|
||||
findAll: vi.fn(async () => []),
|
||||
findById: vi.fn(async () => null),
|
||||
findByHash: vi.fn(async () => null),
|
||||
findByNameAndProject: vi.fn(async () => null),
|
||||
create: vi.fn(async (input) => makeRow({
|
||||
name: input.name,
|
||||
projectId: input.projectId,
|
||||
tokenHash: input.tokenHash,
|
||||
tokenPrefix: input.tokenPrefix,
|
||||
ownerId: input.ownerId,
|
||||
description: input.description ?? '',
|
||||
expiresAt: input.expiresAt ?? null,
|
||||
})),
|
||||
revoke: vi.fn(async (id) => makeRow({ id, revokedAt: new Date() })),
|
||||
touchLastUsed: vi.fn(async () => {}),
|
||||
delete: vi.fn(async () => {}),
|
||||
};
|
||||
}
|
||||
|
||||
function mockProjectRepo(): IProjectRepository {
|
||||
return {
|
||||
findById: vi.fn(async (id) => (id === PROJECT.id ? PROJECT : null)),
|
||||
findByName: vi.fn(async (name) => (name === PROJECT.name ? PROJECT : null)),
|
||||
// minimal stubs for the rest — not exercised in these tests
|
||||
findAll: vi.fn(async () => []),
|
||||
create: vi.fn(),
|
||||
update: vi.fn(),
|
||||
delete: vi.fn(),
|
||||
attachServer: vi.fn(),
|
||||
detachServer: vi.fn(),
|
||||
listServers: vi.fn(async () => []),
|
||||
} as unknown as IProjectRepository;
|
||||
}
|
||||
|
||||
function mockRbacRepo(): IRbacDefinitionRepository {
|
||||
return {
|
||||
findAll: vi.fn(async () => []),
|
||||
findById: vi.fn(async () => null),
|
||||
findByName: vi.fn(async () => null),
|
||||
create: vi.fn(async () => ({ id: 'rbac-1', name: 'x', subjects: [], roleBindings: [], version: 1, createdAt: new Date(), updatedAt: new Date() })),
|
||||
update: vi.fn(),
|
||||
delete: vi.fn(async () => {}),
|
||||
};
|
||||
}
|
||||
|
||||
function mockRbacService(overrides: Partial<RbacService> = {}): RbacService {
|
||||
return {
|
||||
canAccess: vi.fn(async () => true),
|
||||
canRunOperation: vi.fn(async () => true),
|
||||
getAllowedScope: vi.fn(async () => ({ wildcard: true, names: new Set() })),
|
||||
getPermissions: vi.fn(async () => []),
|
||||
...overrides,
|
||||
} as unknown as RbacService;
|
||||
}
|
||||
|
||||
describe('McpTokenService.create', () => {
|
||||
let tokenRepo: ReturnType<typeof mockTokenRepo>;
|
||||
let projectRepo: IProjectRepository;
|
||||
let rbacRepo: ReturnType<typeof mockRbacRepo>;
|
||||
let rbacService: RbacService;
|
||||
let service: McpTokenService;
|
||||
|
||||
beforeEach(() => {
|
||||
tokenRepo = mockTokenRepo();
|
||||
projectRepo = mockProjectRepo();
|
||||
rbacRepo = mockRbacRepo();
|
||||
rbacService = mockRbacService();
|
||||
service = new McpTokenService(tokenRepo, projectRepo, rbacRepo, rbacService);
|
||||
});
|
||||
|
||||
it('creates a token with no bindings (rbacMode=empty, default)', async () => {
|
||||
const result = await service.create('cuser1', {
|
||||
name: 'mytok',
|
||||
projectId: PROJECT.id,
|
||||
});
|
||||
expect(result.raw).toMatch(new RegExp(`^${TOKEN_PREFIX}`));
|
||||
expect(isMcpToken(result.raw)).toBe(true);
|
||||
expect(tokenRepo.create).toHaveBeenCalledTimes(1);
|
||||
// Hash must be persisted, never raw
|
||||
const args = vi.mocked(tokenRepo.create).mock.calls[0]![0];
|
||||
expect(args.tokenHash).toBe(hashToken(result.raw));
|
||||
expect(args.tokenPrefix).toBe(result.raw.slice(0, 16));
|
||||
// No RBAC definition should be created when there are no bindings
|
||||
expect(rbacRepo.create).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('creates an RbacDefinition with subject McpToken:<sha> when bindings are given', async () => {
|
||||
const result = await service.create('cuser1', {
|
||||
name: 'mytok',
|
||||
projectId: PROJECT.id,
|
||||
bindings: [{ role: 'view', resource: 'servers' }],
|
||||
});
|
||||
expect(rbacRepo.create).toHaveBeenCalledTimes(1);
|
||||
const defArgs = vi.mocked(rbacRepo.create).mock.calls[0]![0];
|
||||
const subjects = defArgs.subjects as Array<{ kind: string; name: string }>;
|
||||
expect(subjects).toEqual([{ kind: 'McpToken', name: hashToken(result.raw) }]);
|
||||
expect(defArgs.roleBindings).toEqual([{ role: 'view', resource: 'servers' }]);
|
||||
});
|
||||
|
||||
it('rejects bindings the creator does not have (ceiling violation)', async () => {
|
||||
rbacService = mockRbacService({
|
||||
canAccess: vi.fn(async () => false),
|
||||
} as Partial<RbacService>);
|
||||
service = new McpTokenService(tokenRepo, projectRepo, rbacRepo, rbacService);
|
||||
|
||||
await expect(
|
||||
service.create('cuser1', {
|
||||
name: 'mytok',
|
||||
projectId: PROJECT.id,
|
||||
bindings: [{ role: 'edit', resource: 'servers' }],
|
||||
}),
|
||||
).rejects.toThrow(PermissionCeilingError);
|
||||
expect(tokenRepo.create).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('clones the creator\'s permissions when rbacMode=clone', async () => {
|
||||
rbacService = mockRbacService({
|
||||
getPermissions: vi.fn(async () => [
|
||||
{ role: 'view', resource: 'servers' },
|
||||
{ role: 'run', action: 'logs' },
|
||||
]),
|
||||
} as Partial<RbacService>);
|
||||
service = new McpTokenService(tokenRepo, projectRepo, rbacRepo, rbacService);
|
||||
|
||||
await service.create('cuser1', {
|
||||
name: 'mytok',
|
||||
projectId: PROJECT.id,
|
||||
rbacMode: 'clone',
|
||||
});
|
||||
expect(rbacRepo.create).toHaveBeenCalledTimes(1);
|
||||
const defArgs = vi.mocked(rbacRepo.create).mock.calls[0]![0];
|
||||
expect(defArgs.roleBindings).toEqual([
|
||||
{ role: 'view', resource: 'servers' },
|
||||
{ role: 'run', action: 'logs' },
|
||||
]);
|
||||
});
|
||||
|
||||
it('throws NotFoundError if project does not exist', async () => {
|
||||
await expect(
|
||||
service.create('cuser1', { name: 'mytok', projectId: 'nope' }),
|
||||
).rejects.toThrow(NotFoundError);
|
||||
});
|
||||
|
||||
it('throws ConflictError if active token with same name in same project exists', async () => {
|
||||
vi.mocked(tokenRepo.findByNameAndProject).mockResolvedValueOnce(makeRow());
|
||||
await expect(
|
||||
service.create('cuser1', { name: 'mytok', projectId: PROJECT.id }),
|
||||
).rejects.toThrow(ConflictError);
|
||||
});
|
||||
});
|
||||
|
||||
describe('McpTokenService.introspectRaw', () => {
|
||||
let tokenRepo: ReturnType<typeof mockTokenRepo>;
|
||||
let service: McpTokenService;
|
||||
|
||||
beforeEach(() => {
|
||||
tokenRepo = mockTokenRepo();
|
||||
service = new McpTokenService(tokenRepo, mockProjectRepo(), mockRbacRepo(), mockRbacService());
|
||||
});
|
||||
|
||||
it('returns ok=false for unknown tokens', async () => {
|
||||
const result = await service.introspectRaw(`${TOKEN_PREFIX}unknown`);
|
||||
expect(result.ok).toBe(false);
|
||||
expect(result.tokenName).toBeUndefined();
|
||||
});
|
||||
|
||||
it('returns ok=true and principal info for active tokens, and updates lastUsedAt', async () => {
|
||||
const raw = `${TOKEN_PREFIX}aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa`;
|
||||
const hash = hashToken(raw);
|
||||
vi.mocked(tokenRepo.findByHash).mockResolvedValueOnce(makeRow({ tokenHash: hash }));
|
||||
const result = await service.introspectRaw(raw);
|
||||
expect(result.ok).toBe(true);
|
||||
expect(result.projectName).toBe(PROJECT.name);
|
||||
expect(result.tokenName).toBe('mytok');
|
||||
expect(tokenRepo.touchLastUsed).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('rejects revoked tokens', async () => {
|
||||
const raw = `${TOKEN_PREFIX}bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb`;
|
||||
vi.mocked(tokenRepo.findByHash).mockResolvedValueOnce(makeRow({ tokenHash: hashToken(raw), revokedAt: new Date() }));
|
||||
const result = await service.introspectRaw(raw);
|
||||
expect(result.ok).toBe(false);
|
||||
expect(result.revoked).toBe(true);
|
||||
});
|
||||
|
||||
it('rejects expired tokens', async () => {
|
||||
const raw = `${TOKEN_PREFIX}cccccccccccccccccccccccccccccccc`;
|
||||
const past = new Date(Date.now() - 60_000);
|
||||
vi.mocked(tokenRepo.findByHash).mockResolvedValueOnce(makeRow({ tokenHash: hashToken(raw), expiresAt: past }));
|
||||
const result = await service.introspectRaw(raw);
|
||||
expect(result.ok).toBe(false);
|
||||
expect(result.expired).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('McpTokenService.revoke', () => {
|
||||
it('marks revokedAt and removes the auto-created RbacDefinition', async () => {
|
||||
const tokenRepo = mockTokenRepo();
|
||||
const rbacRepo = mockRbacRepo();
|
||||
const service = new McpTokenService(tokenRepo, mockProjectRepo(), rbacRepo, mockRbacService());
|
||||
|
||||
const row = makeRow();
|
||||
vi.mocked(tokenRepo.findById).mockResolvedValue(row);
|
||||
vi.mocked(rbacRepo.findByName).mockResolvedValue({
|
||||
id: 'rbac-ctok1', name: 'mcptoken-ctok1', subjects: [], roleBindings: [], version: 1, createdAt: new Date(), updatedAt: new Date(),
|
||||
});
|
||||
|
||||
await service.revoke('ctok1');
|
||||
|
||||
expect(tokenRepo.revoke).toHaveBeenCalledWith('ctok1');
|
||||
expect(rbacRepo.findByName).toHaveBeenCalledWith('mcptoken-ctok1');
|
||||
expect(rbacRepo.delete).toHaveBeenCalledWith('rbac-ctok1');
|
||||
});
|
||||
});
|
||||
@@ -10,6 +10,7 @@
|
||||
"clean": "rimraf dist",
|
||||
"dev": "tsx watch src/index.ts",
|
||||
"start": "node dist/index.js",
|
||||
"serve": "node dist/serve.js",
|
||||
"test": "vitest",
|
||||
"test:run": "vitest run",
|
||||
"test:smoke": "vitest run --config vitest.smoke.config.ts"
|
||||
|
||||
@@ -10,11 +10,17 @@ import type { McpdClient } from '../http/mcpd-client.js';
|
||||
const BATCH_SIZE = 50;
|
||||
const FLUSH_INTERVAL_MS = 5_000;
|
||||
|
||||
interface SessionPrincipal {
|
||||
userName?: string;
|
||||
tokenName?: string;
|
||||
tokenSha?: string;
|
||||
}
|
||||
|
||||
export class AuditCollector {
|
||||
private queue: AuditEvent[] = [];
|
||||
private flushTimer: ReturnType<typeof setInterval> | null = null;
|
||||
private flushing = false;
|
||||
private sessionUserNames = new Map<string, string>();
|
||||
private sessionPrincipals = new Map<string, SessionPrincipal>();
|
||||
|
||||
constructor(
|
||||
private readonly mcpdClient: McpdClient,
|
||||
@@ -25,15 +31,31 @@ export class AuditCollector {
|
||||
|
||||
/** Register a userName for a session. All future events for this session auto-fill it. */
|
||||
setSessionUserName(sessionId: string, userName: string): void {
|
||||
this.sessionUserNames.set(sessionId, userName);
|
||||
const existing = this.sessionPrincipals.get(sessionId) ?? {};
|
||||
this.sessionPrincipals.set(sessionId, { ...existing, userName });
|
||||
}
|
||||
|
||||
/** Queue an audit event. Auto-fills projectName and userName (from session map). */
|
||||
/** Register McpToken identity for a session (HTTP-mode authenticated requests). */
|
||||
setSessionMcpToken(sessionId: string, token: { tokenName: string; tokenSha: string }): void {
|
||||
const existing = this.sessionPrincipals.get(sessionId) ?? {};
|
||||
this.sessionPrincipals.set(sessionId, { ...existing, tokenName: token.tokenName, tokenSha: token.tokenSha });
|
||||
}
|
||||
|
||||
/** Look up the McpToken SHA for a session. Returns undefined for non-HTTP-mode sessions. */
|
||||
getSessionMcpTokenSha(sessionId: string): string | undefined {
|
||||
return this.sessionPrincipals.get(sessionId)?.tokenSha;
|
||||
}
|
||||
|
||||
/** Queue an audit event. Auto-fills projectName, userName, tokenName, and tokenSha. */
|
||||
emit(event: Omit<AuditEvent, 'projectName'>): void {
|
||||
const enriched: AuditEvent = { ...event, projectName: this.projectName };
|
||||
if (!enriched.userName && enriched.sessionId) {
|
||||
const name = this.sessionUserNames.get(enriched.sessionId);
|
||||
if (name) enriched.userName = name;
|
||||
if (enriched.sessionId) {
|
||||
const principal = this.sessionPrincipals.get(enriched.sessionId);
|
||||
if (principal) {
|
||||
if (!enriched.userName && principal.userName) enriched.userName = principal.userName;
|
||||
if (!enriched.tokenName && principal.tokenName) enriched.tokenName = principal.tokenName;
|
||||
if (!enriched.tokenSha && principal.tokenSha) enriched.tokenSha = principal.tokenSha;
|
||||
}
|
||||
}
|
||||
this.queue.push(enriched);
|
||||
if (this.queue.length >= BATCH_SIZE) {
|
||||
|
||||
@@ -32,5 +32,9 @@ export interface AuditEvent {
|
||||
correlationId?: string;
|
||||
parentEventId?: string;
|
||||
userName?: string;
|
||||
/** Set when the session authenticated via an McpToken (HTTP-mode mcplocal). */
|
||||
tokenName?: string;
|
||||
/** SHA-256 hash of the McpToken that made the request. */
|
||||
tokenSha?: string;
|
||||
payload: Record<string, unknown>;
|
||||
}
|
||||
|
||||
@@ -46,7 +46,13 @@ export async function refreshProjectUpstreams(
|
||||
servers = await mcpdClient.get<McpdServer[]>(path);
|
||||
}
|
||||
|
||||
return syncUpstreams(router, mcpdClient, servers);
|
||||
// Downstream upstream-proxy calls go through `mcpdClient` too. In HTTP-mode
|
||||
// mcplocal the pod has no credentials of its own, so the default token on
|
||||
// `mcpdClient` is an empty string — every /api/v1/mcp/proxy call would 401.
|
||||
// Bind a per-request client with the caller's bearer so each McpdUpstream
|
||||
// forwards the same identity that passed project discovery.
|
||||
const upstreamClient = authToken ? mcpdClient.withToken(authToken) : mcpdClient;
|
||||
return syncUpstreams(router, upstreamClient, servers);
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -3,6 +3,21 @@
|
||||
*
|
||||
* Tracks whether a session has gone through the prompt selection flow.
|
||||
* When gated, only begin_session is accessible. After ungating, all tools work.
|
||||
*
|
||||
* Per-token ungate cache:
|
||||
* When the caller authenticated via an `McpToken` (HTTP-mode service agent),
|
||||
* we also remember the ungate keyed on the token's SHA. Subsequent sessions
|
||||
* from the same token automatically start ungated for a TTL window.
|
||||
*
|
||||
* Why: LiteLLM and similar MCP-proxying clients don't preserve the
|
||||
* `mcp-session-id` header across chat completion calls, so every tool call
|
||||
* lands on a fresh upstream session — which would otherwise be gated anew,
|
||||
* forcing the agent into a begin_session loop. Keying on the token (which IS
|
||||
* preserved, because it's in the Authorization header) gives us a stable
|
||||
* identity that survives stateless proxies.
|
||||
*
|
||||
* Claude Code's stdio path keeps its session-id, so this code is a no-op for
|
||||
* that case (session-id ungate still applies, token ungate is purely additive).
|
||||
*/
|
||||
|
||||
import type { PromptIndexEntry, TagMatchResult } from './tag-matcher.js';
|
||||
@@ -14,15 +29,37 @@ export interface SessionState {
|
||||
briefing: string | null;
|
||||
}
|
||||
|
||||
interface TokenUngateEntry {
|
||||
tokenSha: string;
|
||||
tags: string[];
|
||||
ungatedAt: number;
|
||||
retrievedPrompts: Set<string>;
|
||||
}
|
||||
|
||||
/** Default TTL for per-token ungate cache (1 hour). Tunable via env for testing. */
|
||||
const DEFAULT_TOKEN_UNGATE_TTL_MS = Number(process.env['MCPLOCAL_TOKEN_UNGATE_TTL_MS']) || 60 * 60 * 1000;
|
||||
|
||||
export class SessionGate {
|
||||
private sessions = new Map<string, SessionState>();
|
||||
private tokenUngates = new Map<string, TokenUngateEntry>();
|
||||
private readonly tokenUngateTtlMs: number;
|
||||
|
||||
/** Create a new session. Starts gated if the project is gated. */
|
||||
createSession(sessionId: string, projectGated: boolean): void {
|
||||
constructor(tokenUngateTtlMs = DEFAULT_TOKEN_UNGATE_TTL_MS) {
|
||||
this.tokenUngateTtlMs = tokenUngateTtlMs;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new session. Starts gated if the project is gated, UNLESS the
|
||||
* caller's McpToken already ungated within the last TTL window — in which
|
||||
* case the session inherits the previous tags + retrievedPrompts so the
|
||||
* agent doesn't get the full gated greeting on every fresh session.
|
||||
*/
|
||||
createSession(sessionId: string, projectGated: boolean, tokenSha?: string): void {
|
||||
const priorEntry = tokenSha ? this.getActiveTokenEntry(tokenSha) : null;
|
||||
this.sessions.set(sessionId, {
|
||||
gated: projectGated,
|
||||
tags: [],
|
||||
retrievedPrompts: new Set(),
|
||||
gated: projectGated && priorEntry === null,
|
||||
tags: priorEntry ? [...priorEntry.tags] : [],
|
||||
retrievedPrompts: priorEntry ? new Set(priorEntry.retrievedPrompts) : new Set(),
|
||||
briefing: null,
|
||||
});
|
||||
}
|
||||
@@ -37,18 +74,37 @@ export class SessionGate {
|
||||
return this.sessions.get(sessionId)?.gated ?? false;
|
||||
}
|
||||
|
||||
/** Ungate a session after prompt selection is complete. */
|
||||
ungate(sessionId: string, tags: string[], matchResult: TagMatchResult): void {
|
||||
/** True when a token has an active (non-expired) ungate entry. */
|
||||
isTokenUngated(tokenSha: string): boolean {
|
||||
return this.getActiveTokenEntry(tokenSha) !== null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Ungate a session after prompt selection is complete.
|
||||
*
|
||||
* When `tokenSha` is supplied, also remember the ungate keyed on the token
|
||||
* so future sessions from the same token start ungated (survives proxies
|
||||
* that drop `mcp-session-id`).
|
||||
*/
|
||||
ungate(sessionId: string, tags: string[], matchResult: TagMatchResult, tokenSha?: string): void {
|
||||
const session = this.sessions.get(sessionId);
|
||||
if (!session) return;
|
||||
|
||||
session.gated = false;
|
||||
session.tags = [...session.tags, ...tags];
|
||||
|
||||
// Track which prompts have been sent
|
||||
for (const p of matchResult.fullContent) {
|
||||
session.retrievedPrompts.add(p.name);
|
||||
}
|
||||
|
||||
if (tokenSha !== undefined && tokenSha !== '') {
|
||||
this.tokenUngates.set(tokenSha, {
|
||||
tokenSha,
|
||||
tags: [...session.tags],
|
||||
ungatedAt: Date.now(),
|
||||
retrievedPrompts: new Set(session.retrievedPrompts),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/** Record additional prompts retrieved via read_prompts. */
|
||||
@@ -73,4 +129,19 @@ export class SessionGate {
|
||||
removeSession(sessionId: string): void {
|
||||
this.sessions.delete(sessionId);
|
||||
}
|
||||
|
||||
/** Forget a token's ungate entry (e.g. on revocation signal). */
|
||||
revokeToken(tokenSha: string): void {
|
||||
this.tokenUngates.delete(tokenSha);
|
||||
}
|
||||
|
||||
private getActiveTokenEntry(tokenSha: string): TokenUngateEntry | null {
|
||||
const entry = this.tokenUngates.get(tokenSha);
|
||||
if (!entry) return null;
|
||||
if (Date.now() - entry.ungatedAt > this.tokenUngateTtlMs) {
|
||||
this.tokenUngates.delete(tokenSha);
|
||||
return null;
|
||||
}
|
||||
return entry;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -60,6 +60,16 @@ export class McpdClient {
|
||||
return new McpdClient(this.baseUrl, this.token, { ...this.extraHeaders }, timeoutMs);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new client with a different Bearer token. The HTTP-mode mcplocal
|
||||
* pod has no credentials of its own — each incoming client request carries
|
||||
* its McpToken, and this method is how we thread that token through to the
|
||||
* McpdUpstream instances created during project discovery.
|
||||
*/
|
||||
withToken(token: string): McpdClient {
|
||||
return new McpdClient(this.baseUrl, token, { ...this.extraHeaders }, this.timeoutMs);
|
||||
}
|
||||
|
||||
async get<T>(path: string): Promise<T> {
|
||||
return this.request<T>('GET', path);
|
||||
}
|
||||
|
||||
@@ -62,21 +62,31 @@ export function registerProjectMcpEndpoint(app: FastifyInstance, mcpdClient: Mcp
|
||||
return existing.router;
|
||||
}
|
||||
|
||||
// HTTP-mode mcplocal has no pod-level credentials — the default
|
||||
// `mcpdClient.token` is an empty string. Every downstream call from this
|
||||
// request (upstream discovery, LLM config fetch, prompt index for
|
||||
// begin_session) has to use the CALLER's McpToken as the bearer, or mcpd
|
||||
// rejects with 401. Build one per-request client here and thread it
|
||||
// everywhere instead of sprinkling `.withToken(authToken)` at each call site.
|
||||
const requestClient = authToken ? mcpdClient.withToken(authToken) : mcpdClient;
|
||||
|
||||
// Create new router or refresh existing one
|
||||
const router = existing?.router ?? new McpRouter();
|
||||
await refreshProjectUpstreams(router, mcpdClient, projectName, authToken);
|
||||
|
||||
// Resolve project LLM model: local override → mcpd recommendation → global default
|
||||
const localOverride = loadProjectLlmOverride(projectName);
|
||||
const mcpdConfig = await fetchProjectLlmConfig(mcpdClient, projectName);
|
||||
const mcpdConfig = await fetchProjectLlmConfig(requestClient, projectName);
|
||||
const resolvedModel = localOverride?.model ?? mcpdConfig.llmModel ?? undefined;
|
||||
|
||||
// If project llmProvider is "none", disable LLM for this project
|
||||
const llmDisabled = mcpdConfig.llmProvider === 'none' || localOverride?.provider === 'none';
|
||||
const effectiveRegistry = llmDisabled ? null : (providerRegistry ?? null);
|
||||
|
||||
// Configure prompt resources with SA-scoped client for RBAC
|
||||
const saClient = mcpdClient.withHeaders({ 'X-Service-Account': `project:${projectName}` });
|
||||
// Configure prompt resources with SA-scoped client for RBAC.
|
||||
// Keep the X-Service-Account header for mcpd-side audit tagging, but carry
|
||||
// the caller's bearer so auth passes (the principal resolves as McpToken:<sha>).
|
||||
const saClient = requestClient.withHeaders({ 'X-Service-Account': `project:${projectName}` });
|
||||
router.setPromptConfig(saClient, projectName);
|
||||
|
||||
// System prompt fetcher for LLM consumers (uses router's cached fetcher)
|
||||
@@ -97,7 +107,8 @@ export function registerProjectMcpEndpoint(app: FastifyInstance, mcpdClient: Mcp
|
||||
?? effectiveRegistry?.getActiveName()
|
||||
?? 'none';
|
||||
const llmModel = resolvedModel ?? 'default';
|
||||
const cache = new FileCache(`${llmProvider}--${llmModel}--${proxyModelName}`);
|
||||
const cacheConfig = process.env.MCPLOCAL_CACHE_DIR ? { dir: process.env.MCPLOCAL_CACHE_DIR } : undefined;
|
||||
const cache = new FileCache(`${llmProvider}--${llmModel}--${proxyModelName}`, cacheConfig);
|
||||
router.setProxyModel(proxyModelName, llmAdapter, cache);
|
||||
|
||||
// Per-server proxymodel overrides (if mcpd provides them)
|
||||
@@ -200,6 +211,17 @@ export function registerProjectMcpEndpoint(app: FastifyInstance, mcpdClient: Mcp
|
||||
void ensureUserName().then((name) => {
|
||||
if (name) collector.setSessionUserName(id, name);
|
||||
});
|
||||
|
||||
// HTTP-mode mcplocal: if the token-auth preHandler attached an McpToken
|
||||
// principal to the request, tag the session so audit events carry the
|
||||
// tokenName/tokenSha alongside (or instead of) userName.
|
||||
const principal = request.mcpToken;
|
||||
if (principal) {
|
||||
collector.setSessionMcpToken(id, {
|
||||
tokenName: principal.tokenName,
|
||||
tokenSha: principal.tokenSha,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Audit: session_bind
|
||||
@@ -388,7 +410,7 @@ export function registerProjectMcpEndpoint(app: FastifyInstance, mcpdClient: Mcp
|
||||
const llmAdapter = providerRegistry
|
||||
? new LLMProviderAdapter(providerRegistry)
|
||||
: { complete: async () => '', available: () => false };
|
||||
const cache = new FileCache('dynamic');
|
||||
const cache = new FileCache('dynamic', process.env.MCPLOCAL_CACHE_DIR ? { dir: process.env.MCPLOCAL_CACHE_DIR } : undefined);
|
||||
|
||||
if (serverName && serverProxyModel) {
|
||||
entry.router.setServerProxyModel(serverName, serverProxyModel, llmAdapter, cache);
|
||||
|
||||
114
src/mcplocal/src/http/token-auth.ts
Normal file
114
src/mcplocal/src/http/token-auth.ts
Normal file
@@ -0,0 +1,114 @@
|
||||
/**
|
||||
* Fastify preHandler that authenticates `/projects/*` and `/mcp` requests
|
||||
* against mcpd's McpToken introspection endpoint.
|
||||
*
|
||||
* Flow:
|
||||
* 1. Reject non-Bearer and non-`mcpctl_pat_` auth up front.
|
||||
* 2. Call `GET <mcpd>/api/v1/mcptokens/introspect` with the raw bearer.
|
||||
* 3. Cache the result (positive + negative TTLs) to avoid a round-trip per MCP call.
|
||||
* 4. Enforce `request.params.projectName === response.projectName`.
|
||||
* 5. Stash the principal on `request.mcpToken` for the audit collector.
|
||||
*/
|
||||
import type { FastifyRequest, FastifyReply } from 'fastify';
|
||||
import { isMcpToken, hashToken } from '@mcpctl/shared';
|
||||
|
||||
export interface TokenAuthOptions {
|
||||
mcpdUrl: string;
|
||||
/** TTL for a successful introspection, ms. Default 30_000. */
|
||||
positiveTtlMs?: number;
|
||||
/** TTL for a failed introspection, ms. Default 5_000. */
|
||||
negativeTtlMs?: number;
|
||||
/** Injectable HTTP fetcher for tests. Defaults to `fetch`. */
|
||||
fetch?: (url: string, init?: RequestInit) => Promise<Response>;
|
||||
}
|
||||
|
||||
export interface McpTokenPrincipal {
|
||||
tokenName: string;
|
||||
tokenSha: string;
|
||||
projectName: string;
|
||||
}
|
||||
|
||||
declare module 'fastify' {
|
||||
interface FastifyRequest {
|
||||
/** Populated by the token-auth preHandler when the bearer was a McpToken. */
|
||||
mcpToken?: McpTokenPrincipal;
|
||||
}
|
||||
}
|
||||
|
||||
interface IntrospectResponse {
|
||||
ok: boolean;
|
||||
tokenName?: string;
|
||||
tokenSha?: string;
|
||||
projectName?: string;
|
||||
revoked?: boolean;
|
||||
expired?: boolean;
|
||||
error?: string;
|
||||
}
|
||||
|
||||
interface CacheEntry {
|
||||
result: IntrospectResponse;
|
||||
expiresAt: number;
|
||||
}
|
||||
|
||||
export function createTokenAuthMiddleware(opts: TokenAuthOptions) {
|
||||
const positiveTtl = opts.positiveTtlMs ?? 30_000;
|
||||
const negativeTtl = opts.negativeTtlMs ?? 5_000;
|
||||
const fetchImpl = opts.fetch ?? (globalThis.fetch as typeof fetch);
|
||||
const cache = new Map<string, CacheEntry>();
|
||||
|
||||
async function introspect(raw: string): Promise<IntrospectResponse> {
|
||||
const key = hashToken(raw);
|
||||
const now = Date.now();
|
||||
const hit = cache.get(key);
|
||||
if (hit && hit.expiresAt > now) return hit.result;
|
||||
|
||||
try {
|
||||
const res = await fetchImpl(`${opts.mcpdUrl.replace(/\/$/, '')}/api/v1/mcptokens/introspect`, {
|
||||
method: 'GET',
|
||||
headers: { Authorization: `Bearer ${raw}` },
|
||||
});
|
||||
const body = (await res.json().catch(() => ({ ok: false, error: 'unreadable body' }))) as IntrospectResponse;
|
||||
const result: IntrospectResponse = res.ok ? body : { ...body, ok: false };
|
||||
cache.set(key, { result, expiresAt: now + (result.ok ? positiveTtl : negativeTtl) });
|
||||
return result;
|
||||
} catch (err) {
|
||||
const result: IntrospectResponse = { ok: false, error: err instanceof Error ? err.message : String(err) };
|
||||
cache.set(key, { result, expiresAt: now + negativeTtl });
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
return async function tokenAuth(request: FastifyRequest, reply: FastifyReply): Promise<void> {
|
||||
const header = request.headers.authorization;
|
||||
if (header === undefined || !header.startsWith('Bearer ')) {
|
||||
reply.code(401).send({ error: 'Missing Authorization bearer' });
|
||||
return;
|
||||
}
|
||||
const raw = header.slice(7);
|
||||
if (!isMcpToken(raw)) {
|
||||
reply.code(401).send({ error: 'Only mcpctl_pat_ bearers are accepted on this endpoint' });
|
||||
return;
|
||||
}
|
||||
|
||||
const introspection = await introspect(raw);
|
||||
if (!introspection.ok) {
|
||||
reply.code(401).send({
|
||||
error: introspection.revoked ? 'Token revoked' : introspection.expired ? 'Token expired' : 'Invalid token',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Project-scope check: token.projectName must match the path param.
|
||||
const params = request.params as { projectName?: string } | undefined;
|
||||
if (params?.projectName !== undefined && params.projectName !== introspection.projectName) {
|
||||
reply.code(403).send({ error: `Token is not valid for project '${params.projectName}'` });
|
||||
return;
|
||||
}
|
||||
|
||||
request.mcpToken = {
|
||||
tokenName: introspection.tokenName!,
|
||||
tokenSha: introspection.tokenSha!,
|
||||
projectName: introspection.projectName!,
|
||||
};
|
||||
};
|
||||
}
|
||||
@@ -25,6 +25,13 @@ export interface PluginContextDeps {
|
||||
queueNotification: (notification: JsonRpcNotification) => void;
|
||||
postToMcpd: (path: string, body: Record<string, unknown>) => Promise<unknown>;
|
||||
auditCollector?: AuditCollector;
|
||||
/**
|
||||
* Resolves the principal's McpToken SHA for this session, if the caller
|
||||
* authenticated via an McpToken. Called lazily so the value reflects the
|
||||
* session's current state even when the token is attached after the plugin
|
||||
* context is created.
|
||||
*/
|
||||
getMcpTokenSha?: () => string | undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -55,6 +62,11 @@ export class PluginContextImpl implements PluginSessionContext {
|
||||
this.deps = deps;
|
||||
}
|
||||
|
||||
/** McpToken SHA for the current caller, or undefined for STDIO/session-auth callers. */
|
||||
getMcpTokenSha(): string | undefined {
|
||||
return this.deps.getMcpTokenSha?.();
|
||||
}
|
||||
|
||||
registerTool(tool: ToolDefinition, handler: VirtualToolHandler): void {
|
||||
this.virtualTools.set(tool.name, { definition: tool, handler });
|
||||
}
|
||||
|
||||
@@ -50,6 +50,14 @@ export interface PluginSessionContext {
|
||||
|
||||
// Audit event emission (auto-fills sessionId and projectName)
|
||||
emitAuditEvent(event: Omit<AuditEvent, 'sessionId' | 'projectName'>): void;
|
||||
|
||||
/**
|
||||
* McpToken SHA for the current caller, or undefined if the session was
|
||||
* authenticated via a User session (STDIO/Claude Code path). Plugins can use
|
||||
* this to key state on the token principal rather than the session-id —
|
||||
* useful when the session-id doesn't survive a proxy (e.g. LiteLLM).
|
||||
*/
|
||||
getMcpTokenSha(): string | undefined;
|
||||
}
|
||||
|
||||
// ── Virtual Server ──────────────────────────────────────────────────
|
||||
|
||||
@@ -40,7 +40,11 @@ export function createGatePlugin(config: GatePluginConfig = {}): ProxyModelPlugi
|
||||
description: 'Gated session flow: begin_session → prompt selection → ungate.',
|
||||
|
||||
async onSessionCreate(ctx) {
|
||||
sessionGate.createSession(ctx.sessionId, isGated);
|
||||
// Pass the caller's McpToken SHA so the gate can honor a cross-session
|
||||
// ungate cache keyed on the token principal. Fixes the LiteLLM case where
|
||||
// each tool call lands on a fresh mcp-session-id → would otherwise loop
|
||||
// on begin_session forever.
|
||||
sessionGate.createSession(ctx.sessionId, isGated, ctx.getMcpTokenSha());
|
||||
|
||||
// Register begin_session virtual tool
|
||||
ctx.registerTool(getBeginSessionTool(llmSelector), async (args, callCtx) => {
|
||||
@@ -264,8 +268,9 @@ async function handleBeginSession(
|
||||
matchResult = tagMatcher.match(tags, promptIndex);
|
||||
}
|
||||
|
||||
// Ungate the session
|
||||
sessionGate.ungate(ctx.sessionId, tags, matchResult);
|
||||
// Ungate the session (and remember the ungate per McpToken if this is a
|
||||
// service-token request, so the next session from the same token skips the gate).
|
||||
sessionGate.ungate(ctx.sessionId, tags, matchResult, ctx.getMcpTokenSha());
|
||||
ctx.queueNotification('notifications/tools/list_changed');
|
||||
|
||||
// Audit: gate_decision for begin_session
|
||||
@@ -451,8 +456,8 @@ async function handleGatedIntercept(
|
||||
const promptIndex = await ctx.fetchPromptIndex();
|
||||
const matchResult = tagMatcher.match(tags, promptIndex);
|
||||
|
||||
// Ungate the session
|
||||
sessionGate.ungate(ctx.sessionId, tags, matchResult);
|
||||
// Ungate the session (and remember per-token if the caller is a McpToken).
|
||||
sessionGate.ungate(ctx.sessionId, tags, matchResult, ctx.getMcpTokenSha());
|
||||
ctx.queueNotification('notifications/tools/list_changed');
|
||||
|
||||
// Audit: gate_decision for auto-intercept
|
||||
@@ -522,7 +527,7 @@ async function handleGatedIntercept(
|
||||
return response;
|
||||
} catch {
|
||||
// If prompt retrieval fails, just ungate and route normally
|
||||
sessionGate.ungate(ctx.sessionId, tags, { fullContent: [], indexOnly: [], remaining: [] });
|
||||
sessionGate.ungate(ctx.sessionId, tags, { fullContent: [], indexOnly: [], remaining: [] }, ctx.getMcpTokenSha());
|
||||
ctx.queueNotification('notifications/tools/list_changed');
|
||||
return ctx.routeToUpstream(request);
|
||||
}
|
||||
|
||||
@@ -198,6 +198,10 @@ export class McpRouter {
|
||||
return this.mcpdClient.post(path, body);
|
||||
},
|
||||
...(this.auditCollector ? { auditCollector: this.auditCollector } : {}),
|
||||
// Lazily resolve the caller's McpToken SHA via the audit collector's
|
||||
// session principal map. The token is attached in onsessioninitialized,
|
||||
// which runs before any plugin context is created, so this is stable.
|
||||
getMcpTokenSha: () => this.auditCollector?.getSessionMcpTokenSha(sessionId),
|
||||
};
|
||||
|
||||
ctx = new PluginContextImpl(deps);
|
||||
|
||||
111
src/mcplocal/src/serve.ts
Normal file
111
src/mcplocal/src/serve.ts
Normal file
@@ -0,0 +1,111 @@
|
||||
#!/usr/bin/env node
|
||||
/**
|
||||
* HTTP-only entry for the containerized mcplocal (deployed behind Ingress as `mcp.ad.itaz.eu`).
|
||||
*
|
||||
* Differences from main.ts (the STDIO/systemd entry):
|
||||
* - No StdioProxyServer (there's no stdin/stdout MCP client in a pod).
|
||||
* - No `--upstream` flag (upstreams come from mcpd project discovery).
|
||||
* - Host + port from env (MCPLOCAL_HTTP_HOST / MCPLOCAL_HTTP_PORT).
|
||||
* - Requires MCPLOCAL_MCPD_URL to point at mcpd inside the cluster.
|
||||
* - Registers a token-auth preHandler on `/projects/*` and `/mcp`.
|
||||
* - FileCache directory honours MCPLOCAL_CACHE_DIR (wired via project-mcp-endpoint).
|
||||
*
|
||||
* Identity model: **the pod has no persistent identity to mcpd.** Every
|
||||
* inbound request's `Authorization: Bearer mcpctl_pat_…` is forwarded
|
||||
* verbatim for all downstream mcpd calls (introspect + project
|
||||
* discovery). mcpd's auth middleware dispatches on the `mcpctl_pat_`
|
||||
* prefix and resolves the McpToken principal. As a result there is
|
||||
* deliberately no MCPLOCAL_MCPD_TOKEN env var — adding one would only
|
||||
* create a rotation problem for a state we don't need.
|
||||
*/
|
||||
import { McpRouter } from './router.js';
|
||||
import { createHttpServer } from './http/server.js';
|
||||
import { loadHttpConfig, loadLlmProviders } from './http/config.js';
|
||||
import { createProvidersFromConfig } from './llm-config.js';
|
||||
import { createSecretStore } from '@mcpctl/shared';
|
||||
import { reloadStages, startWatchers, stopWatchers } from './proxymodel/watcher.js';
|
||||
import { createTokenAuthMiddleware } from './http/token-auth.js';
|
||||
|
||||
function requireEnv(name: string): string {
|
||||
const value = process.env[name];
|
||||
if (value === undefined || value === '') {
|
||||
throw new Error(`Required env var ${name} is not set`);
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
export async function serve(): Promise<void> {
|
||||
const mcpdUrl = requireEnv('MCPLOCAL_MCPD_URL');
|
||||
const httpHost = process.env.MCPLOCAL_HTTP_HOST ?? '0.0.0.0';
|
||||
const httpPort = Number(process.env.MCPLOCAL_HTTP_PORT ?? '3200');
|
||||
if (!Number.isFinite(httpPort) || httpPort <= 0) {
|
||||
throw new Error(`Invalid MCPLOCAL_HTTP_PORT: ${process.env.MCPLOCAL_HTTP_PORT}`);
|
||||
}
|
||||
// MCPLOCAL_CACHE_DIR is optional; FileCache reads it directly.
|
||||
const cacheDir = process.env.MCPLOCAL_CACHE_DIR;
|
||||
|
||||
// loadHttpConfig reads user-level config.json; we override with env.
|
||||
const baseConfig = loadHttpConfig();
|
||||
const httpConfig = {
|
||||
...baseConfig,
|
||||
httpHost,
|
||||
httpPort,
|
||||
mcpdUrl,
|
||||
};
|
||||
|
||||
// LLM providers (configured via mounted ConfigMap at ~/.mcpctl/config.json or env).
|
||||
const llmEntries = loadLlmProviders();
|
||||
const secretStore = await createSecretStore();
|
||||
const providerRegistry = await createProvidersFromConfig(llmEntries, secretStore);
|
||||
|
||||
process.stderr.write(
|
||||
`mcplocal-serve: mcpd=${mcpdUrl} host=${httpHost} port=${httpPort} cache=${cacheDir ?? '~/.mcpctl/cache'}\n`,
|
||||
);
|
||||
|
||||
const router = new McpRouter();
|
||||
|
||||
const httpServer = await createHttpServer(httpConfig, { router, providerRegistry });
|
||||
|
||||
// Auth preHandler: only protect the MCP surfaces. /health, /healthz, /proxymodels etc stay open.
|
||||
// Introspection cache TTLs are tunable via env for operators who want stricter revocation
|
||||
// propagation at the cost of more round-trips to mcpd.
|
||||
const positiveTtlMs = Number(process.env.MCPLOCAL_TOKEN_POSITIVE_TTL_MS ?? '30000');
|
||||
const negativeTtlMs = Number(process.env.MCPLOCAL_TOKEN_NEGATIVE_TTL_MS ?? '5000');
|
||||
const tokenAuth = createTokenAuthMiddleware({ mcpdUrl, positiveTtlMs, negativeTtlMs });
|
||||
httpServer.addHook('preHandler', async (request, reply) => {
|
||||
const url = request.url;
|
||||
if (!url.startsWith('/projects/') && !url.startsWith('/mcp')) return;
|
||||
await tokenAuth(request, reply);
|
||||
});
|
||||
|
||||
await httpServer.listen({ port: httpPort, host: httpHost });
|
||||
process.stderr.write(`mcplocal-serve listening on ${httpHost}:${httpPort}\n`);
|
||||
|
||||
// Hot-reload proxymodel stages from ~/.mcpctl/stages (same as main.ts).
|
||||
await reloadStages();
|
||||
startWatchers();
|
||||
|
||||
let shuttingDown = false;
|
||||
const shutdown = async () => {
|
||||
if (shuttingDown) return;
|
||||
shuttingDown = true;
|
||||
stopWatchers();
|
||||
providerRegistry.disposeAll();
|
||||
await httpServer.close();
|
||||
await router.closeAll();
|
||||
process.exit(0);
|
||||
};
|
||||
process.on('SIGTERM', () => void shutdown());
|
||||
process.on('SIGINT', () => void shutdown());
|
||||
}
|
||||
|
||||
const isMain =
|
||||
process.argv[1]?.endsWith('serve.js') ||
|
||||
process.argv[1]?.endsWith('serve.ts');
|
||||
|
||||
if (isMain) {
|
||||
serve().catch((err) => {
|
||||
process.stderr.write(`Fatal: ${err}\n`);
|
||||
process.exit(1);
|
||||
});
|
||||
}
|
||||
162
src/mcplocal/tests/http/token-auth.test.ts
Normal file
162
src/mcplocal/tests/http/token-auth.test.ts
Normal file
@@ -0,0 +1,162 @@
|
||||
/**
|
||||
* Unit tests for the HTTP-mode token-auth preHandler.
|
||||
*
|
||||
* Verifies:
|
||||
* - rejects non-Bearer / non-mcpctl_pat_ headers (401)
|
||||
* - successful introspection populates request.mcpToken
|
||||
* - positive results are cached up to the positive TTL
|
||||
* - **revoked tokens surface as 401 within the negative-TTL window** ≤ 5s
|
||||
* - wrong-project path → 403
|
||||
*/
|
||||
import { describe, it, expect, vi } from 'vitest';
|
||||
import Fastify from 'fastify';
|
||||
import { createTokenAuthMiddleware } from '../../src/http/token-auth.js';
|
||||
|
||||
interface IntrospectResponse {
|
||||
ok: boolean;
|
||||
tokenName?: string;
|
||||
tokenSha?: string;
|
||||
projectName?: string;
|
||||
revoked?: boolean;
|
||||
expired?: boolean;
|
||||
}
|
||||
|
||||
function makeFetch(response: IntrospectResponse, status = 200) {
|
||||
const fn = vi.fn(async () => ({
|
||||
ok: status >= 200 && status < 300,
|
||||
json: async () => response,
|
||||
}) as unknown as Response);
|
||||
return fn;
|
||||
}
|
||||
|
||||
async function setupApp(deps: Parameters<typeof createTokenAuthMiddleware>[0]) {
|
||||
const app = Fastify({ logger: false });
|
||||
const middleware = createTokenAuthMiddleware(deps);
|
||||
app.addHook('preHandler', middleware);
|
||||
app.get('/projects/:projectName/mcp', async (request) => ({
|
||||
ok: true,
|
||||
mcpToken: request.mcpToken,
|
||||
}));
|
||||
await app.ready();
|
||||
return app;
|
||||
}
|
||||
|
||||
describe('token-auth preHandler', () => {
|
||||
it('rejects requests with no Authorization header (401)', async () => {
|
||||
const app = await setupApp({ mcpdUrl: 'http://mcpd', fetch: makeFetch({ ok: true }) });
|
||||
const res = await app.inject({ method: 'GET', url: '/projects/foo/mcp' });
|
||||
expect(res.statusCode).toBe(401);
|
||||
await app.close();
|
||||
});
|
||||
|
||||
it('rejects bearers that are not mcpctl_pat_ tokens (401)', async () => {
|
||||
const fetchFn = makeFetch({ ok: true });
|
||||
const app = await setupApp({ mcpdUrl: 'http://mcpd', fetch: fetchFn });
|
||||
const res = await app.inject({
|
||||
method: 'GET',
|
||||
url: '/projects/foo/mcp',
|
||||
headers: { authorization: 'Bearer some-session-token' },
|
||||
});
|
||||
expect(res.statusCode).toBe(401);
|
||||
expect(fetchFn).not.toHaveBeenCalled();
|
||||
await app.close();
|
||||
});
|
||||
|
||||
it('passes valid tokens and populates request.mcpToken', async () => {
|
||||
const fetchFn = makeFetch({ ok: true, tokenName: 'demo', tokenSha: 'abc', projectName: 'foo' });
|
||||
const app = await setupApp({ mcpdUrl: 'http://mcpd', fetch: fetchFn });
|
||||
const res = await app.inject({
|
||||
method: 'GET',
|
||||
url: '/projects/foo/mcp',
|
||||
headers: { authorization: 'Bearer mcpctl_pat_valid' },
|
||||
});
|
||||
expect(res.statusCode).toBe(200);
|
||||
const body = res.json<{ mcpToken: { tokenName: string; projectName: string } }>();
|
||||
expect(body.mcpToken.tokenName).toBe('demo');
|
||||
expect(body.mcpToken.projectName).toBe('foo');
|
||||
await app.close();
|
||||
});
|
||||
|
||||
it('rejects with 403 when the token is bound to a different project', async () => {
|
||||
const fetchFn = makeFetch({ ok: true, tokenName: 'demo', tokenSha: 'abc', projectName: 'foo' });
|
||||
const app = await setupApp({ mcpdUrl: 'http://mcpd', fetch: fetchFn });
|
||||
const res = await app.inject({
|
||||
method: 'GET',
|
||||
url: '/projects/other/mcp',
|
||||
headers: { authorization: 'Bearer mcpctl_pat_valid' },
|
||||
});
|
||||
expect(res.statusCode).toBe(403);
|
||||
await app.close();
|
||||
});
|
||||
|
||||
it('caches positive introspections (does not re-hit mcpd within TTL)', async () => {
|
||||
const fetchFn = makeFetch({ ok: true, tokenName: 'demo', tokenSha: 'abc', projectName: 'foo' });
|
||||
const app = await setupApp({ mcpdUrl: 'http://mcpd', fetch: fetchFn, positiveTtlMs: 30_000 });
|
||||
const h = { authorization: 'Bearer mcpctl_pat_valid' };
|
||||
await app.inject({ method: 'GET', url: '/projects/foo/mcp', headers: h });
|
||||
await app.inject({ method: 'GET', url: '/projects/foo/mcp', headers: h });
|
||||
await app.inject({ method: 'GET', url: '/projects/foo/mcp', headers: h });
|
||||
expect(fetchFn).toHaveBeenCalledTimes(1);
|
||||
await app.close();
|
||||
});
|
||||
|
||||
it('surfaces revocation as 401 within the 5s negative cache (lag ≤ 5s)', async () => {
|
||||
// Simulate a revocation: first call returns ok:true, then flip to ok:false+revoked.
|
||||
let revoked = false;
|
||||
const fetchFn = vi.fn(async () => ({
|
||||
ok: !revoked,
|
||||
json: async () => revoked
|
||||
? { ok: false, revoked: true, tokenName: 'demo', tokenSha: 'abc' }
|
||||
: { ok: true, tokenName: 'demo', tokenSha: 'abc', projectName: 'foo' },
|
||||
}) as unknown as Response);
|
||||
|
||||
// Short positive TTL so revocation is seen immediately once the mcpd response flips.
|
||||
const app = await setupApp({
|
||||
mcpdUrl: 'http://mcpd',
|
||||
fetch: fetchFn,
|
||||
positiveTtlMs: 10,
|
||||
negativeTtlMs: 5_000,
|
||||
});
|
||||
const h = { authorization: 'Bearer mcpctl_pat_valid' };
|
||||
|
||||
const first = await app.inject({ method: 'GET', url: '/projects/foo/mcp', headers: h });
|
||||
expect(first.statusCode).toBe(200);
|
||||
|
||||
// Revoke out-of-band.
|
||||
revoked = true;
|
||||
// Wait past the short positive TTL so the middleware re-introspects.
|
||||
await new Promise((r) => setTimeout(r, 15));
|
||||
|
||||
const second = await app.inject({ method: 'GET', url: '/projects/foo/mcp', headers: h });
|
||||
expect(second.statusCode).toBe(401);
|
||||
expect(second.json<{ error: string }>().error).toContain('revoked');
|
||||
await app.close();
|
||||
});
|
||||
|
||||
it('returns 401 when mcpd introspect returns ok:false (unknown / invalid token)', async () => {
|
||||
const fetchFn = vi.fn(async () => ({
|
||||
ok: false,
|
||||
json: async () => ({ ok: false, error: 'Invalid token' }),
|
||||
}) as unknown as Response);
|
||||
const app = await setupApp({ mcpdUrl: 'http://mcpd', fetch: fetchFn });
|
||||
const res = await app.inject({
|
||||
method: 'GET',
|
||||
url: '/projects/foo/mcp',
|
||||
headers: { authorization: 'Bearer mcpctl_pat_unknown' },
|
||||
});
|
||||
expect(res.statusCode).toBe(401);
|
||||
await app.close();
|
||||
});
|
||||
|
||||
it('returns 401 (not a crash) when mcpd is unreachable', async () => {
|
||||
const fetchFn = vi.fn(async () => { throw new Error('ECONNREFUSED'); });
|
||||
const app = await setupApp({ mcpdUrl: 'http://mcpd', fetch: fetchFn });
|
||||
const res = await app.inject({
|
||||
method: 'GET',
|
||||
url: '/projects/foo/mcp',
|
||||
headers: { authorization: 'Bearer mcpctl_pat_valid' },
|
||||
});
|
||||
expect(res.statusCode).toBe(401);
|
||||
await app.close();
|
||||
});
|
||||
});
|
||||
@@ -13,6 +13,7 @@ function mockMcpdClient(servers: Array<{ id: string; name: string; transport: st
|
||||
forward: vi.fn(async () => ({ status: 200, body: servers })),
|
||||
withTimeout: vi.fn(() => client),
|
||||
withHeaders: vi.fn(() => client),
|
||||
withToken: vi.fn(() => client),
|
||||
};
|
||||
return client;
|
||||
}
|
||||
|
||||
@@ -30,9 +30,13 @@ function mockMcpdClient() {
|
||||
delete: vi.fn(),
|
||||
forward: vi.fn(async () => ({ status: 200, body: [] })),
|
||||
withHeaders: vi.fn(),
|
||||
withToken: vi.fn(),
|
||||
withTimeout: vi.fn(),
|
||||
};
|
||||
// withHeaders returns a new client-like object (returns self for simplicity)
|
||||
// Chainable withX returns the same client for simplicity
|
||||
(client.withHeaders as ReturnType<typeof vi.fn>).mockReturnValue(client);
|
||||
(client.withToken as ReturnType<typeof vi.fn>).mockReturnValue(client);
|
||||
(client.withTimeout as ReturnType<typeof vi.fn>).mockReturnValue(client);
|
||||
return client;
|
||||
}
|
||||
|
||||
|
||||
@@ -152,4 +152,76 @@ describe('SessionGate', () => {
|
||||
expect(gate.isGated('s1')).toBe(false);
|
||||
expect(gate.getSession('s2')!.tags).toEqual([]); // s2 untouched
|
||||
});
|
||||
|
||||
describe('per-McpToken ungate cache', () => {
|
||||
it('new session from an already-ungated token starts ungated, with prior tags + prompts', () => {
|
||||
const gate = new SessionGate();
|
||||
gate.createSession('session-1', true, 'tokA');
|
||||
expect(gate.isGated('session-1')).toBe(true);
|
||||
|
||||
gate.ungate('session-1', ['ops'], makeMatchResult(['runbook']), 'tokA');
|
||||
expect(gate.isTokenUngated('tokA')).toBe(true);
|
||||
|
||||
// LiteLLM semantics: same token, brand-new session-id.
|
||||
gate.createSession('session-2', true, 'tokA');
|
||||
expect(gate.isGated('session-2')).toBe(false);
|
||||
const s2 = gate.getSession('session-2')!;
|
||||
expect(s2.tags).toContain('ops');
|
||||
expect(s2.retrievedPrompts.has('runbook')).toBe(true);
|
||||
});
|
||||
|
||||
it('does not persist across tokens', () => {
|
||||
const gate = new SessionGate();
|
||||
gate.createSession('s1', true, 'tokA');
|
||||
gate.ungate('s1', ['ops'], makeMatchResult(['p']), 'tokA');
|
||||
|
||||
// Different token → fresh gated session.
|
||||
gate.createSession('s2', true, 'tokB');
|
||||
expect(gate.isGated('s2')).toBe(true);
|
||||
expect(gate.isTokenUngated('tokB')).toBe(false);
|
||||
});
|
||||
|
||||
it('is not triggered when no tokenSha is supplied (STDIO path)', () => {
|
||||
const gate = new SessionGate();
|
||||
gate.createSession('s1', true);
|
||||
gate.ungate('s1', ['ops'], makeMatchResult(['p']));
|
||||
|
||||
// A second session with no token starts gated — STDIO semantics preserved.
|
||||
gate.createSession('s2', true);
|
||||
expect(gate.isGated('s2')).toBe(true);
|
||||
});
|
||||
|
||||
it('honors the TTL window and expires', () => {
|
||||
const gate = new SessionGate(50); // 50ms TTL for the test
|
||||
gate.createSession('s1', true, 'tokA');
|
||||
gate.ungate('s1', ['ops'], makeMatchResult(['p']), 'tokA');
|
||||
expect(gate.isTokenUngated('tokA')).toBe(true);
|
||||
|
||||
return new Promise<void>((resolve) => setTimeout(() => {
|
||||
expect(gate.isTokenUngated('tokA')).toBe(false);
|
||||
gate.createSession('s2', true, 'tokA');
|
||||
expect(gate.isGated('s2')).toBe(true);
|
||||
resolve();
|
||||
}, 70));
|
||||
});
|
||||
|
||||
it('revokeToken clears the ungate entry immediately', () => {
|
||||
const gate = new SessionGate();
|
||||
gate.createSession('s1', true, 'tokA');
|
||||
gate.ungate('s1', ['ops'], makeMatchResult(['p']), 'tokA');
|
||||
expect(gate.isTokenUngated('tokA')).toBe(true);
|
||||
|
||||
gate.revokeToken('tokA');
|
||||
expect(gate.isTokenUngated('tokA')).toBe(false);
|
||||
gate.createSession('s2', true, 'tokA');
|
||||
expect(gate.isGated('s2')).toBe(true);
|
||||
});
|
||||
|
||||
it('empty-string tokenSha does not register an ungate entry', () => {
|
||||
const gate = new SessionGate();
|
||||
gate.createSession('s1', true, '');
|
||||
gate.ungate('s1', ['ops'], makeMatchResult(['p']), '');
|
||||
expect(gate.isTokenUngated('')).toBe(false);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
158
src/mcplocal/tests/smoke/mcptoken.smoke.test.ts
Normal file
158
src/mcplocal/tests/smoke/mcptoken.smoke.test.ts
Normal file
@@ -0,0 +1,158 @@
|
||||
/**
|
||||
* Smoke tests: McpToken + HTTP-mode mcplocal end-to-end.
|
||||
*
|
||||
* Exercises the full public CLI contract:
|
||||
* 1. `mcpctl create project` + `mcpctl create mcptoken`
|
||||
* 2. `mcpctl test mcp <url> --token $TOK --expect-tools …` → exit 0
|
||||
* 3. Same token against a different project → exit 1 (403)
|
||||
* 4. Revoke the token, retry → exit 1 (401) within the negative-cache window
|
||||
* 5. --expect-tools <nonexistent> → exit 2 (contract failure)
|
||||
*
|
||||
* Target endpoint: $MCPGW_URL (default https://mcp.ad.itaz.eu). The containerized
|
||||
* mcplocal must be deployed and reachable. If the /healthz preflight fails we
|
||||
* skip the whole suite with a clear message.
|
||||
*
|
||||
* Run with: pnpm test:smoke
|
||||
*/
|
||||
import { describe, it, expect, beforeAll } from 'vitest';
|
||||
import http from 'node:http';
|
||||
import https from 'node:https';
|
||||
import { execSync } from 'node:child_process';
|
||||
|
||||
const MCPGW_URL = process.env.MCPGW_URL ?? 'https://mcp.ad.itaz.eu';
|
||||
const PROJECT_NAME = `smoke-mcptoken-${Date.now().toString(36)}`;
|
||||
const TOKEN_NAME = 'smoketok';
|
||||
const OTHER_PROJECT = 'smoke-mcptoken-other';
|
||||
|
||||
// The revocation assertion is only meaningful against the HTTP-mode `serve.ts`
|
||||
// entry, which has the token-introspection cache (5s negative TTL). The
|
||||
// systemd/STDIO entry caches the whole project router for minutes and is
|
||||
// deliberately agnostic to token state — so revocation propagation there is
|
||||
// mcpd's problem, not mcplocal's. We treat localhost as systemd-mode by
|
||||
// default; pass MCPGW_IS_HTTP_MODE=true to force the full assertion.
|
||||
const IS_HTTP_MODE = process.env.MCPGW_IS_HTTP_MODE === 'true'
|
||||
|| (!/^(http|https):\/\/(localhost|127\.|0\.0\.0\.0)/i.test(MCPGW_URL));
|
||||
|
||||
interface CliResult { code: number; stdout: string; stderr: string }
|
||||
|
||||
function run(args: string): CliResult {
|
||||
try {
|
||||
const stdout = execSync(`mcpctl ${args}`, {
|
||||
encoding: 'utf-8',
|
||||
timeout: 30_000,
|
||||
stdio: ['ignore', 'pipe', 'pipe'],
|
||||
});
|
||||
return { code: 0, stdout: stdout.trim(), stderr: '' };
|
||||
} catch (err) {
|
||||
const e = err as { status?: number; stdout?: Buffer | string; stderr?: Buffer | string };
|
||||
return {
|
||||
code: e.status ?? 1,
|
||||
stdout: e.stdout ? (typeof e.stdout === 'string' ? e.stdout : e.stdout.toString('utf-8')) : '',
|
||||
stderr: e.stderr ? (typeof e.stderr === 'string' ? e.stderr : e.stderr.toString('utf-8')) : '',
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
function healthz(url: string, timeoutMs = 5000): Promise<boolean> {
|
||||
return new Promise((resolve) => {
|
||||
const parsed = new URL(`${url.replace(/\/$/, '')}/healthz`);
|
||||
const driver = parsed.protocol === 'https:' ? https : http;
|
||||
const req = driver.get(
|
||||
{
|
||||
hostname: parsed.hostname,
|
||||
port: parsed.port || (parsed.protocol === 'https:' ? 443 : 80),
|
||||
path: parsed.pathname,
|
||||
timeout: timeoutMs,
|
||||
},
|
||||
(res) => {
|
||||
resolve((res.statusCode ?? 500) < 500);
|
||||
res.resume();
|
||||
},
|
||||
);
|
||||
req.on('error', () => resolve(false));
|
||||
req.on('timeout', () => { req.destroy(); resolve(false); });
|
||||
});
|
||||
}
|
||||
|
||||
// Suite-level state shared across the `it` cases below. The cases form an
// ordered scenario (create → use → revoke → clean up), so later cases read
// values that earlier ones assign.
let gatewayUp = false; // set once in beforeAll; every case no-ops when false
let rawToken = ''; // raw PAT captured from `create mcptoken` stdout
let knownToolName: string | undefined; // first tool name seen via `test mcp`

describe('mcptoken smoke', () => {
  // Probe the gateway once up front; when it is unreachable the whole suite
  // degrades to a warned skip instead of failing.
  beforeAll(async () => {
    gatewayUp = await healthz(MCPGW_URL);
    if (!gatewayUp) {
      // eslint-disable-next-line no-console
      console.warn(`\n ○ mcptoken smoke: skipped — ${MCPGW_URL}/healthz unreachable. Set MCPGW_URL to override.\n`);
    }
  }, 20_000);

  it('creates the project and a project-scoped mcptoken', () => {
    if (!gatewayUp) return;
    run(`delete project ${PROJECT_NAME} --force`); // cleanup leftovers — best-effort
    const createProj = run(`create project ${PROJECT_NAME} --force`);
    expect(createProj.code).toBe(0);

    const createTok = run(`create mcptoken ${TOKEN_NAME} --project ${PROJECT_NAME} --rbac clone`);
    expect(createTok.code).toBe(0);
    // The raw PAT is only printed once, at create time — capture it here for
    // the later cases.
    const match = createTok.stdout.match(/mcpctl_pat_[A-Za-z0-9]+/);
    expect(match, 'raw token was printed to stdout').not.toBeNull();
    rawToken = match![0];
  });

  it('passes `mcpctl test mcp` against the token\'s project endpoint', () => {
    if (!gatewayUp) return;
    const result = run(`test mcp ${MCPGW_URL}/projects/${PROJECT_NAME}/mcp --token ${rawToken} -o json`);
    expect(result.code, result.stderr || result.stdout).toBe(0);
    // stdout may carry non-JSON noise before the report — parse from the first '{'.
    const report = JSON.parse(result.stdout.slice(result.stdout.indexOf('{'))) as {
      exitCode: number;
      tools: string[] | null;
      initialize: string;
    };
    expect(report.exitCode).toBe(0);
    expect(report.initialize).toBe('ok');
    expect(Array.isArray(report.tools)).toBe(true);
    knownToolName = report.tools?.[0];
  });

  it('fails `mcpctl test mcp` against a different project with 403', () => {
    if (!gatewayUp) return;
    run(`create project ${OTHER_PROJECT} --force`);
    // Same token, different project: the gateway must reject it.
    const result = run(`test mcp ${MCPGW_URL}/projects/${OTHER_PROJECT}/mcp --token ${rawToken} -o json`);
    expect(result.code).toBe(1);
    const report = JSON.parse(result.stdout.slice(result.stdout.indexOf('{'))) as { error?: string };
    expect(report.error ?? '').toMatch(/403|not valid for|project|Invalid/i);
  });

  it('exits 2 (contract failure) when --expect-tools names a nonexistent tool', () => {
    if (!gatewayUp) return;
    // Exit code 2 distinguishes contract failures from transport/auth failures (1).
    const result = run(`test mcp ${MCPGW_URL}/projects/${PROJECT_NAME}/mcp --token ${rawToken} --expect-tools __nonexistent_tool_xyz__`);
    expect(result.code).toBe(2);
  });

  it('returns 401 after the token is revoked (within the negative-cache window)', async () => {
    if (!gatewayUp) return;
    if (!IS_HTTP_MODE) {
      // eslint-disable-next-line no-console
      console.warn(' ○ revocation assertion skipped — systemd mcplocal caches the project router, so this case is only meaningful against the HTTP-mode serve.ts entry. Set MCPGW_IS_HTTP_MODE=true to force it.');
      // Still delete the token so cleanup runs the same way.
      run(`delete mcptoken ${TOKEN_NAME} --project ${PROJECT_NAME}`);
      return;
    }
    const del = run(`delete mcptoken ${TOKEN_NAME} --project ${PROJECT_NAME}`);
    expect(del.code).toBe(0);
    // Introspection negative TTL defaults to 5s — wait 7s to be safe.
    await new Promise((r) => setTimeout(r, 7_000));
    const result = run(`test mcp ${MCPGW_URL}/projects/${PROJECT_NAME}/mcp --token ${rawToken} -o json`);
    expect(result.code).toBe(1);
    const report = JSON.parse(result.stdout.slice(result.stdout.indexOf('{'))) as { error?: string };
    expect(report.error ?? '').toMatch(/401|revoked|Invalid token/i);
  }, 20_000);

  it('cleans up test fixtures', () => {
    if (!gatewayUp) return;
    run(`delete project ${PROJECT_NAME} --force`);
    run(`delete project ${OTHER_PROJECT} --force`);
    // Trivially-true check — presumably kept so this case always has at least
    // one expectation; confirm against the suite's lint rules.
    expect(knownToolName === undefined || typeof knownToolName === 'string').toBe(true);
  });
});
|
||||
@@ -3,3 +3,5 @@ export * from './validation/index.js';
|
||||
export * from './constants/index.js';
|
||||
export * from './utils/index.js';
|
||||
export * from './secrets/index.js';
|
||||
export * from './tokens/index.js';
|
||||
export * from './mcp-http/index.js';
|
||||
|
||||
246
src/shared/src/mcp-http/index.ts
Normal file
246
src/shared/src/mcp-http/index.ts
Normal file
@@ -0,0 +1,246 @@
|
||||
/**
|
||||
* Reusable Streamable-HTTP MCP client.
|
||||
*
|
||||
* Handles:
|
||||
* - Bearer auth (session tokens or McpToken PATs)
|
||||
* - mcp-session-id round-trip
|
||||
* - Both JSON and text/event-stream response bodies
|
||||
* - JSON-RPC id correlation when a response is multiplexed with notifications
|
||||
*
|
||||
* Used by the smoke suite (`SmokeMcpSession` is a thin wrapper around this)
|
||||
* and by `mcpctl test mcp <url>`.
|
||||
*/
|
||||
import http from 'node:http';
|
||||
import https from 'node:https';
|
||||
|
||||
/** Options accepted by `McpHttpSession`. */
export interface McpHttpSessionOptions {
  /** Bearer to send on every request. Accepts raw tokens (no "Bearer " prefix). */
  bearer?: string;
  /** Additional headers merged into every request. */
  headers?: Record<string, string>;
  /** Timeout per HTTP request in milliseconds. Defaults to 30_000. */
  timeoutMs?: number;
}

/** One entry from a `tools/list` response. */
export interface ToolInfo {
  name: string;
  /** Human-readable summary, when the server provides one. */
  description?: string;
  /** Tool argument schema — shape is server-defined, so left untyped. */
  inputSchema?: unknown;
}

/** Result payload of a `tools/call` request. */
export interface ToolCallResult {
  /** Content items; `text` is populated for text-type entries. */
  content: Array<{ type: string; text?: string }>;
  /** True when the tool itself reported a failure (distinct from transport errors). */
  isError?: boolean;
}

/** Arguments for the low-level `rawHttpRequest` helper. */
interface HttpRequestArgs {
  url: string;
  method: string;
  headers?: Record<string, string>;
  /** Pre-serialized request body, sent verbatim when present. */
  body?: string;
  /** Per-request timeout; the helper defaults to 30_000 when omitted. */
  timeoutMs?: number;
}

/** Fully-buffered response from `rawHttpRequest`. */
interface HttpRequestResult {
  status: number;
  headers: http.IncomingHttpHeaders;
  /** Response body decoded as UTF-8 text. */
  body: string;
}
|
||||
|
||||
function rawHttpRequest(opts: HttpRequestArgs): Promise<HttpRequestResult> {
|
||||
return new Promise((resolve, reject) => {
|
||||
const parsed = new URL(opts.url);
|
||||
const driver = parsed.protocol === 'https:' ? https : http;
|
||||
const req = driver.request(
|
||||
{
|
||||
hostname: parsed.hostname,
|
||||
port: parsed.port || (parsed.protocol === 'https:' ? 443 : 80),
|
||||
path: parsed.pathname + parsed.search,
|
||||
method: opts.method,
|
||||
headers: opts.headers,
|
||||
timeout: opts.timeoutMs ?? 30_000,
|
||||
},
|
||||
(res) => {
|
||||
const chunks: Buffer[] = [];
|
||||
res.on('data', (chunk: Buffer) => chunks.push(chunk));
|
||||
res.on('end', () => {
|
||||
resolve({
|
||||
status: res.statusCode ?? 0,
|
||||
headers: res.headers,
|
||||
body: Buffer.concat(chunks).toString('utf-8'),
|
||||
});
|
||||
});
|
||||
},
|
||||
);
|
||||
req.on('error', reject);
|
||||
req.on('timeout', () => {
|
||||
req.destroy();
|
||||
reject(new Error('Request timed out'));
|
||||
});
|
||||
if (opts.body) req.write(opts.body);
|
||||
req.end();
|
||||
});
|
||||
}
|
||||
|
||||
function parseSse(body: string): unknown[] {
|
||||
const messages: unknown[] = [];
|
||||
for (const line of body.split('\n')) {
|
||||
if (line.startsWith('data: ')) {
|
||||
try {
|
||||
messages.push(JSON.parse(line.slice(6)));
|
||||
} catch {
|
||||
// skip malformed SSE data line
|
||||
}
|
||||
}
|
||||
}
|
||||
return messages;
|
||||
}
|
||||
|
||||
/** Thrown when the server returned a response JSON-RPC error payload. */
|
||||
export class McpProtocolError extends Error {
|
||||
constructor(public readonly code: number, message: string) {
|
||||
super(`MCP error ${code}: ${message}`);
|
||||
this.name = 'McpProtocolError';
|
||||
}
|
||||
}
|
||||
|
||||
/** Thrown when the HTTP layer rejected the request (auth, transport, 5xx). */
|
||||
export class McpTransportError extends Error {
|
||||
constructor(public readonly status: number, public readonly body: string, message?: string) {
|
||||
super(message ?? `HTTP ${status}: ${body.slice(0, 200)}`);
|
||||
this.name = 'McpTransportError';
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Stateful Streamable-HTTP MCP client session.
 *
 * Tracks the `mcp-session-id` negotiated on the first response and replays it
 * on every subsequent request; correlates JSON-RPC responses by id when the
 * server multiplexes them with notifications over SSE.
 */
export class McpHttpSession {
  // Session id returned by the server on the first response; undefined until
  // negotiated, reset to undefined by close().
  private sessionId: string | undefined;
  // Monotonic JSON-RPC request id counter.
  private nextId = 1;

  constructor(
    /** Full URL of the MCP endpoint (e.g. `https://mcp.example.com/projects/foo/mcp`). */
    public readonly url: string,
    private readonly options: McpHttpSessionOptions = {},
  ) {}

  /**
   * Assemble headers for one request: JSON content type, dual Accept (JSON +
   * SSE), caller-supplied extras, and — when known — the session id and
   * bearer. `extra` and `options.headers` can override the defaults because
   * they are spread after them.
   */
  private buildHeaders(extra: Record<string, string> = {}): Record<string, string> {
    const headers: Record<string, string> = {
      'Content-Type': 'application/json',
      'Accept': 'application/json, text/event-stream',
      ...(this.options.headers ?? {}),
      ...extra,
    };
    if (this.sessionId) headers['mcp-session-id'] = this.sessionId;
    if (this.options.bearer) headers['Authorization'] = `Bearer ${this.options.bearer}`;
    return headers;
  }

  /**
   * Send a JSON-RPC request and wait for the response with a matching id.
   * Handles both single JSON and multiplexed SSE bodies.
   *
   * @throws McpTransportError on HTTP status >= 400 (best-effort message
   *   extracted from the error body).
   * @throws McpProtocolError when the JSON-RPC payload carries an `error`.
   */
  async send(method: string, params: Record<string, unknown> = {}): Promise<unknown> {
    const id = this.nextId++;
    const request = { jsonrpc: '2.0', id, method, params };

    const args: HttpRequestArgs = {
      url: this.url,
      method: 'POST',
      headers: this.buildHeaders(),
      body: JSON.stringify(request),
    };
    // Only forward timeoutMs when set, so rawHttpRequest's default applies otherwise.
    if (this.options.timeoutMs !== undefined) args.timeoutMs = this.options.timeoutMs;
    const result = await rawHttpRequest(args);

    // Capture the session id from the first response that carries one —
    // deliberately done before the status check so even error responses can
    // establish the session.
    if (!this.sessionId) {
      const sid = result.headers['mcp-session-id'];
      if (typeof sid === 'string') this.sessionId = sid;
    }

    if (result.status >= 400) {
      // Best-effort extraction: servers return either {error: "..."} or the
      // JSON-RPC {error: {message}} shape; fall back to a truncated raw body.
      let message = `HTTP ${result.status}`;
      try {
        const body = JSON.parse(result.body) as { error?: string | { message?: string } };
        const errField = body.error;
        if (typeof errField === 'string') message = errField;
        else if (errField && typeof errField === 'object' && typeof errField.message === 'string') message = errField.message;
      } catch {
        message = `HTTP ${result.status}: ${result.body.slice(0, 200)}`;
      }
      throw new McpTransportError(result.status, result.body, message);
    }

    // SSE bodies may interleave notifications with the response; JSON bodies
    // are a single message.
    const messages = result.headers['content-type']?.includes('text/event-stream')
      ? parseSse(result.body)
      : [JSON.parse(result.body)];

    const matched = messages.find((m) => {
      const msg = m as { id?: unknown };
      return msg.id === id;
    }) as { result?: unknown; error?: { code: number; message: string } } | undefined;

    // Fall back to the first message when no id matched (some servers echo a
    // different id shape); an empty message list is a hard error.
    const parsed = matched ?? messages[0] as { result?: unknown; error?: { code: number; message: string } } | undefined;
    if (!parsed) throw new Error(`No response for ${method}`);
    if (parsed.error) throw new McpProtocolError(parsed.error.code, parsed.error.message);
    return parsed.result;
  }

  /**
   * Fire-and-forget JSON-RPC notification (no `id`, no response expected).
   * Transport failures are swallowed.
   */
  async sendNotification(method: string, params: Record<string, unknown> = {}): Promise<void> {
    const notification = { jsonrpc: '2.0', method, params };
    const args: HttpRequestArgs = {
      url: this.url,
      method: 'POST',
      headers: this.buildHeaders(),
      body: JSON.stringify(notification),
    };
    if (this.options.timeoutMs !== undefined) args.timeoutMs = this.options.timeoutMs;
    await rawHttpRequest(args).catch(() => { /* best-effort */ });
  }

  /** MCP `initialize` handshake. */
  async initialize(): Promise<{ protocolVersion?: string; serverInfo?: { name?: string; version?: string }; capabilities?: unknown }> {
    return await this.send('initialize', {
      protocolVersion: '2024-11-05',
      capabilities: {},
      clientInfo: { name: 'mcpctl-mcp-http-client', version: '1.0.0' },
    }) as { protocolVersion?: string; serverInfo?: { name?: string; version?: string }; capabilities?: unknown };
  }

  /** List tools exposed by the endpoint. Returns [] when the server omits `tools`. */
  async listTools(): Promise<ToolInfo[]> {
    const result = await this.send('tools/list') as { tools?: ToolInfo[] };
    return result.tools ?? [];
  }

  /** Call a tool and return its `content` payload. */
  async callTool(name: string, args: Record<string, unknown> = {}): Promise<ToolCallResult> {
    return await this.send('tools/call', { name, arguments: args }) as ToolCallResult;
  }

  /** Clean-close the session with a DELETE. Safe to call when no sessionId has been negotiated. */
  async close(): Promise<void> {
    if (this.sessionId === undefined) return;
    await rawHttpRequest({
      url: this.url,
      method: 'DELETE',
      headers: this.buildHeaders(),
      timeoutMs: 5_000,
    }).catch(() => { /* best-effort */ });
    // Forget the session even if the DELETE failed, so retries start fresh.
    this.sessionId = undefined;
  }
}
|
||||
|
||||
/** Best-effort healthcheck against `<base>/healthz`. */
|
||||
export async function mcpHealthCheck(baseUrl: string, timeoutMs = 5_000): Promise<boolean> {
|
||||
try {
|
||||
const res = await rawHttpRequest({ url: `${baseUrl.replace(/\/$/, '')}/healthz`, method: 'GET', timeoutMs });
|
||||
return res.status >= 200 && res.status < 500;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/** Derive `<scheme>://<host>[:port]` from a full MCP endpoint URL (for healthcheck). */
|
||||
export function deriveBaseUrl(mcpUrl: string): string {
|
||||
const u = new URL(mcpUrl);
|
||||
return `${u.protocol}//${u.host}`;
|
||||
}
|
||||
41
src/shared/src/tokens/index.ts
Normal file
41
src/shared/src/tokens/index.ts
Normal file
@@ -0,0 +1,41 @@
|
||||
import { createHash, randomBytes, timingSafeEqual } from 'node:crypto';
|
||||
|
||||
export const TOKEN_PREFIX = 'mcpctl_pat_';
|
||||
|
||||
// base62 alphabet (URL/header safe, no ambiguous chars across all positions)
|
||||
const BASE62 = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789';
|
||||
|
||||
export interface GeneratedToken {
|
||||
/** The raw token — `mcpctl_pat_<32 base62 chars>`. Shown once at create time; never stored. */
|
||||
raw: string;
|
||||
/** SHA-256 hex digest of the raw value. Persist this, not the raw value. */
|
||||
hash: string;
|
||||
/** First 16 chars of the raw token, safe to display (e.g. in `mcpctl get mcptoken`). */
|
||||
prefix: string;
|
||||
}
|
||||
|
||||
export function generateToken(): GeneratedToken {
|
||||
const bytes = randomBytes(24);
|
||||
let body = '';
|
||||
for (const b of bytes) body += BASE62[b % 62];
|
||||
const raw = TOKEN_PREFIX + body;
|
||||
return { raw, hash: hashToken(raw), prefix: raw.slice(0, 16) };
|
||||
}
|
||||
|
||||
export function hashToken(raw: string): string {
|
||||
return createHash('sha256').update(raw).digest('hex');
|
||||
}
|
||||
|
||||
export function isMcpToken(bearer: string): boolean {
|
||||
return bearer.startsWith(TOKEN_PREFIX);
|
||||
}
|
||||
|
||||
/** Constant-time compare two equal-length hex strings. Returns false on length mismatch. */
|
||||
export function timingSafeEqualHex(a: string, b: string): boolean {
|
||||
if (a.length !== b.length) return false;
|
||||
try {
|
||||
return timingSafeEqual(Buffer.from(a, 'hex'), Buffer.from(b, 'hex'));
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user