Compare commits
15 Commits
3149ea3ae7
...
feat/secre
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
029c3d5f34 | ||
|
|
6946250090 | ||
| 1480d268c7 | |||
|
|
39df459bb1 | ||
|
|
75fe0533c1 | ||
|
|
5d1072889f | ||
|
|
dfc53cd15e | ||
|
|
1887d90821 | ||
|
|
3061a5f6ae | ||
|
|
913678e400 | ||
|
|
f68e123821 | ||
|
|
2127b41d9f | ||
|
|
a151b2e756 | ||
|
|
efcfeeab65 | ||
|
|
2ddb493bb0 |
@@ -12,4 +12,3 @@ dist
|
||||
.env.*
|
||||
deploy/docker-compose.yml
|
||||
src/cli
|
||||
src/mcplocal
|
||||
|
||||
@@ -5,11 +5,11 @@ _mcpctl() {
|
||||
local cur prev words cword
|
||||
_init_completion || return
|
||||
|
||||
local commands="status login logout config get describe delete logs create edit apply patch backup approve console cache"
|
||||
local commands="status login logout config get describe delete logs create edit apply patch backup approve console cache test migrate"
|
||||
local project_commands="get describe delete logs create edit attach-server detach-server"
|
||||
local global_opts="-v --version --daemon-url --direct -p --project -h --help"
|
||||
local resources="servers instances secrets templates projects users groups rbac prompts promptrequests serverattachments proxymodels all"
|
||||
local resource_aliases="servers instances secrets templates projects users groups rbac prompts promptrequests serverattachments proxymodels all server srv instance inst secret sec template tpl project proj user group rbac-definition rbac-binding prompt promptrequest pr serverattachment sa proxymodel pm"
|
||||
local resources="servers instances secrets secretbackends templates projects users groups rbac prompts promptrequests serverattachments proxymodels all"
|
||||
local resource_aliases="servers instances secrets secretbackends templates projects users groups rbac prompts promptrequests serverattachments proxymodels all server srv instance inst secret sec secretbackend sb template tpl project proj user group rbac-definition rbac-binding prompt promptrequest pr serverattachment sa proxymodel pm"
|
||||
|
||||
# Check if --project/-p was given
|
||||
local has_project=false
|
||||
@@ -175,7 +175,7 @@ _mcpctl() {
|
||||
create)
|
||||
local create_sub=$(_mcpctl_get_subcmd $subcmd_pos)
|
||||
if [[ -z "$create_sub" ]]; then
|
||||
COMPREPLY=($(compgen -W "server secret project user group rbac prompt serverattachment promptrequest help" -- "$cur"))
|
||||
COMPREPLY=($(compgen -W "server secret secretbackend project user group rbac mcptoken prompt serverattachment promptrequest help" -- "$cur"))
|
||||
else
|
||||
case "$create_sub" in
|
||||
server)
|
||||
@@ -184,6 +184,9 @@ _mcpctl() {
|
||||
secret)
|
||||
COMPREPLY=($(compgen -W "--data --force -h --help" -- "$cur"))
|
||||
;;
|
||||
secretbackend)
|
||||
COMPREPLY=($(compgen -W "--type --description --default --url --namespace --mount --path-prefix --token-secret --config --force -h --help" -- "$cur"))
|
||||
;;
|
||||
project)
|
||||
COMPREPLY=($(compgen -W "-d --description --proxy-model --prompt --gated --no-gated --server --force -h --help" -- "$cur"))
|
||||
;;
|
||||
@@ -194,7 +197,10 @@ _mcpctl() {
|
||||
COMPREPLY=($(compgen -W "--description --member --force -h --help" -- "$cur"))
|
||||
;;
|
||||
rbac)
|
||||
COMPREPLY=($(compgen -W "--subject --binding --operation --force -h --help" -- "$cur"))
|
||||
COMPREPLY=($(compgen -W "--subject --roleBindings --force -h --help" -- "$cur"))
|
||||
;;
|
||||
mcptoken)
|
||||
COMPREPLY=($(compgen -W "-p --project --rbac --bind --ttl --description --force -h --help" -- "$cur"))
|
||||
;;
|
||||
prompt)
|
||||
COMPREPLY=($(compgen -W "-p --project --content --content-file --priority --link -h --help" -- "$cur"))
|
||||
@@ -311,6 +317,36 @@ _mcpctl() {
|
||||
esac
|
||||
fi
|
||||
return ;;
|
||||
test)
|
||||
local test_sub=$(_mcpctl_get_subcmd $subcmd_pos)
|
||||
if [[ -z "$test_sub" ]]; then
|
||||
COMPREPLY=($(compgen -W "mcp help" -- "$cur"))
|
||||
else
|
||||
case "$test_sub" in
|
||||
mcp)
|
||||
COMPREPLY=($(compgen -W "--token --tool --args --expect-tools --timeout -o --output --no-health -h --help" -- "$cur"))
|
||||
;;
|
||||
*)
|
||||
COMPREPLY=($(compgen -W "-h --help" -- "$cur"))
|
||||
;;
|
||||
esac
|
||||
fi
|
||||
return ;;
|
||||
migrate)
|
||||
local migrate_sub=$(_mcpctl_get_subcmd $subcmd_pos)
|
||||
if [[ -z "$migrate_sub" ]]; then
|
||||
COMPREPLY=($(compgen -W "secrets help" -- "$cur"))
|
||||
else
|
||||
case "$migrate_sub" in
|
||||
secrets)
|
||||
COMPREPLY=($(compgen -W "--from --to --names --keep-source --dry-run -h --help" -- "$cur"))
|
||||
;;
|
||||
*)
|
||||
COMPREPLY=($(compgen -W "-h --help" -- "$cur"))
|
||||
;;
|
||||
esac
|
||||
fi
|
||||
return ;;
|
||||
help)
|
||||
COMPREPLY=($(compgen -W "$commands" -- "$cur"))
|
||||
return ;;
|
||||
|
||||
@@ -4,7 +4,7 @@
|
||||
# Erase any stale completions from previous versions
|
||||
complete -c mcpctl -e
|
||||
|
||||
set -l commands status login logout config get describe delete logs create edit apply patch backup approve console cache
|
||||
set -l commands status login logout config get describe delete logs create edit apply patch backup approve console cache test migrate
|
||||
set -l project_commands get describe delete logs create edit attach-server detach-server
|
||||
|
||||
# Disable file completions by default
|
||||
@@ -31,10 +31,10 @@ function __mcpctl_has_project
|
||||
end
|
||||
|
||||
# Resource type detection
|
||||
set -l resources servers instances secrets templates projects users groups rbac prompts promptrequests serverattachments proxymodels all
|
||||
set -l resources servers instances secrets secretbackends templates projects users groups rbac prompts promptrequests serverattachments proxymodels all
|
||||
|
||||
function __mcpctl_needs_resource_type
|
||||
set -l resource_aliases servers instances secrets templates projects users groups rbac prompts promptrequests serverattachments proxymodels all server srv instance inst secret sec template tpl project proj user group rbac-definition rbac-binding prompt promptrequest pr serverattachment sa proxymodel pm
|
||||
set -l resource_aliases servers instances secrets secretbackends templates projects users groups rbac prompts promptrequests serverattachments proxymodels all server srv instance inst secret sec secretbackend sb template tpl project proj user group rbac-definition rbac-binding prompt promptrequest pr serverattachment sa proxymodel pm
|
||||
set -l tokens (commandline -opc)
|
||||
set -l found_cmd false
|
||||
for tok in $tokens
|
||||
@@ -59,6 +59,7 @@ function __mcpctl_resolve_resource
|
||||
case server srv servers; echo servers
|
||||
case instance inst instances; echo instances
|
||||
case secret sec secrets; echo secrets
|
||||
case secretbackend sb secretbackends; echo secretbackends
|
||||
case template tpl templates; echo templates
|
||||
case project proj projects; echo projects
|
||||
case user users; echo users
|
||||
@@ -74,7 +75,7 @@ function __mcpctl_resolve_resource
|
||||
end
|
||||
|
||||
function __mcpctl_get_resource_type
|
||||
set -l resource_aliases servers instances secrets templates projects users groups rbac prompts promptrequests serverattachments proxymodels all server srv instance inst secret sec template tpl project proj user group rbac-definition rbac-binding prompt promptrequest pr serverattachment sa proxymodel pm
|
||||
set -l resource_aliases servers instances secrets secretbackends templates projects users groups rbac prompts promptrequests serverattachments proxymodels all server srv instance inst secret sec secretbackend sb template tpl project proj user group rbac-definition rbac-binding prompt promptrequest pr serverattachment sa proxymodel pm
|
||||
set -l tokens (commandline -opc)
|
||||
set -l found_cmd false
|
||||
for tok in $tokens
|
||||
@@ -223,7 +224,7 @@ complete -c mcpctl -n "not __mcpctl_has_project; and not __fish_seen_subcommand_
|
||||
complete -c mcpctl -n "not __mcpctl_has_project; and not __fish_seen_subcommand_from $commands" -a describe -d 'Show detailed information about a resource'
|
||||
complete -c mcpctl -n "not __mcpctl_has_project; and not __fish_seen_subcommand_from $commands" -a delete -d 'Delete a resource (server, instance, secret, project, user, group, rbac)'
|
||||
complete -c mcpctl -n "not __mcpctl_has_project; and not __fish_seen_subcommand_from $commands" -a logs -d 'Get logs from an MCP server instance'
|
||||
complete -c mcpctl -n "not __mcpctl_has_project; and not __fish_seen_subcommand_from $commands" -a create -d 'Create a resource (server, secret, project, user, group, rbac, serverattachment, prompt)'
|
||||
complete -c mcpctl -n "not __mcpctl_has_project; and not __fish_seen_subcommand_from $commands" -a create -d 'Create a resource (server, secret, secretbackend, project, user, group, rbac, serverattachment, prompt)'
|
||||
complete -c mcpctl -n "not __mcpctl_has_project; and not __fish_seen_subcommand_from $commands" -a edit -d 'Edit a resource in your default editor (server, project)'
|
||||
complete -c mcpctl -n "not __mcpctl_has_project; and not __fish_seen_subcommand_from $commands" -a apply -d 'Apply declarative configuration from a YAML or JSON file'
|
||||
complete -c mcpctl -n "not __mcpctl_has_project; and not __fish_seen_subcommand_from $commands" -a patch -d 'Patch a resource field (e.g. mcpctl patch project myproj llmProvider=none)'
|
||||
@@ -231,13 +232,15 @@ complete -c mcpctl -n "not __mcpctl_has_project; and not __fish_seen_subcommand_
|
||||
complete -c mcpctl -n "not __mcpctl_has_project; and not __fish_seen_subcommand_from $commands" -a approve -d 'Approve a pending prompt request (atomic: delete request, create prompt)'
|
||||
complete -c mcpctl -n "not __mcpctl_has_project; and not __fish_seen_subcommand_from $commands" -a console -d 'Interactive MCP console — unified timeline with tools, provenance, and lab replay'
|
||||
complete -c mcpctl -n "not __mcpctl_has_project; and not __fish_seen_subcommand_from $commands" -a cache -d 'Manage ProxyModel pipeline cache'
|
||||
complete -c mcpctl -n "not __mcpctl_has_project; and not __fish_seen_subcommand_from $commands" -a test -d 'Utilities for testing MCP endpoints and config'
|
||||
complete -c mcpctl -n "not __mcpctl_has_project; and not __fish_seen_subcommand_from $commands" -a migrate -d 'Move resources between backends (currently: secrets between SecretBackends)'
|
||||
|
||||
# Project-scoped commands (with --project)
|
||||
complete -c mcpctl -n "__mcpctl_has_project; and not __fish_seen_subcommand_from $project_commands" -a get -d 'List resources (servers, projects, instances, all)'
|
||||
complete -c mcpctl -n "__mcpctl_has_project; and not __fish_seen_subcommand_from $project_commands" -a describe -d 'Show detailed information about a resource'
|
||||
complete -c mcpctl -n "__mcpctl_has_project; and not __fish_seen_subcommand_from $project_commands" -a delete -d 'Delete a resource (server, instance, secret, project, user, group, rbac)'
|
||||
complete -c mcpctl -n "__mcpctl_has_project; and not __fish_seen_subcommand_from $project_commands" -a logs -d 'Get logs from an MCP server instance'
|
||||
complete -c mcpctl -n "__mcpctl_has_project; and not __fish_seen_subcommand_from $project_commands" -a create -d 'Create a resource (server, secret, project, user, group, rbac, serverattachment, prompt)'
|
||||
complete -c mcpctl -n "__mcpctl_has_project; and not __fish_seen_subcommand_from $project_commands" -a create -d 'Create a resource (server, secret, secretbackend, project, user, group, rbac, serverattachment, prompt)'
|
||||
complete -c mcpctl -n "__mcpctl_has_project; and not __fish_seen_subcommand_from $project_commands" -a edit -d 'Edit a resource in your default editor (server, project)'
|
||||
complete -c mcpctl -n "__mcpctl_has_project; and not __fish_seen_subcommand_from $project_commands" -a attach-server -d 'Attach a server to a project (requires --project)'
|
||||
complete -c mcpctl -n "__mcpctl_has_project; and not __fish_seen_subcommand_from $project_commands" -a detach-server -d 'Detach a server from a project (requires --project)'
|
||||
@@ -280,13 +283,15 @@ complete -c mcpctl -n "__mcpctl_subcmd_active config claude-generate" -l stdout
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active config impersonate" -l quit -d 'Stop impersonating and return to original identity'
|
||||
|
||||
# create subcommands
|
||||
set -l create_cmds server secret project user group rbac prompt serverattachment promptrequest
|
||||
set -l create_cmds server secret secretbackend project user group rbac mcptoken prompt serverattachment promptrequest
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from create; and not __fish_seen_subcommand_from $create_cmds" -a server -d 'Create an MCP server definition'
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from create; and not __fish_seen_subcommand_from $create_cmds" -a secret -d 'Create a secret'
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from create; and not __fish_seen_subcommand_from $create_cmds" -a secretbackend -d 'Create a secret backend (plaintext, openbao)'
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from create; and not __fish_seen_subcommand_from $create_cmds" -a project -d 'Create a project'
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from create; and not __fish_seen_subcommand_from $create_cmds" -a user -d 'Create a user'
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from create; and not __fish_seen_subcommand_from $create_cmds" -a group -d 'Create a group'
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from create; and not __fish_seen_subcommand_from $create_cmds" -a rbac -d 'Create an RBAC binding definition'
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from create; and not __fish_seen_subcommand_from $create_cmds" -a mcptoken -d 'Create a project-scoped API token for HTTP-mode mcplocal. The raw token is printed once.'
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from create; and not __fish_seen_subcommand_from $create_cmds" -a prompt -d 'Create an approved prompt'
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from create; and not __fish_seen_subcommand_from $create_cmds" -a serverattachment -d 'Attach a server to a project'
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from create; and not __fish_seen_subcommand_from $create_cmds" -a promptrequest -d 'Create a prompt request (pending proposal that needs approval)'
|
||||
@@ -311,6 +316,18 @@ complete -c mcpctl -n "__mcpctl_subcmd_active create server" -l force -d 'Update
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active create secret" -l data -d 'Secret data KEY=value (repeat for multiple)' -x
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active create secret" -l force -d 'Update if already exists'
|
||||
|
||||
# create secretbackend options
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active create secretbackend" -l type -d 'Backend type (plaintext, openbao)' -x
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active create secretbackend" -l description -d 'Description' -x
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active create secretbackend" -l default -d 'Promote this backend to default (atomically demotes the current one)'
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active create secretbackend" -l url -d 'openbao: vault URL (e.g. http://bao.example:8200)' -x
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active create secretbackend" -l namespace -d 'openbao: X-Vault-Namespace header value' -x
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active create secretbackend" -l mount -d 'openbao: KV v2 mount point (default: secret)' -x
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active create secretbackend" -l path-prefix -d 'openbao: path prefix under mount (default: mcpctl)' -x
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active create secretbackend" -l token-secret -d 'openbao: token secret reference in SECRET/KEY form (e.g. bao-creds/token)' -x
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active create secretbackend" -l config -d 'Extra config as key=value (repeat for multiple)' -x
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active create secretbackend" -l force -d 'Update if already exists'
|
||||
|
||||
# create project options
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active create project" -s d -l description -d 'Project description' -x
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active create project" -l proxy-model -d 'Plugin name (default, content-pipeline, gate, none)' -x
|
||||
@@ -332,10 +349,17 @@ complete -c mcpctl -n "__mcpctl_subcmd_active create group" -l force -d 'Update
|
||||
|
||||
# create rbac options
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active create rbac" -l subject -d 'Subject as Kind:name (repeat for multiple)' -x
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active create rbac" -l binding -d 'Role binding as role:resource (e.g. edit:servers, run:projects)' -x
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active create rbac" -l operation -d 'Operation binding (e.g. logs, backup)' -x
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active create rbac" -l roleBindings -d 'Role binding as key:value pairs, e.g. "role:view,resource:servers" or "role:view,resource:servers,name:my-ha" or "action:logs" (repeat for multiple)' -x
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active create rbac" -l force -d 'Update if already exists'
|
||||
|
||||
# create mcptoken options
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active create mcptoken" -s p -l project -d 'Project this token is bound to' -xa '(__mcpctl_project_names)'
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active create mcptoken" -l rbac -d 'Base RBAC: \'empty\' (default, no bindings) or \'clone\' (snapshot creator\'s perms)' -x
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active create mcptoken" -l bind -d 'Additional role binding as key:value pairs, e.g. "role:view,resource:servers" or "action:logs" (repeat for multiple). Creator perms are the ceiling.' -x
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active create mcptoken" -l ttl -d 'Expiry: \'30d\', \'12h\', \'never\', or an ISO8601 datetime' -x
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active create mcptoken" -l description -d 'Freeform description' -x
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active create mcptoken" -l force -d 'Revoke any existing active token with this name, then create a new one'
|
||||
|
||||
# create prompt options
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active create prompt" -s p -l project -d 'Project name to scope the prompt to' -xa '(__mcpctl_project_names)'
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active create prompt" -l content -d 'Prompt content text' -x
|
||||
@@ -369,6 +393,30 @@ complete -c mcpctl -n "__fish_seen_subcommand_from cache; and not __fish_seen_su
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active cache clear" -l older-than -d 'Clear entries older than N days' -x
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active cache clear" -s y -l yes -d 'Skip confirmation'
|
||||
|
||||
# test subcommands
|
||||
set -l test_cmds mcp
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from test; and not __fish_seen_subcommand_from $test_cmds" -a mcp -d 'Verify a Streamable-HTTP MCP endpoint: health, initialize, tools/list, optionally call a tool.'
|
||||
|
||||
# test mcp options
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active test mcp" -l token -d 'Bearer token (also reads $MCPCTL_TOKEN)' -x
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active test mcp" -l tool -d 'Invoke a specific tool after listing' -x
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active test mcp" -l args -d 'JSON-encoded arguments for --tool' -x
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active test mcp" -l expect-tools -d 'Comma-separated tool names that MUST appear; fails otherwise' -x
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active test mcp" -l timeout -d 'Per-request timeout in seconds' -x
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active test mcp" -s o -l output -d 'Output format: text or json' -x
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active test mcp" -l no-health -d 'Skip the /healthz preflight check'
|
||||
|
||||
# migrate subcommands
|
||||
set -l migrate_cmds secrets
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from migrate; and not __fish_seen_subcommand_from $migrate_cmds" -a secrets -d 'Migrate secrets from one SecretBackend to another'
|
||||
|
||||
# migrate secrets options
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active migrate secrets" -l from -d 'Source SecretBackend name' -x
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active migrate secrets" -l to -d 'Destination SecretBackend name' -x
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active migrate secrets" -l names -d 'Comma-separated secret names (default: all)' -x
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active migrate secrets" -l keep-source -d 'Leave the source copy intact (default: delete from source after write+commit)'
|
||||
complete -c mcpctl -n "__mcpctl_subcmd_active migrate secrets" -l dry-run -d 'Show which secrets would be migrated without touching them'
|
||||
|
||||
# status options
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from status" -s o -l output -d 'output format (table, json, yaml)' -x
|
||||
|
||||
|
||||
60
deploy/Dockerfile.mcplocal
Normal file
60
deploy/Dockerfile.mcplocal
Normal file
@@ -0,0 +1,60 @@
|
||||
# HTTP-only mcplocal for k8s deploy (Service `mcp`, Ingress `mcp.ad.itaz.eu`).
|
||||
# Container CMD runs the `serve.ts` entry which — unlike the systemd/STDIO
|
||||
# entry — has no stdin/stdout MCP client and bootstraps exclusively from env.
|
||||
|
||||
# Stage 1: Build TypeScript
|
||||
FROM node:20-alpine AS builder
|
||||
|
||||
RUN corepack enable && corepack prepare pnpm@9.15.0 --activate
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
# Copy workspace config and package manifests
|
||||
COPY pnpm-workspace.yaml pnpm-lock.yaml package.json tsconfig.base.json ./
|
||||
COPY src/mcplocal/package.json src/mcplocal/tsconfig.json src/mcplocal/
|
||||
COPY src/shared/package.json src/shared/tsconfig.json src/shared/
|
||||
COPY src/db/package.json src/db/tsconfig.json src/db/
|
||||
|
||||
# Install all dependencies
|
||||
RUN pnpm install --frozen-lockfile
|
||||
|
||||
# Copy source
|
||||
COPY src/mcplocal/src/ src/mcplocal/src/
|
||||
COPY src/shared/src/ src/shared/src/
|
||||
COPY src/db/src/ src/db/src/
|
||||
COPY src/db/prisma/ src/db/prisma/
|
||||
|
||||
# Build (mcplocal depends on shared; db is pulled transitively by shared/... actually
|
||||
# mcplocal does not depend on db at runtime — prisma client is only used by mcpd).
|
||||
RUN pnpm -F @mcpctl/shared build && pnpm -F @mcpctl/mcplocal build
|
||||
|
||||
# Stage 2: Production runtime
|
||||
FROM node:20-alpine
|
||||
|
||||
RUN corepack enable && corepack prepare pnpm@9.15.0 --activate
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
# Copy workspace config, manifests, and lockfile
|
||||
COPY pnpm-workspace.yaml pnpm-lock.yaml package.json ./
|
||||
COPY src/mcplocal/package.json src/mcplocal/
|
||||
COPY src/shared/package.json src/shared/
|
||||
|
||||
# Install deps (production only — no db / prisma runtime here).
|
||||
RUN pnpm install --frozen-lockfile
|
||||
|
||||
# Copy built output
|
||||
COPY --from=builder /app/src/shared/dist/ src/shared/dist/
|
||||
COPY --from=builder /app/src/mcplocal/dist/ src/mcplocal/dist/
|
||||
|
||||
EXPOSE 3200
|
||||
|
||||
# Cache directory — expected to be mounted as a PVC in k8s.
|
||||
VOLUME /var/lib/mcplocal/cache
|
||||
|
||||
HEALTHCHECK --interval=10s --timeout=5s --retries=3 --start-period=10s \
|
||||
CMD wget -q --spider http://localhost:3200/healthz || exit 1
|
||||
|
||||
# MCPLOCAL_MCPD_URL and MCPLOCAL_MCPD_TOKEN are required and must come from
|
||||
# the Pulumi-managed Secret. Other env vars default sensibly.
|
||||
CMD ["node", "src/mcplocal/dist/serve.js"]
|
||||
174
docs/mcptoken-implementation.md
Normal file
174
docs/mcptoken-implementation.md
Normal file
@@ -0,0 +1,174 @@
|
||||
# mcptoken + HTTP-mode mcplocal — implementation log
|
||||
|
||||
Companion to the approved plan at `/home/michal/.claude/plans/lets-discuss-something-i-bright-lovelace.md`.
|
||||
This file is updated as each milestone lands, so you can review what was actually done vs. what was planned.
|
||||
|
||||
## Context (why)
|
||||
|
||||
You're running your own vLLM inference outside Claude Code and want it to consume mcpctl over MCP with the same UX Claude gets: project-scoped server discovery, proxy models, the pipeline cache. Today `mcplocal` is systemd-only and serves STDIO — unreachable from off-host and unauthenticated. This work adds:
|
||||
|
||||
1. A containerized, network-accessible `mcplocal` serving Streamable HTTP.
|
||||
2. A new `McpToken` resource (CLI: `mcpctl get/create/delete mcptoken`) — project-scoped bearer tokens with the same RBAC stack as users. Hashed at rest; raw value shown once.
|
||||
3. Tokens as a first-class RBAC subject kind (`McpToken:<sha>`), with a creator-permission ceiling so non-admins cannot mint escalated tokens.
|
||||
4. k8s deploy (Service `mcp`, Ingress `mcp.ad.itaz.eu`, PVC-backed `FileCache`).
|
||||
5. A CLI breaking change: `mcpctl create rbac --binding edit:servers` → `--roleBindings role:edit,resource:servers`. You explicitly asked for this; only one command uses it.
|
||||
6. A product-grade `mcpctl test mcp <url>` verb for validating any Streamable-HTTP MCP endpoint, reused by smoke tests.
|
||||
|
||||
## Branch
|
||||
|
||||
All work lives on `feat/mcptoken` (off `main` at `3149ea3`).
|
||||
|
||||
## Pre-work committed to main (outside this branch)
|
||||
|
||||
Before starting the feature, we flushed your in-flight changes to main so they wouldn't travel with the branch:
|
||||
|
||||
- **`3149ea3 fix: MCP proxy resilience — discovery cache, default liveness probes`** — per-server `tools/list` cache in `McpRouter` with positive+negative TTL so dead upstreams only stall the first call; default liveness probe (tools/list through the real production path) applied to any RUNNING instance without an explicit healthCheck. Already pushed to origin.
|
||||
|
||||
## Status legend
|
||||
|
||||
- ✅ done
|
||||
- 🚧 in progress
|
||||
- ⬜ not started
|
||||
|
||||
## PR 1 — Schema + token helpers + mcpd CRUD routes ✅
|
||||
|
||||
| # | Step | Status |
|
||||
|---|---|---|
|
||||
| 1 | `McpToken` Prisma model + Project/User reverse relations; `AuditEvent.tokenName` / `tokenSha` + index | ✅ |
|
||||
| 2 | `src/shared/src/tokens/index.ts` — `generateToken`, `hashToken`, `isMcpToken`, `timingSafeEqualHex`, `TOKEN_PREFIX` | ✅ |
|
||||
| 3 | `src/mcpd/src/repositories/mcp-token.repository.ts` + new interfaces in `repositories/interfaces.ts` | ✅ |
|
||||
| 4 | `src/mcpd/src/services/mcp-token.service.ts` — creator-ceiling via `rbacService.canAccess`/`canRunOperation`, raw token returned only once, auto-creates an `RbacDefinition` with subject `McpToken:<sha>` when bindings are non-empty | ✅ |
|
||||
| 5 | `src/mcpd/src/routes/mcp-tokens.ts` — POST / GET / GET:id / DELETE:id + POST:id/revoke + GET /introspect | ✅ |
|
||||
| 6 | Wired into `main.ts` — repo/service constructed, routes registered, `mcptokens` added to URL→permission map + name resolver; `/mcptokens/introspect` added to auth-skip list so mcplocal can call it with a raw McpToken bearer | ✅ |
|
||||
| 7 | RBAC extensions: new subject kind `McpToken` in `rbac-definition.schema.ts`; `mcptokens` added to `RBAC_RESOURCES` and `RESOURCE_ALIASES`; `rbac.service.ts` threads optional `mcpTokenSha` through `canAccess`, `canRunOperation`, `getAllowedScope`, `getPermissions`; resolver matches `{kind:'McpToken', name: sha}` | ✅ |
|
||||
| 8 | Unit tests — `tests/mcp-token-service.test.ts` covering: empty/clone modes, ceiling rejection, RbacDefinition auto-create with correct `McpToken:<sha>` subject, duplicate-name conflict, introspect valid/revoked/expired/unknown, revoke deletes the RbacDefinition. 11/11 green. Full mcpd suite still 648/648. | ✅ |
|
||||
|
||||
### What this PR does NOT do yet (coming in PR 3)
|
||||
|
||||
- The mcpd **auth middleware** does not yet dispatch on the token prefix. A raw `mcpctl_pat_…` bearer sent to any `/api/v1/*` endpoint (other than `/introspect`) is still rejected as an invalid session. That's intentional — PR 3 extends `middleware/auth.ts` to recognize both session bearers and McpToken bearers.
|
||||
- No CLI yet. Tokens can be created only via `POST /api/v1/mcptokens` for now.
|
||||
|
||||
## PR 2 — RBAC CLI migration ✅
|
||||
|
||||
Migrated `mcpctl create rbac` from positional flag syntax to the key=value form you asked for.
|
||||
|
||||
Before:
|
||||
```
|
||||
mcpctl create rbac developers \
|
||||
--subject User:alice@test.com \
|
||||
--binding edit:servers \
|
||||
--binding view:servers:my-ha \
|
||||
--operation logs
|
||||
```
|
||||
After:
|
||||
```
|
||||
mcpctl create rbac developers \
|
||||
--subject User:alice@test.com \
|
||||
--roleBindings role:edit,resource:servers \
|
||||
--roleBindings role:view,resource:servers,name:my-ha \
|
||||
--roleBindings action:logs
|
||||
```
|
||||
|
||||
| # | Step | Status |
|
||||
|---|---|---|
|
||||
| 1 | New shared parser at `src/cli/src/commands/rbac-bindings.ts` exporting `parseRoleBinding(entry)` | ✅ |
|
||||
| 2 | `src/cli/src/commands/create.ts` — old `--binding`/`--operation` flags replaced with one repeatable `--roleBindings <kv>`. Uses the new parser. | ✅ |
|
||||
| 3 | Tests in `src/cli/tests/commands/create.test.ts` rewritten to the new form (8 RBAC tests updated) | ✅ |
|
||||
| 4 | New dedicated unit test `src/cli/tests/commands/rbac-bindings.test.ts` — 9 cases covering unscoped / name-scoped / action / trim / empty-value / unknown-key / action-conflict / missing-role rejections | ✅ |
|
||||
| 5 | Shell completions regenerated via `pnpm completions:generate` — both `completions/mcpctl.{bash,fish}` now offer `--roleBindings`, no longer `--binding`/`--operation` | ✅ |
|
||||
| 6 | Nothing in `docs/` or `README.md` referenced the old flags | ✅ |
|
||||
|
||||
Full CLI suite still 406/406 green. On-disk YAML shape (`roleBindings: [...]`) is unchanged, so backups and existing `apply -f` files keep working.
|
||||
|
||||
The extracted `parseRoleBinding` helper is what PR 3's `mcpctl create mcptoken --bind <kv>` flag will reuse.
|
||||
|
||||
## PR 3 — CLI mcptoken verbs + mcpd auth dispatch + audit ✅
|
||||
|
||||
| # | Step | Status |
|
||||
|---|---|---|
|
||||
| 1 | `src/mcpd/src/middleware/auth.ts` — dispatch on the bearer prefix. `mcpctl_pat_…` → new `findMcpToken(hash)` dep → populates `request.mcpToken` + `request.userId = ownerId`. Other bearers → existing `findSession` path. Returns 401 for revoked, expired, or unknown tokens. Fastify module augmentation adds `request.mcpToken?: McpTokenPrincipal`. | ✅ |
|
||||
| 2 | `src/mcpd/src/main.ts` — wires `findMcpToken: mcpTokenRepo.findByHash`. Threads `mcpTokenSha` into `canAccess` / `canRunOperation` / `getAllowedScope`. Adds a second project-scope check: `McpToken` principals can only reach resources inside their bound project (additional guard on top of the route handler checks). | ✅ |
|
||||
| 3 | New auth tests (`tests/auth.test.ts`) — 3 McpToken dispatch cases: happy path sets userId + mcpToken, revoked → 401, no findMcpToken wired → 401. Session path unchanged. | ✅ |
|
||||
| 4 | `mcpctl create mcptoken <name> -p <proj> [--rbac empty\|clone] [--bind …] [--ttl …]` — new subcommand. Reuses `parseRoleBinding` from PR 2. `parseTtl` helper accepts `30d`/`12h`/`never`/ISO8601. `--force` revokes the existing active token and creates a new one. Raw token is printed once with a "copy now" banner. | ✅ |
|
||||
| 5 | `mcpctl get mcptokens` + `mcpctl get mcptoken <name> -p <proj>` + `mcpctl describe mcptoken <name> -p <proj>` + `mcpctl delete mcptoken <name> -p <proj>`. Names are project-scoped, so all verbs require `-p` unless a CUID is passed. Table columns: NAME / PROJECT / PREFIX / CREATED / LAST USED / EXPIRES / STATUS. Describe surfaces the auto-created RbacDefinition's bindings (matched by `mcptoken-<id>` name convention). | ✅ |
|
||||
| 6 | `mcpctl apply -f` — added `McpTokenSpecSchema`, `mcptoken: 'mcptokens'` in `KIND_TO_RESOURCE`, and an applier that creates if missing or logs "already active — skipped" (tokens are immutable). Raw token printed on create. | ✅ |
|
||||
| 7 | Resource aliases — `mcptoken`/`mcptokens`/`token`/`tokens` all resolve to `mcptokens`. `stripInternalFields` scrubs the secret and derived fields and promotes `projectName` → `project` for YAML round-trip. | ✅ |
|
||||
| 8 | Audit pipeline — `src/mcplocal/src/audit/types.ts` gains `tokenName?`/`tokenSha?`; collector gets `setSessionMcpToken(sessionId, {tokenName, tokenSha})` alongside `setSessionUserName`, both merged into a per-session principal map. `src/mcpd/src/services/audit-event.service.ts` accepts `tokenName` and `tokenSha` query params (repo already extended in PR 1). `console/audit-types.ts` carries the new optional fields so the TUI can surface them in a follow-up. | ✅ |
|
||||
| 9 | Shell completions regenerated — `mcpctl create mcptoken` flags (`--project`, `--rbac`, `--bind`, `--ttl`, `--description`, `--force`) and the new resource alias land in both bash and fish completions. `completions.test.ts` freshness check passes. | ✅ |
|
||||
|
||||
### What this PR does NOT do yet (coming in PR 4)
|
||||
|
||||
- No HTTP-mode mcplocal binary yet. Tokens can be used to hit mcpd directly via `/api/v1/…` with `Authorization: Bearer mcpctl_pat_…`, but the containerized `/projects/<p>/mcp` endpoint and its token-auth preHandler don't exist yet.
|
||||
- The audit-console TUI still shows only `userName` columns; adding a `TOKEN` column is a UI polish follow-up.
|
||||
|
||||
### Test stats
|
||||
|
||||
- 1764/1764 tests pass workspace-wide (up from ~1750 before PR 3).
|
||||
- Build clean across all 5 packages.
|
||||
- Completions freshness check green.
|
||||
|
||||
## PR 4 — HTTP-mode mcplocal + container + `mcpctl test mcp` + smoke ✅
|
||||
|
||||
| # | Step | Status |
|
||||
|---|---|---|
|
||||
| 1 | **Shared HTTP MCP client** — `src/shared/src/mcp-http/index.ts`. `McpHttpSession(url, {bearer?, headers?, timeoutMs?})` with `initialize / listTools / callTool / close / send / sendNotification`. Handles http + https, multiplexed SSE bodies, JSON-RPC id correlation. Distinct `McpProtocolError` / `McpTransportError` classes for contract-vs-transport failures. Plus `deriveBaseUrl(url)` + `mcpHealthCheck(base)`. Exported from `@mcpctl/shared`. | ✅ |
|
||||
| 2 | **`mcpctl test mcp <url>`** — new CLI verb under `src/cli/src/commands/test-mcp.ts`. Flags: `--token` (also reads `$MCPCTL_TOKEN`), `--tool`, `--args` (JSON), `--expect-tools`, `--timeout`, `-o text\|json`, `--no-health`. Exit codes: 0 PASS, 1 TRANSPORT/AUTH FAIL, 2 CONTRACT FAIL (e.g. missing tool or `isError=true`). | ✅ |
|
||||
| 3 | **Unit tests** for the verb — `src/cli/tests/commands/test-mcp.test.ts`. 9 cases: happy path, health preflight failure, `--expect-tools` miss / hit, transport throw, `--tool` + `isError` → exit 2, `-o json` report, `$MCPCTL_TOKEN` env fallback, invalid `--args`. All green. | ✅ |
|
||||
| 4 | **`src/mcplocal/src/serve.ts`** — new HTTP-only entry. Drops `StdioProxyServer` and `--upstream`; forces host/port from `MCPLOCAL_HTTP_HOST`/`MCPLOCAL_HTTP_PORT`; requires `MCPLOCAL_MCPD_URL`. Registers a Fastify preHandler that runs the new `token-auth` middleware on `/projects/*` and `/mcp`. Preserves LLM provider loading + proxymodel hot-reload watchers. | ✅ |
|
||||
| 5 | **`src/mcplocal/src/http/token-auth.ts`** — Fastify preHandler that validates `mcpctl_pat_…` bearers by calling `GET <mcpd>/api/v1/mcptokens/introspect`. Cache: 30s positive / 5s negative TTL keyed on `hashToken(raw)`. Rejects non-Bearer, non-`mcpctl_pat_`, revoked, expired, and wrong-project (403 when path `projectName` ≠ token's bound project). Sets `request.mcpToken = { tokenName, tokenSha, projectName }` for the audit collector. | ✅ |
|
||||
| 6 | **FileCache PVC plumbing** — `src/mcplocal/src/http/project-mcp-endpoint.ts` now honours `process.env.MCPLOCAL_CACHE_DIR` at both `FileCache` construction sites (gated + dynamic). No constructor change needed — `FileCache` already accepted a `dir` config; we just wire the env-derived value through. | ✅ |
|
||||
| 7 | **Audit collector integration** — when `request.mcpToken` is set, the `onsessioninitialized` handler in `project-mcp-endpoint.ts` now also calls `collector.setSessionMcpToken(id, {tokenName, tokenSha})` alongside the existing `setSessionUserName`. Session map from PR 3 merges both principals. | ✅ |
|
||||
| 8 | **Container image** — `deploy/Dockerfile.mcplocal` mirrors `Dockerfile.mcpd` shape: multi-stage Node 20 Alpine, pnpm workspace build of `@mcpctl/shared` + `@mcpctl/mcplocal`, runtime `CMD node src/mcplocal/dist/serve.js`, `EXPOSE 3200`, `VOLUME /var/lib/mcplocal/cache`, `HEALTHCHECK` on `/healthz`. | ✅ |
|
||||
| 9 | **Build + push script** — `scripts/build-mcplocal.sh` (executable, 755) mirrors `build-mcpd.sh`. Pushes to `10.0.0.194:3012/michal/mcplocal:latest`. | ✅ |
|
||||
| 10 | **`fulldeploy.sh`** — now a 4-step pipeline: (1) build + push mcpd, (2) build + push mcplocal, (3) rollout both deployments on k8s (mcplocal gated behind a `kubectl get deployment/mcplocal` check so the script stays green before the Pulumi stack lands), (4) RPM release. Smoke suite runs at the end as before. | ✅ |
|
||||
| 11 | **`mcpctl test mcp` + new create flags in completions** — bash + fish regenerated. `src/mcplocal/package.json` gains a `serve` script for convenience. | ✅ |
|
||||
| 12 | **Smoke test** — `src/mcplocal/tests/smoke/mcptoken.smoke.test.ts`. Gated on `healthz($MCPGW_URL)`; skipped with a clear warning if the gateway is unreachable. Scenarios: happy path via `mcpctl test mcp` → exit 0; cross-project → exit 1 with a 403 message; `--expect-tools __nonexistent__` → exit 2; delete-then-retry after the 5s negative-cache window → exit 1 with 401. Cleans up both projects at the end. | ✅ |
|
||||
|
||||
### Deploy-time steps still owed (outside this repo)
|
||||
|
||||
- **Pulumi (`../kubernetes-deployment`, stack `homelab`)** — add a `Deployment` named `mcplocal` in ns `mcpctl` pointing at `10.0.0.194:3012/michal/mcplocal:latest` (internal registry), a `Service` named `mcp` (port 3200→80, ClusterIP), an `Ingress` for `mcp.ad.itaz.eu` with TLS via the existing cluster-issuer, a PVC `mcplocal-cache` (10Gi RWO, mounted `/var/lib/mcplocal/cache`), and a NetworkPolicy mirroring mcpd's. Required env: **just `MCPLOCAL_MCPD_URL`** (point at `http://mcpd.mcpctl.svc.cluster.local:3100`). Optionally `MCPLOCAL_TOKEN_POSITIVE_TTL_MS` / `MCPLOCAL_TOKEN_NEGATIVE_TTL_MS` for stricter revocation. `fulldeploy.sh` already runs `pulumi preview` first and halts on drift.
|
||||
- **No pod-level secret required** (revised from earlier draft) — the pod has no persistent identity to mcpd. Every inbound `Authorization: Bearer mcpctl_pat_…` is forwarded verbatim to mcpd, and mcpd's auth middleware resolves the McpToken principal. This eliminates the original `MCPLOCAL_MCPD_TOKEN` secret and its rotation story. Trade-off: a token with `--rbac=empty` can't read `/api/v1/projects/:name/servers`, but it also can't meaningfully serve MCP, so this is the right failure mode. See `src/mcplocal/src/serve.ts` header comment.
|
||||
- **LLM provider config** — if any project served by this pod is `gated: true`, mount your `~/.mcpctl/config.json` as a ConfigMap at `/root/.mcpctl/config.json`. Ungated projects (proxyModel `content-pipeline` or no LLM-driven stages) need nothing.
|
||||
|
||||
### Test stats
|
||||
|
||||
- 1773/1773 workspace tests pass (up from 1764 before PR 4).
|
||||
- All five packages build clean.
|
||||
- Shell completions fresh.
|
||||
- `mcpctl test mcp --help` and `mcpctl create mcptoken --help` render expected surfaces.
|
||||
|
||||
## End-to-end verification (manual, after Pulumi resources land)
|
||||
|
||||
```bash
|
||||
# From a workstation outside the k8s cluster:
|
||||
mcpctl create project vllm --force
|
||||
TOK=$(mcpctl create mcptoken vllm-token --project vllm --rbac clone | grep mcpctl_pat_)
|
||||
export MCPCTL_TOKEN="$TOK"
|
||||
|
||||
# Probe the public gateway
|
||||
mcpctl test mcp https://mcp.ad.itaz.eu/projects/vllm/mcp --expect-tools begin_session
|
||||
|
||||
# Negative: wrong project → exit 1
|
||||
mcpctl test mcp https://mcp.ad.itaz.eu/projects/other/mcp
|
||||
echo $? # 1
|
||||
|
||||
# Audit — the call should be tagged with tokenName=vllm-token
|
||||
mcpctl console --audit # look for the TOKEN column once the TUI patch lands
|
||||
```
|
||||
|
||||
## Design decisions recap (so you don't have to re-read the plan)
|
||||
|
||||
| Decision | Choice |
|
||||
|---|---|
|
||||
| Transport | Streamable HTTP only |
|
||||
| Binary shape | Same `@mcpctl/mcplocal` package, two entry files (`main.ts` STDIO, `serve.ts` HTTP) |
|
||||
| Container runtime | Node (not bun-compiled) — mirrors mcpd |
|
||||
| Cache | PVC at `/var/lib/mcplocal/cache` |
|
||||
| Hostname | k8s Service `mcp`, Ingress `mcp.ad.itaz.eu` |
|
||||
| Token format | `mcpctl_pat_<32-byte base62>`, stored as SHA-256, shown-once at create |
|
||||
| Resource | `McpToken`, CLI noun `mcptoken`, one-project-per-token, FK cascade |
|
||||
| Subject kind | New `McpToken:<sha>` |
|
||||
| TTL | No default. Optional `--ttl 30d` / `never` / ISO date |
|
||||
| Default bindings | `--rbac=empty` (default), `--rbac=clone`, `--bind <kv>` — creator ceiling enforced server-side |
|
||||
| Binding CLI | `--roleBindings role:view,resource:servers[,name:foo]` or `--roleBindings action:logs` |
|
||||
| Project enforcement | Endpoint visibility only (no strict create-time check) — same mechanism Claude uses |
|
||||
167
docs/secret-backends.md
Normal file
167
docs/secret-backends.md
Normal file
@@ -0,0 +1,167 @@
|
||||
# Secret backends
|
||||
|
||||
`mcpctl` stores the raw data for `Secret` resources in a pluggable **backend**.
|
||||
The default is `plaintext` — the secret payload lives in Postgres as plain JSON
|
||||
— which is fine for laptop development but a poor fit for shared clusters. For
|
||||
production, point at an external KV store and delete secrets from the DB after
|
||||
migration.
|
||||
|
||||
This guide covers the model, the shipped drivers, and how to migrate without
|
||||
downtime.
|
||||
|
||||
## Model
|
||||
|
||||
- A `SecretBackend` resource is a single named driver instance (e.g. a pointer
|
||||
at one OpenBao deployment).
|
||||
- Every `Secret` row carries a `backendId` FK — the backend that owns its data.
|
||||
- Exactly one `SecretBackend` has `isDefault: true`. New secrets created through
|
||||
the API/CLI land on that backend.
|
||||
- The `plaintext` backend is seeded at startup and named `default`. It cannot
  be deleted — exactly one plaintext row must always exist, because it is the
  place the other drivers' bootstrap credentials live (see below).
|
||||
|
||||
## CLI
|
||||
|
||||
```bash
|
||||
mcpctl get secretbackends # list backends
|
||||
mcpctl describe secretbackend <name> # inspect config (credentials masked)
|
||||
mcpctl create secretbackend <name> --type plaintext [--default] [--description ...]
|
||||
mcpctl create secretbackend <name> --type openbao \
|
||||
--url http://bao.example:8200 \
|
||||
--token-secret bao-creds/token \
|
||||
[--namespace <ns>] [--mount secret] [--path-prefix mcpctl] \
|
||||
[--default]
|
||||
mcpctl delete secretbackend <name> # blocked if any secret still points at it
|
||||
|
||||
mcpctl migrate secrets --from default --to bao
|
||||
mcpctl migrate secrets --from default --to bao --names a,b --keep-source
|
||||
mcpctl migrate secrets --from default --to bao --dry-run
|
||||
```
|
||||
|
||||
Anything you can do with `create secretbackend` also works via `apply -f`:
|
||||
|
||||
```yaml
|
||||
kind: secretbackend
|
||||
name: bao
|
||||
type: openbao
|
||||
description: "shared cluster OpenBao"
|
||||
isDefault: true
|
||||
config:
|
||||
url: http://bao.svc.cluster.local:8200
|
||||
tokenSecretRef: { name: bao-creds, key: token }
|
||||
namespace: platform
|
||||
```
|
||||
|
||||
## Drivers
|
||||
|
||||
### plaintext
|
||||
|
||||
Trivial. `Secret.data` holds the JSON, `externalRef` is empty.
|
||||
|
||||
- Storage: Postgres column.
|
||||
- Bootstrap: seeded as `default` at startup.
|
||||
- Cost: zero setup, zero encryption at rest, full access for any DB reader.
|
||||
|
||||
Use for development, CI, or single-tenant self-hosts where the DB itself is
|
||||
treated as sensitive.
|
||||
|
||||
### openbao
|
||||
|
||||
Talks HTTP to an [OpenBao](https://openbao.org) (MPL 2.0 Vault fork) KV v2
|
||||
mount. Also compatible with HashiCorp Vault KV v2 — the wire protocol is the
|
||||
same.
|
||||
|
||||
| Config key | Required? | Description |
|
||||
|------------------|-----------|-------------|
|
||||
| `url` | yes | Base URL, e.g. `http://bao.svc.cluster.local:8200`. |
|
||||
| `tokenSecretRef` | yes | `{ name, key }` pointing at a `Secret` on the **plaintext** backend that holds the bootstrap token. |
|
||||
| `mount` | no | KV v2 mount name. Default `secret`. |
|
||||
| `pathPrefix` | no | Path prefix under the mount. Default `mcpctl`. Secrets land at `<mount>/<pathPrefix>/<secretName>`. |
|
||||
| `namespace` | no | `X-Vault-Namespace` header for OpenBao/Vault Enterprise namespaces. |
|
||||
|
||||
The driver only stores a reference in `Secret.externalRef` (`mount/path`). The
|
||||
`Secret.data` column is left empty for openbao-backed rows — you can safely
|
||||
drop DB-level access to secrets after migration.
|
||||
|
||||
#### Required OpenBao policy
|
||||
|
||||
Minimum token policy for a backend that lives at `secret/mcpctl/`:
|
||||
|
||||
```hcl
|
||||
path "secret/data/mcpctl/*" {
|
||||
capabilities = ["create", "read", "update"]
|
||||
}
|
||||
|
||||
path "secret/metadata/mcpctl/*" {
|
||||
capabilities = ["list", "delete"]
|
||||
}
|
||||
|
||||
path "secret/metadata/mcpctl/" {
|
||||
capabilities = ["list"]
|
||||
}
|
||||
```
|
||||
|
||||
Grant `delete` on `metadata/...` only if you need mcpctl to fully remove
|
||||
secrets — OpenBao soft-deletes until the metadata is gone.
|
||||
|
||||
#### Chicken-and-egg: where does the OpenBao token live?
|
||||
|
||||
mcpd reads the OpenBao token from a `Secret` on the **plaintext** backend.
|
||||
That's the whole point of keeping plaintext around — it's the trust root:
|
||||
|
||||
1. Operator creates a plaintext `Secret` holding the bootstrap token.
|
||||
2. Operator creates the `openbao` backend, pointing at that secret via
|
||||
`tokenSecretRef`.
|
||||
3. Operator runs `mcpctl migrate secrets --from default --to bao` to move all
|
||||
other secrets off plaintext.
|
||||
4. After migration, the only sensitive row left on plaintext is the OpenBao
|
||||
token itself. DB access is now equivalent to OpenBao token access (a single
|
||||
key), not equivalent to all API keys in the system.
|
||||
|
||||
Follow-up work (not shipped yet) replaces static token auth with Kubernetes
|
||||
ServiceAccount auth so no bootstrap token is needed at all.
|
||||
|
||||
## Migration — `mcpctl migrate secrets`
|
||||
|
||||
Atomicity is **per secret**, not per batch. Remote writes can't roll back, so we
|
||||
don't pretend. For each secret the service:
|
||||
|
||||
1. Reads the plaintext from the source driver.
|
||||
2. Writes it to the destination driver.
|
||||
3. Updates the `Secret` row: flips `backendId`, sets new `externalRef`, clears
|
||||
`data`.
|
||||
4. Deletes from source (skipped with `--keep-source`).
|
||||
|
||||
If the command is interrupted between step 2 and 3, the destination has an
|
||||
orphan entry but the source still owns the row. Re-running is idempotent — the
|
||||
service skips secrets that are already on the destination and picks up the
|
||||
rest.
|
||||
|
||||
```bash
|
||||
# Dry-run first: see what would move.
|
||||
mcpctl migrate secrets --from default --to bao --dry-run
|
||||
|
||||
# Migrate everything.
|
||||
mcpctl migrate secrets --from default --to bao
|
||||
|
||||
# Migrate a subset only.
|
||||
mcpctl migrate secrets --from default --to bao --names api-keys,oauth-client
|
||||
|
||||
# Leave the source copy in place (useful for A/B validation).
|
||||
mcpctl migrate secrets --from default --to bao --keep-source
|
||||
```
|
||||
|
||||
The command prints a per-secret summary (migrated / skipped / failed) and exits
|
||||
non-zero if any secret failed. Ctrl-C during the run is safe — restart when you
|
||||
want, no duplicate writes.
|
||||
|
||||
## RBAC
|
||||
|
||||
- `resource: secretbackends` — gated like any other resource (`view`,
|
||||
`create`, `edit`, `delete`).
|
||||
- `role: run, action: migrate-secrets` — required to call
|
||||
`POST /api/v1/secrets/migrate`.
|
||||
|
||||
Describe output masks config values whose keys look like credentials
|
||||
(`token`, `secret`, `password`, `key`), so `mcpctl describe secretbackend` is
|
||||
safe to paste into tickets.
|
||||
@@ -53,18 +53,30 @@ else
|
||||
fi
|
||||
|
||||
echo ""
|
||||
echo ">>> Step 1/3: Build & push mcpd Docker image"
|
||||
echo ">>> Step 1/4: Build & push mcpd Docker image"
|
||||
echo ""
|
||||
bash scripts/build-mcpd.sh "$@"
|
||||
|
||||
echo ""
|
||||
echo ">>> Step 2/3: Roll out mcpd on k8s ($KUBE_CONTEXT / $KUBE_NAMESPACE)"
|
||||
echo ">>> Step 2/4: Build & push mcplocal (HTTP-mode) Docker image"
|
||||
echo ""
|
||||
bash scripts/build-mcplocal.sh "$@"
|
||||
|
||||
echo ""
|
||||
echo ">>> Step 3/4: Roll out mcpd + mcplocal on k8s ($KUBE_CONTEXT / $KUBE_NAMESPACE)"
|
||||
echo ""
|
||||
kubectl --context "$KUBE_CONTEXT" -n "$KUBE_NAMESPACE" rollout restart "deployment/$KUBE_DEPLOYMENT"
|
||||
kubectl --context "$KUBE_CONTEXT" -n "$KUBE_NAMESPACE" rollout status "deployment/$KUBE_DEPLOYMENT" --timeout=3m
|
||||
if kubectl --context "$KUBE_CONTEXT" -n "$KUBE_NAMESPACE" get deployment/mcplocal >/dev/null 2>&1; then
|
||||
kubectl --context "$KUBE_CONTEXT" -n "$KUBE_NAMESPACE" rollout restart deployment/mcplocal
|
||||
kubectl --context "$KUBE_CONTEXT" -n "$KUBE_NAMESPACE" rollout status deployment/mcplocal --timeout=3m
|
||||
else
|
||||
echo " NOTE: deployment/mcplocal does not exist in the cluster yet — skipping rollout."
|
||||
echo " Apply the Pulumi stack in ../kubernetes-deployment to create it."
|
||||
fi
|
||||
|
||||
echo ""
|
||||
echo ">>> Step 3/3: Build, publish & install RPM"
|
||||
echo ">>> Step 4/4: Build, publish & install RPM"
|
||||
echo ""
|
||||
bash scripts/release.sh
|
||||
|
||||
|
||||
83
scripts/build-mcplocal.sh
Executable file
83
scripts/build-mcplocal.sh
Executable file
@@ -0,0 +1,83 @@
|
||||
#!/bin/bash
# Build mcplocal (HTTP-only) Docker image and push to Gitea container registry.
#
# Usage:
#   ./build-mcplocal.sh [tag]                         # Build for native arch
#   ./build-mcplocal.sh [tag] --platform linux/amd64  # Cross-build a single arch
#   ./build-mcplocal.sh [tag] --multi-arch            # linux/amd64 + linux/arm64 manifest
#
# The tag is optional and defaults to "latest"; flags may be passed without it.
set -eo pipefail

SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
PROJECT_ROOT="$(dirname "$SCRIPT_DIR")"
cd "$PROJECT_ROOT"

# Load .env for GITEA_TOKEN
if [ -f .env ]; then
  set -a; source .env; set +a
fi

# Push directly to internal address (external proxy has body size limit)
REGISTRY="10.0.0.194:3012"
IMAGE="mcplocal"

# Accept an optional leading tag. Previously `TAG="${1:-latest}"` would swallow
# a flag (e.g. `--platform`) as the tag when no tag was given.
TAG="latest"
if [ $# -gt 0 ] && [[ "$1" != --* ]]; then
  TAG="$1"
  shift
fi

PLATFORM=""
MULTI_ARCH=false
while [ $# -gt 0 ]; do
  case "$1" in
    --platform)
      if [ -z "${2:-}" ]; then
        echo "ERROR: --platform requires a value (e.g. linux/amd64)" >&2
        exit 1
      fi
      PLATFORM="$2"
      shift 2
      ;;
    --multi-arch)
      MULTI_ARCH=true
      shift
      ;;
    *)
      shift
      ;;
  esac
done

# Fail early with a clear message instead of handing podman an empty password.
if [ -z "${GITEA_TOKEN:-}" ]; then
  echo "ERROR: GITEA_TOKEN is not set (export it or add it to .env)" >&2
  exit 1
fi

if [ "$MULTI_ARCH" = true ]; then
  echo "==> Building multi-arch $IMAGE image (linux/amd64 + linux/arm64)..."
  podman build --platform linux/amd64,linux/arm64 \
    --manifest "$IMAGE:$TAG" -f deploy/Dockerfile.mcplocal .

  echo "==> Tagging manifest as $REGISTRY/michal/$IMAGE:$TAG..."
  podman tag "$IMAGE:$TAG" "$REGISTRY/michal/$IMAGE:$TAG"

  echo "==> Logging in to $REGISTRY..."
  podman login --tls-verify=false -u michal -p "$GITEA_TOKEN" "$REGISTRY"

  echo "==> Pushing manifest to $REGISTRY/michal/$IMAGE:$TAG..."
  podman manifest push --tls-verify=false --all \
    "$REGISTRY/michal/$IMAGE:$TAG" "docker://$REGISTRY/michal/$IMAGE:$TAG"
else
  # Build flags as an array so values survive quoting (the previous unquoted
  # $PLATFORM_FLAG expansion relied on word splitting).
  BUILD_ARGS=()
  if [ -n "$PLATFORM" ]; then
    BUILD_ARGS+=(--platform "$PLATFORM")
    echo "==> Building $IMAGE image for $PLATFORM..."
  else
    echo "==> Building $IMAGE image (native arch)..."
  fi

  podman build "${BUILD_ARGS[@]}" -t "$IMAGE:$TAG" -f deploy/Dockerfile.mcplocal .

  echo "==> Tagging as $REGISTRY/michal/$IMAGE:$TAG..."
  podman tag "$IMAGE:$TAG" "$REGISTRY/michal/$IMAGE:$TAG"

  echo "==> Logging in to $REGISTRY..."
  podman login --tls-verify=false -u michal -p "$GITEA_TOKEN" "$REGISTRY"

  echo "==> Pushing to $REGISTRY/michal/$IMAGE:$TAG..."
  podman push --tls-verify=false "$REGISTRY/michal/$IMAGE:$TAG"
fi

# Ensure package is linked to the repository
source "$SCRIPT_DIR/link-package.sh"
link_package "container" "$IMAGE"

echo "==> Done!"
echo "   Image: $REGISTRY/michal/$IMAGE:$TAG"
|
||||
169
scripts/demo-mcp-call.py
Executable file
169
scripts/demo-mcp-call.py
Executable file
@@ -0,0 +1,169 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Demo: make an MCP request against mcplocal using an McpToken bearer.
|
||||
|
||||
This is the standalone counterpart to `mcpctl test mcp` — intended to show
|
||||
exactly what a non-Claude client (e.g. a vLLM-driven agent) would do.
|
||||
|
||||
Usage:
|
||||
# Default: localhost mcplocal, sre project, token from $MCPCTL_TOKEN
|
||||
export MCPCTL_TOKEN=mcpctl_pat_...
|
||||
python3 scripts/demo-mcp-call.py
|
||||
|
||||
# Custom URL/project/tool
|
||||
python3 scripts/demo-mcp-call.py \\
|
||||
--url https://mcp.ad.itaz.eu \\
|
||||
--project sre \\
|
||||
--token "$MCPCTL_TOKEN" \\
|
||||
--tool begin_session \\
|
||||
--args '{"description":"hello"}'
|
||||
|
||||
No third-party deps — pure stdlib. Mirrors the protocol that
|
||||
src/shared/src/mcp-http/index.ts implements on the TypeScript side.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
import urllib.error
|
||||
import urllib.request
|
||||
from typing import Any
|
||||
|
||||
|
||||
def _parse_sse(body: str) -> list[dict[str, Any]]:
|
||||
"""Parse a text/event-stream body into a list of JSON-RPC messages."""
|
||||
out: list[dict[str, Any]] = []
|
||||
for line in body.splitlines():
|
||||
if line.startswith("data: "):
|
||||
try:
|
||||
out.append(json.loads(line[6:]))
|
||||
except json.JSONDecodeError:
|
||||
pass
|
||||
return out
|
||||
|
||||
|
||||
class McpSession:
|
||||
def __init__(self, url: str, bearer: str | None = None, timeout: float = 30.0):
|
||||
self.url = url
|
||||
self.bearer = bearer
|
||||
self.timeout = timeout
|
||||
self.session_id: str | None = None
|
||||
self._next_id = 1
|
||||
|
||||
def _headers(self) -> dict[str, str]:
|
||||
h = {
|
||||
"Content-Type": "application/json",
|
||||
"Accept": "application/json, text/event-stream",
|
||||
}
|
||||
if self.bearer:
|
||||
h["Authorization"] = f"Bearer {self.bearer}"
|
||||
if self.session_id:
|
||||
h["mcp-session-id"] = self.session_id
|
||||
return h
|
||||
|
||||
def send(self, method: str, params: dict[str, Any] | None = None) -> Any:
|
||||
rid = self._next_id
|
||||
self._next_id += 1
|
||||
payload = {"jsonrpc": "2.0", "id": rid, "method": method, "params": params or {}}
|
||||
req = urllib.request.Request(
|
||||
self.url,
|
||||
data=json.dumps(payload).encode("utf-8"),
|
||||
headers=self._headers(),
|
||||
method="POST",
|
||||
)
|
||||
try:
|
||||
with urllib.request.urlopen(req, timeout=self.timeout) as resp:
|
||||
body = resp.read().decode("utf-8")
|
||||
content_type = resp.headers.get("content-type", "")
|
||||
# First successful response carries the session id.
|
||||
if self.session_id is None:
|
||||
sid = resp.headers.get("mcp-session-id")
|
||||
if sid:
|
||||
self.session_id = sid
|
||||
messages: list[dict[str, Any]] = (
|
||||
_parse_sse(body) if "text/event-stream" in content_type else [json.loads(body)]
|
||||
)
|
||||
except urllib.error.HTTPError as e:
|
||||
err_body = e.read().decode("utf-8", errors="replace")
|
||||
raise SystemExit(f"HTTP {e.code} from {self.url}: {err_body}") from None
|
||||
except urllib.error.URLError as e:
|
||||
raise SystemExit(f"transport error reaching {self.url}: {e.reason}") from None
|
||||
|
||||
# Pick the response matching our id; fall back to first message.
|
||||
matched = next((m for m in messages if m.get("id") == rid), messages[0] if messages else None)
|
||||
if matched is None:
|
||||
raise SystemExit(f"no response for {method}")
|
||||
if "error" in matched:
|
||||
err = matched["error"]
|
||||
raise SystemExit(f"MCP error {err.get('code')}: {err.get('message')}")
|
||||
return matched.get("result")
|
||||
|
||||
def initialize(self) -> dict[str, Any]:
|
||||
return self.send(
|
||||
"initialize",
|
||||
{
|
||||
"protocolVersion": "2024-11-05",
|
||||
"capabilities": {},
|
||||
"clientInfo": {"name": "demo-mcp-call.py", "version": "1.0.0"},
|
||||
},
|
||||
)
|
||||
|
||||
def list_tools(self) -> list[dict[str, Any]]:
|
||||
result = self.send("tools/list")
|
||||
return result.get("tools", []) if isinstance(result, dict) else []
|
||||
|
||||
def call_tool(self, name: str, args: dict[str, Any]) -> Any:
|
||||
return self.send("tools/call", {"name": name, "arguments": args})
|
||||
|
||||
|
||||
def main() -> int:
    """CLI entry point: initialize an MCP session, list tools, optionally call one.

    Returns 0 on success; protocol/transport failures exit via SystemExit
    raised inside McpSession.
    """
    parser = argparse.ArgumentParser(description="Demo MCP request via McpToken bearer.")
    parser.add_argument("--url", default=os.environ.get("MCPGW_URL", "http://localhost:3200"),
                        help="Base URL of mcplocal (default: $MCPGW_URL or http://localhost:3200)")
    parser.add_argument("--project", default="sre",
                        help="Project name (default: sre). Must match the token's bound project.")
    parser.add_argument("--token", default=os.environ.get("MCPCTL_TOKEN"),
                        help="Raw mcpctl_pat_* bearer (default: $MCPCTL_TOKEN)")
    parser.add_argument("--tool", help="Optionally call a tool after tools/list")
    parser.add_argument("--args", default="{}", help="JSON-encoded arguments for --tool")
    parser.add_argument("--timeout", type=float, default=30.0)
    opts = parser.parse_args()

    if not opts.token:
        parser.error("--token or $MCPCTL_TOKEN required")

    endpoint = f"{opts.url.rstrip('/')}/projects/{opts.project}/mcp"
    print(f"→ POST {endpoint}")
    print(f"  Bearer: {opts.token[:16]}…")
    print()

    sess = McpSession(endpoint, bearer=opts.token, timeout=opts.timeout)

    info = sess.initialize()
    server_info = info.get("serverInfo", {}) if isinstance(info, dict) else {}
    print(f"initialize: protocol={info.get('protocolVersion') if isinstance(info, dict) else '?'} "
          f"server={server_info.get('name', '?')}/{server_info.get('version', '?')} "
          f"sessionId={sess.session_id}")

    tools = sess.list_tools()
    print(f"tools/list: {len(tools)} tool(s)")
    for t in tools:
        # BUGFIX: "".splitlines() is [] — indexing [0] raised IndexError for
        # tools with an empty or missing description.
        raw_desc = t.get("description") or ""
        desc = raw_desc.splitlines()[0][:80] if raw_desc else ""
        print(f"  - {t['name']} {desc}")

    if opts.tool:
        try:
            args = json.loads(opts.args)
        except json.JSONDecodeError as e:
            raise SystemExit(f"--args must be valid JSON: {e}")
        print(f"\ntools/call: {opts.tool} {args}")
        result = sess.call_tool(opts.tool, args)
        # Truncate large tool outputs so the demo stays readable.
        print(json.dumps(result, indent=2)[:2000])

    return 0


if __name__ == "__main__":
    sys.exit(main())
|
||||
@@ -184,7 +184,7 @@ async function extractTree(): Promise<CmdInfo> {
|
||||
// ============================================================
|
||||
|
||||
const CANONICAL_RESOURCES = [
|
||||
'servers', 'instances', 'secrets', 'templates', 'projects',
|
||||
'servers', 'instances', 'secrets', 'secretbackends', 'templates', 'projects',
|
||||
'users', 'groups', 'rbac', 'prompts', 'promptrequests',
|
||||
'serverattachments', 'proxymodels', 'all',
|
||||
];
|
||||
@@ -193,6 +193,7 @@ const ALIAS_ENTRIES: [string, string][] = [
|
||||
['server', 'servers'], ['srv', 'servers'],
|
||||
['instance', 'instances'], ['inst', 'instances'],
|
||||
['secret', 'secrets'], ['sec', 'secrets'],
|
||||
['secretbackend', 'secretbackends'], ['sb', 'secretbackends'],
|
||||
['template', 'templates'], ['tpl', 'templates'],
|
||||
['project', 'projects'], ['proj', 'projects'],
|
||||
['user', 'users'],
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import http from 'node:http';
|
||||
import https from 'node:https';
|
||||
|
||||
export interface ApiClientOptions {
|
||||
baseUrl: string;
|
||||
@@ -31,16 +32,18 @@ function request<T>(method: string, url: string, timeout: number, body?: unknown
|
||||
if (token) {
|
||||
headers['Authorization'] = `Bearer ${token}`;
|
||||
}
|
||||
const isHttps = parsed.protocol === 'https:';
|
||||
const opts: http.RequestOptions = {
|
||||
hostname: parsed.hostname,
|
||||
port: parsed.port,
|
||||
port: parsed.port || (isHttps ? 443 : 80),
|
||||
path: parsed.pathname + parsed.search,
|
||||
method,
|
||||
timeout,
|
||||
headers,
|
||||
};
|
||||
|
||||
const req = http.request(opts, (res) => {
|
||||
const driver = isHttps ? https : http;
|
||||
const req = driver.request(opts, (res) => {
|
||||
const chunks: Buffer[] = [];
|
||||
res.on('data', (chunk: Buffer) => chunks.push(chunk));
|
||||
res.on('end', () => {
|
||||
|
||||
@@ -41,6 +41,14 @@ const SecretSpecSchema = z.object({
|
||||
data: z.record(z.string()).default({}),
|
||||
});
|
||||
|
||||
// Declarative spec for a `kind: secretbackend` document consumed by `apply -f`.
// Driver-specific settings (url, tokenSecretRef, mount, ...) travel in the
// free-form `config` map and are validated by the driver server-side, not here.
// `isDefault` is optional so an apply file can omit it and leave the cluster's
// current default backend untouched.
const SecretBackendSpecSchema = z.object({
  name: z.string().min(1),
  type: z.string().min(1),
  description: z.string().default(''),
  isDefault: z.boolean().optional(),
  config: z.record(z.unknown()).default({}),
});
|
||||
|
||||
const TemplateEnvEntrySchema = z.object({
|
||||
name: z.string().min(1),
|
||||
description: z.string().optional(),
|
||||
@@ -132,7 +140,17 @@ const ProjectSpecSchema = z.object({
|
||||
servers: z.array(z.string()).default([]),
|
||||
});
|
||||
|
||||
// Declarative spec for a `kind: mcptoken` document consumed by `apply -f`.
// Names are lowercase-kebab and project-scoped; `expiresAt` is an ISO-8601
// datetime or explicit null (no expiry). `rbacMode` selects the starting
// bindings ('empty' or a clone of the creator's), and `bindings` adds
// explicit role bindings on top.
const McpTokenSpecSchema = z.object({
  name: z.string().min(1).max(100).regex(/^[a-z0-9-]+$/),
  project: z.string().min(1),
  description: z.string().default(''),
  expiresAt: z.union([z.string().datetime(), z.null()]).optional(),
  rbacMode: z.enum(['empty', 'clone']).default('empty'),
  bindings: z.array(RbacRoleBindingSchema).default([]),
});
|
||||
|
||||
const ApplyConfigSchema = z.object({
|
||||
secretbackends: z.array(SecretBackendSpecSchema).default([]),
|
||||
secrets: z.array(SecretSpecSchema).default([]),
|
||||
servers: z.array(ServerSpecSchema).default([]),
|
||||
users: z.array(UserSpecSchema).default([]),
|
||||
@@ -143,6 +161,7 @@ const ApplyConfigSchema = z.object({
|
||||
rbacBindings: z.array(RbacBindingSpecSchema).default([]),
|
||||
rbac: z.array(RbacBindingSpecSchema).default([]),
|
||||
prompts: z.array(PromptSpecSchema).default([]),
|
||||
mcptokens: z.array(McpTokenSpecSchema).default([]),
|
||||
}).transform((data) => ({
|
||||
...data,
|
||||
// Merge rbac into rbacBindings so both keys work
|
||||
@@ -173,6 +192,7 @@ export function createApplyCommand(deps: ApplyCommandDeps): Command {
|
||||
|
||||
if (opts.dryRun) {
|
||||
log('Dry run - would apply:');
|
||||
if (config.secretbackends.length > 0) log(` ${config.secretbackends.length} secretbackend(s)`);
|
||||
if (config.secrets.length > 0) log(` ${config.secrets.length} secret(s)`);
|
||||
if (config.servers.length > 0) log(` ${config.servers.length} server(s)`);
|
||||
if (config.users.length > 0) log(` ${config.users.length} user(s)`);
|
||||
@@ -182,6 +202,7 @@ export function createApplyCommand(deps: ApplyCommandDeps): Command {
|
||||
if (config.serverattachments.length > 0) log(` ${config.serverattachments.length} serverattachment(s)`);
|
||||
if (config.rbacBindings.length > 0) log(` ${config.rbacBindings.length} rbacBinding(s)`);
|
||||
if (config.prompts.length > 0) log(` ${config.prompts.length} prompt(s)`);
|
||||
if (config.mcptokens.length > 0) log(` ${config.mcptokens.length} mcptoken(s)`);
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -217,6 +238,8 @@ const KIND_TO_RESOURCE: Record<string, string> = {
|
||||
prompt: 'prompts',
|
||||
promptrequest: 'promptrequests',
|
||||
serverattachment: 'serverattachments',
|
||||
mcptoken: 'mcptokens',
|
||||
secretbackend: 'secretbackends',
|
||||
};
|
||||
|
||||
/**
|
||||
@@ -312,6 +335,30 @@ async function applyConfig(client: ApiClient, config: ApplyConfig, log: (...args
|
||||
}
|
||||
}
|
||||
|
||||
// Apply secret backends first — secrets reference them.
|
||||
// When multiple backends claim isDefault: true, the server's atomic swap will
|
||||
// leave whichever was applied last as the effective default.
|
||||
for (const sb of config.secretbackends) {
|
||||
try {
|
||||
const existing = await cachedFindByName('secretbackends', sb.name);
|
||||
if (existing) {
|
||||
const updateBody: Record<string, unknown> = {
|
||||
config: sb.config,
|
||||
description: sb.description,
|
||||
};
|
||||
if (sb.isDefault !== undefined) updateBody.isDefault = sb.isDefault;
|
||||
await withRetry(() => client.put(`/api/v1/secretbackends/${existing.id}`, updateBody));
|
||||
log(`Updated secretbackend: ${sb.name}`);
|
||||
} else {
|
||||
await withRetry(() => client.post('/api/v1/secretbackends', sb));
|
||||
invalidateCache('secretbackends');
|
||||
log(`Created secretbackend: ${sb.name}`);
|
||||
}
|
||||
} catch (err) {
|
||||
log(`Error applying secretbackend '${sb.name}': ${err instanceof Error ? err.message : err}`);
|
||||
}
|
||||
}
|
||||
|
||||
// Apply secrets
|
||||
for (const secret of config.secrets) {
|
||||
try {
|
||||
@@ -529,6 +576,46 @@ async function applyConfig(client: ApiClient, config: ApplyConfig, log: (...args
|
||||
log(`Error applying prompt '${prompt.name}': ${err instanceof Error ? err.message : err}`);
|
||||
}
|
||||
}
|
||||
|
||||
// --- McpTokens ---
|
||||
// Apply semantics: tokens are immutable (their secret is minted once). If an
|
||||
// active token with the same name+project already exists we skip, logging the
|
||||
// state. Otherwise we create and log the raw token (shown exactly once).
|
||||
for (const tok of config.mcptokens) {
|
||||
try {
|
||||
const proj = await cachedFindByName('projects', tok.project);
|
||||
if (!proj) {
|
||||
log(`Error applying mcptoken '${tok.name}': project '${tok.project}' not found`);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Check if an active one already exists
|
||||
const existing = await client
|
||||
.get<Array<{ id: string; name: string; status: string }>>(`/api/v1/mcptokens?projectName=${encodeURIComponent(tok.project)}`)
|
||||
.catch(() => []);
|
||||
const active = existing.find((t) => t.name === tok.name && t.status === 'active');
|
||||
if (active) {
|
||||
log(`mcptoken '${tok.name}' already active in project '${tok.project}' — skipped (tokens are immutable)`);
|
||||
continue;
|
||||
}
|
||||
|
||||
const body: Record<string, unknown> = {
|
||||
name: tok.name,
|
||||
projectId: proj.id,
|
||||
description: tok.description,
|
||||
rbacMode: tok.rbacMode,
|
||||
bindings: tok.bindings,
|
||||
};
|
||||
if (tok.expiresAt !== undefined) body.expiresAt = tok.expiresAt;
|
||||
|
||||
const created = await withRetry(() => client.post<{ id: string; name: string; token: string }>('/api/v1/mcptokens', body));
|
||||
log(`Created mcptoken: ${tok.name} (project: ${tok.project})`);
|
||||
log(` token: ${created.token}`);
|
||||
log(' (raw token shown once — copy it now)');
|
||||
} catch (err) {
|
||||
log(`Error applying mcptoken '${tok.name}': ${err instanceof Error ? err.message : err}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async function findByField<T extends string>(client: ApiClient, resource: string, field: T, value: string): Promise<unknown | null> {
|
||||
|
||||
@@ -23,6 +23,9 @@ export interface AuditEvent {
|
||||
serverName: string | null;
|
||||
correlationId: string | null;
|
||||
parentEventId: string | null;
|
||||
userName?: string | null;
|
||||
tokenName?: string | null;
|
||||
tokenSha?: string | null;
|
||||
payload: Record<string, unknown>;
|
||||
}
|
||||
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { Command } from 'commander';
|
||||
import { type ApiClient, ApiError } from '../api-client.js';
|
||||
import { resolveNameOrId } from './shared.js';
|
||||
import { parseRoleBinding } from './rbac-bindings.js';
|
||||
export interface CreateCommandDeps {
|
||||
client: ApiClient;
|
||||
log: (...args: unknown[]) => void;
|
||||
@@ -10,6 +11,37 @@ function collect(value: string, prev: string[]): string[] {
|
||||
return [...prev, value];
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse a `--ttl` value.
|
||||
*
|
||||
* - `"never"` → null (no expiry)
|
||||
* - `"30d"`, `"12h"`, `"2w"`, `"90m"`, `"60s"` → ISO8601 string relative to now
|
||||
* - An ISO8601 datetime → returned as-is
|
||||
*/
|
||||
function parseTtl(value: string): string | null {
|
||||
const trimmed = value.trim();
|
||||
if (trimmed.toLowerCase() === 'never') return null;
|
||||
const match = trimmed.match(/^(\d+)([smhdw])$/i);
|
||||
if (match) {
|
||||
const amount = Number(match[1]);
|
||||
const unit = match[2]!.toLowerCase();
|
||||
const multipliers: Record<string, number> = {
|
||||
s: 1000,
|
||||
m: 60 * 1000,
|
||||
h: 3600 * 1000,
|
||||
d: 86400 * 1000,
|
||||
w: 7 * 86400 * 1000,
|
||||
};
|
||||
return new Date(Date.now() + amount * multipliers[unit]!).toISOString();
|
||||
}
|
||||
// Try to parse as ISO8601
|
||||
const parsed = new Date(trimmed);
|
||||
if (isNaN(parsed.getTime())) {
|
||||
throw new Error(`Invalid --ttl '${value}'. Expected 'never', a duration like '30d' / '12h', or an ISO8601 datetime.`);
|
||||
}
|
||||
return parsed.toISOString();
|
||||
}
|
||||
|
||||
interface ServerEnvEntry {
|
||||
name: string;
|
||||
value?: string;
|
||||
@@ -56,7 +88,7 @@ export function createCreateCommand(deps: CreateCommandDeps): Command {
|
||||
const { client, log } = deps;
|
||||
|
||||
const cmd = new Command('create')
|
||||
.description('Create a resource (server, secret, project, user, group, rbac, serverattachment, prompt)');
|
||||
.description('Create a resource (server, secret, secretbackend, project, user, group, rbac, serverattachment, prompt)');
|
||||
|
||||
// --- create server ---
|
||||
cmd.command('server')
|
||||
@@ -220,6 +252,70 @@ export function createCreateCommand(deps: CreateCommandDeps): Command {
|
||||
}
|
||||
});
|
||||
|
||||
// --- create secretbackend ---
|
||||
cmd.command('secretbackend')
|
||||
.alias('sb')
|
||||
.description('Create a secret backend (plaintext, openbao)')
|
||||
.argument('<name>', 'Backend name (lowercase, hyphens allowed)')
|
||||
.requiredOption('--type <type>', 'Backend type (plaintext, openbao)')
|
||||
.option('--description <text>', 'Description')
|
||||
.option('--default', 'Promote this backend to default (atomically demotes the current one)')
|
||||
.option('--url <url>', 'openbao: vault URL (e.g. http://bao.example:8200)')
|
||||
.option('--namespace <ns>', 'openbao: X-Vault-Namespace header value')
|
||||
.option('--mount <mount>', 'openbao: KV v2 mount point (default: secret)')
|
||||
.option('--path-prefix <prefix>', 'openbao: path prefix under mount (default: mcpctl)')
|
||||
.option('--token-secret <ref>', 'openbao: token secret reference in SECRET/KEY form (e.g. bao-creds/token)')
|
||||
.option('--config <entry>', 'Extra config as key=value (repeat for multiple)', collect, [])
|
||||
.option('--force', 'Update if already exists')
|
||||
.action(async (name: string, opts) => {
|
||||
const type = opts.type as string;
|
||||
const config: Record<string, unknown> = {};
|
||||
|
||||
if (type === 'openbao') {
|
||||
if (!opts.url) throw new Error('--url is required for openbao backend');
|
||||
if (!opts.tokenSecret) throw new Error('--token-secret is required for openbao backend (format: SECRET/KEY)');
|
||||
const slashIdx = (opts.tokenSecret as string).indexOf('/');
|
||||
if (slashIdx < 1) throw new Error(`Invalid --token-secret '${opts.tokenSecret as string}'. Expected SECRET_NAME/KEY_NAME`);
|
||||
config.url = opts.url;
|
||||
config.tokenSecretRef = {
|
||||
name: (opts.tokenSecret as string).slice(0, slashIdx),
|
||||
key: (opts.tokenSecret as string).slice(slashIdx + 1),
|
||||
};
|
||||
if (opts.namespace) config.namespace = opts.namespace;
|
||||
if (opts.mount) config.mount = opts.mount;
|
||||
if (opts.pathPrefix) config.pathPrefix = opts.pathPrefix;
|
||||
}
|
||||
|
||||
// Extra config key=value pairs (overwrite/extend above)
|
||||
for (const entry of opts.config as string[]) {
|
||||
const eqIdx = entry.indexOf('=');
|
||||
if (eqIdx === -1) throw new Error(`Invalid --config '${entry}'. Expected key=value`);
|
||||
config[entry.slice(0, eqIdx)] = entry.slice(eqIdx + 1);
|
||||
}
|
||||
|
||||
const body: Record<string, unknown> = { name, type, config };
|
||||
if (opts.description !== undefined) body.description = opts.description;
|
||||
if (opts.default) body.isDefault = true;
|
||||
|
||||
try {
|
||||
const row = await client.post<{ id: string; name: string }>('/api/v1/secretbackends', body);
|
||||
log(`secretbackend '${row.name}' created (id: ${row.id})`);
|
||||
if (opts.default) log(` promoted to default backend`);
|
||||
} catch (err) {
|
||||
if (err instanceof ApiError && err.status === 409 && opts.force) {
|
||||
const existing = (await client.get<Array<{ id: string; name: string }>>('/api/v1/secretbackends')).find((b) => b.name === name);
|
||||
if (!existing) throw err;
|
||||
const updateBody: Record<string, unknown> = { config };
|
||||
if (opts.description !== undefined) updateBody.description = opts.description;
|
||||
if (opts.default) updateBody.isDefault = true;
|
||||
await client.put(`/api/v1/secretbackends/${existing.id}`, updateBody);
|
||||
log(`secretbackend '${name}' updated (id: ${existing.id})`);
|
||||
} else {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// --- create project ---
|
||||
cmd.command('project')
|
||||
.description('Create a project')
|
||||
@@ -331,8 +427,12 @@ export function createCreateCommand(deps: CreateCommandDeps): Command {
|
||||
.description('Create an RBAC binding definition')
|
||||
.argument('<name>', 'RBAC binding name')
|
||||
.option('--subject <entry>', 'Subject as Kind:name (repeat for multiple)', collect, [])
|
||||
.option('--binding <entry>', 'Role binding as role:resource (e.g. edit:servers, run:projects)', collect, [])
|
||||
.option('--operation <action>', 'Operation binding (e.g. logs, backup)', collect, [])
|
||||
.option(
|
||||
'--roleBindings <entry>',
|
||||
'Role binding as key:value pairs, e.g. "role:view,resource:servers" or "role:view,resource:servers,name:my-ha" or "action:logs" (repeat for multiple)',
|
||||
collect,
|
||||
[],
|
||||
)
|
||||
.option('--force', 'Update if already exists')
|
||||
.action(async (name: string, opts) => {
|
||||
const subjects = (opts.subject as string[]).map((entry: string) => {
|
||||
@@ -343,24 +443,7 @@ export function createCreateCommand(deps: CreateCommandDeps): Command {
|
||||
return { kind: entry.slice(0, colonIdx), name: entry.slice(colonIdx + 1) };
|
||||
});
|
||||
|
||||
const roleBindings: Array<Record<string, string>> = [];
|
||||
|
||||
// Resource bindings from --binding flag (role:resource or role:resource:name)
|
||||
for (const entry of opts.binding as string[]) {
|
||||
const parts = entry.split(':');
|
||||
if (parts.length === 2) {
|
||||
roleBindings.push({ role: parts[0]!, resource: parts[1]! });
|
||||
} else if (parts.length === 3) {
|
||||
roleBindings.push({ role: parts[0]!, resource: parts[1]!, name: parts[2]! });
|
||||
} else {
|
||||
throw new Error(`Invalid binding format '${entry}'. Expected role:resource or role:resource:name (e.g. edit:servers, view:servers:my-ha)`);
|
||||
}
|
||||
}
|
||||
|
||||
// Operation bindings from --operation flag
|
||||
for (const action of opts.operation as string[]) {
|
||||
roleBindings.push({ role: 'run', action });
|
||||
}
|
||||
const roleBindings = (opts.roleBindings as string[]).map((entry: string) => parseRoleBinding(entry));
|
||||
|
||||
const body: Record<string, unknown> = {
|
||||
name,
|
||||
@@ -384,6 +467,83 @@ export function createCreateCommand(deps: CreateCommandDeps): Command {
|
||||
}
|
||||
});
|
||||
|
||||
// --- create mcptoken ---
|
||||
cmd.command('mcptoken')
|
||||
.description('Create a project-scoped API token for HTTP-mode mcplocal. The raw token is printed once.')
|
||||
.argument('<name>', 'Token name (unique within a project)')
|
||||
.requiredOption('-p, --project <name>', 'Project this token is bound to')
|
||||
.option('--rbac <mode>', "Base RBAC: 'empty' (default, no bindings) or 'clone' (snapshot creator's perms)", 'empty')
|
||||
.option(
|
||||
'--bind <entry>',
|
||||
'Additional role binding as key:value pairs, e.g. "role:view,resource:servers" or "action:logs" (repeat for multiple). Creator perms are the ceiling.',
|
||||
collect,
|
||||
[],
|
||||
)
|
||||
.option('--ttl <duration>', "Expiry: '30d', '12h', 'never', or an ISO8601 datetime")
|
||||
.option('--description <text>', 'Freeform description')
|
||||
.option('--force', 'Revoke any existing active token with this name, then create a new one')
|
||||
.action(async (name: string, opts) => {
|
||||
// Resolve project name → id (mcpd's create route accepts either, but resolve client-side for clearer errors)
|
||||
const projectId = await resolveNameOrId(client, 'projects', opts.project as string);
|
||||
|
||||
const bindings = (opts.bind as string[]).map((entry: string) => parseRoleBinding(entry));
|
||||
|
||||
const rbacMode = (opts.rbac as string).toLowerCase();
|
||||
if (rbacMode !== 'empty' && rbacMode !== 'clone') {
|
||||
throw new Error(`--rbac must be 'empty' or 'clone' (got '${opts.rbac as string}')`);
|
||||
}
|
||||
|
||||
let expiresAt: string | null | undefined;
|
||||
if (opts.ttl !== undefined) {
|
||||
expiresAt = parseTtl(opts.ttl as string);
|
||||
}
|
||||
|
||||
const body: Record<string, unknown> = {
|
||||
name,
|
||||
projectId,
|
||||
rbacMode,
|
||||
bindings,
|
||||
};
|
||||
if (expiresAt !== undefined) body.expiresAt = expiresAt;
|
||||
if (opts.description !== undefined) body.description = opts.description;
|
||||
|
||||
type Created = {
|
||||
id: string;
|
||||
name: string;
|
||||
projectName: string;
|
||||
tokenPrefix: string;
|
||||
token: string;
|
||||
expiresAt: string | null;
|
||||
};
|
||||
|
||||
const doCreate = async (): Promise<Created> => client.post<Created>('/api/v1/mcptokens', body);
|
||||
|
||||
let created: Created;
|
||||
try {
|
||||
created = await doCreate();
|
||||
} catch (err) {
|
||||
if (err instanceof ApiError && err.status === 409 && opts.force) {
|
||||
// Find the existing active token by name+project and revoke it, then retry.
|
||||
const existing = (await client.get<Array<{ id: string; name: string }>>(
|
||||
`/api/v1/mcptokens?projectName=${encodeURIComponent(opts.project as string)}`,
|
||||
)).find((r) => r.name === name);
|
||||
if (!existing) throw err;
|
||||
await client.post(`/api/v1/mcptokens/${existing.id}/revoke`, {});
|
||||
created = await doCreate();
|
||||
} else {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
log(`mcptoken '${created.name}' created (project: ${created.projectName}, id: ${created.id})`);
|
||||
log('');
|
||||
log('Copy this token now — it will NOT be shown again:');
|
||||
log('');
|
||||
log(` ${created.token}`);
|
||||
log('');
|
||||
log(`Export it with: export MCPCTL_TOKEN=${created.token}`);
|
||||
});
|
||||
|
||||
// --- create prompt ---
|
||||
cmd.command('prompt')
|
||||
.description('Create an approved prompt')
|
||||
|
||||
@@ -29,6 +29,27 @@ export function createDeleteCommand(deps: DeleteCommandDeps): Command {
|
||||
return;
|
||||
}
|
||||
|
||||
// Mcptokens: names are scoped to a project, so require --project unless the caller passes a CUID
|
||||
if (resource === 'mcptokens') {
|
||||
let tokenId: string;
|
||||
if (/^c[a-z0-9]{24}/.test(idOrName)) {
|
||||
tokenId = idOrName;
|
||||
} else {
|
||||
if (!opts.project) {
|
||||
throw new Error('--project is required to delete an mcptoken by name (or pass the id).');
|
||||
}
|
||||
const items = await client.get<Array<{ id: string; name: string }>>(
|
||||
`/api/v1/mcptokens?projectName=${encodeURIComponent(opts.project)}`,
|
||||
);
|
||||
const match = items.find((i) => i.name === idOrName);
|
||||
if (!match) throw new Error(`mcptoken '${idOrName}' not found in project '${opts.project}'`);
|
||||
tokenId = match.id;
|
||||
}
|
||||
await client.delete(`/api/v1/mcptokens/${tokenId}`);
|
||||
log(`mcptoken '${idOrName}' deleted.`);
|
||||
return;
|
||||
}
|
||||
|
||||
// Resolve name → ID for any resource type
|
||||
let id: string;
|
||||
try {
|
||||
|
||||
@@ -218,6 +218,37 @@ function formatSecretDetail(secret: Record<string, unknown>, showValues: boolean
|
||||
return lines.join('\n');
|
||||
}
|
||||
|
||||
function formatSecretBackendDetail(backend: Record<string, unknown>): string {
|
||||
const lines: string[] = [];
|
||||
lines.push(`=== SecretBackend: ${backend.name} ===`);
|
||||
lines.push(`${pad('Name:')}${backend.name}`);
|
||||
lines.push(`${pad('Type:')}${backend.type}`);
|
||||
lines.push(`${pad('Default:')}${backend.isDefault ? 'yes' : 'no'}`);
|
||||
if (backend.description) lines.push(`${pad('Description:')}${backend.description}`);
|
||||
|
||||
const config = backend.config as Record<string, unknown> | undefined;
|
||||
if (config && Object.keys(config).length > 0) {
|
||||
lines.push('');
|
||||
lines.push('Config:');
|
||||
const keyW = Math.max(6, ...Object.keys(config).map((k) => k.length)) + 2;
|
||||
for (const [key, value] of Object.entries(config)) {
|
||||
let display: string;
|
||||
if (value === null || value === undefined) display = '-';
|
||||
else if (typeof value === 'object') display = JSON.stringify(value);
|
||||
else display = String(value);
|
||||
lines.push(` ${key.padEnd(keyW)}${display}`);
|
||||
}
|
||||
}
|
||||
|
||||
lines.push('');
|
||||
lines.push('Metadata:');
|
||||
lines.push(` ${pad('ID:', 12)}${backend.id}`);
|
||||
if (backend.createdAt) lines.push(` ${pad('Created:', 12)}${backend.createdAt}`);
|
||||
if (backend.updatedAt) lines.push(` ${pad('Updated:', 12)}${backend.updatedAt}`);
|
||||
|
||||
return lines.join('\n');
|
||||
}
|
||||
|
||||
function formatTemplateDetail(template: Record<string, unknown>): string {
|
||||
const lines: string[] = [];
|
||||
lines.push(`=== Template: ${template.name} ===`);
|
||||
@@ -503,6 +534,42 @@ function formatRbacDetail(rbac: Record<string, unknown>): string {
|
||||
return lines.join('\n');
|
||||
}
|
||||
|
||||
function formatMcpTokenDetail(token: Record<string, unknown>, allRbac: RbacDef[]): string {
|
||||
const lines: string[] = [];
|
||||
lines.push(`=== McpToken: ${token.name} ===`);
|
||||
lines.push(`${pad('Name:')}${token.name}`);
|
||||
lines.push(`${pad('Project:')}${token.projectName ?? token.projectId ?? '-'}`);
|
||||
lines.push(`${pad('Status:')}${token.status ?? '-'}`);
|
||||
lines.push(`${pad('Prefix:')}${token.tokenPrefix ?? '-'}`);
|
||||
if (token.description) lines.push(`${pad('Description:')}${token.description}`);
|
||||
lines.push(`${pad('Owner:')}${token.ownerEmail ?? token.ownerId ?? '-'}`);
|
||||
lines.push(`${pad('Created:')}${token.createdAt ?? '-'}`);
|
||||
lines.push(`${pad('Last Used:')}${token.lastUsedAt ?? 'never'}`);
|
||||
lines.push(`${pad('Expires:')}${token.expiresAt ?? 'never'}`);
|
||||
if (token.revokedAt) lines.push(`${pad('Revoked At:')}${token.revokedAt}`);
|
||||
|
||||
// Find the auto-created RbacDefinition (subject McpToken:<sha>) to surface bindings.
|
||||
// We don't know the sha from the describe response — match by convention: name 'mcptoken-<id>'.
|
||||
const rbacDef = allRbac.find((r) => r.name === `mcptoken-${token.id as string}`);
|
||||
if (rbacDef && Array.isArray(rbacDef.roleBindings) && rbacDef.roleBindings.length > 0) {
|
||||
lines.push('');
|
||||
lines.push('Bindings:');
|
||||
for (const b of rbacDef.roleBindings as Array<{ role: string; resource?: string; action?: string; name?: string }>) {
|
||||
if (b.action !== undefined) {
|
||||
lines.push(` run ${b.action}`);
|
||||
} else if (b.resource !== undefined) {
|
||||
lines.push(` ${b.role} ${b.resource}${b.name !== undefined ? `/${b.name}` : ''}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
lines.push('');
|
||||
lines.push('Metadata:');
|
||||
lines.push(` ${pad('ID:', 12)}${token.id}`);
|
||||
|
||||
return lines.join('\n');
|
||||
}
|
||||
|
||||
async function formatPromptDetail(prompt: Record<string, unknown>, client?: ApiClient): Promise<string> {
|
||||
const lines: string[] = [];
|
||||
lines.push(`=== Prompt: ${prompt.name} ===`);
|
||||
@@ -770,6 +837,9 @@ export function createDescribeCommand(deps: DescribeCommandDeps): Command {
|
||||
case 'templates':
|
||||
deps.log(formatTemplateDetail(item));
|
||||
break;
|
||||
case 'secretbackends':
|
||||
deps.log(formatSecretBackendDetail(item));
|
||||
break;
|
||||
case 'projects': {
|
||||
const projectPrompts = await deps.client
|
||||
.get<Array<{ name: string; priority: number; linkTarget: string | null }>>(`/api/v1/prompts?projectId=${item.id as string}`)
|
||||
@@ -801,6 +871,14 @@ export function createDescribeCommand(deps: DescribeCommandDeps): Command {
|
||||
case 'prompts':
|
||||
deps.log(await formatPromptDetail(item, deps.client));
|
||||
break;
|
||||
case 'mcptokens': {
|
||||
// Fetch the auto-created RbacDefinition (if any) so bindings are visible in describe.
|
||||
const rbacForToken = await deps.client
|
||||
.get<RbacDef[]>('/api/v1/rbac')
|
||||
.catch(() => [] as RbacDef[]);
|
||||
deps.log(formatMcpTokenDetail(item, rbacForToken));
|
||||
break;
|
||||
}
|
||||
default:
|
||||
deps.log(formatGenericDetail(item));
|
||||
}
|
||||
|
||||
@@ -119,6 +119,44 @@ const rbacColumns: Column<RbacRow>[] = [
|
||||
{ header: 'ID', key: 'id' },
|
||||
];
|
||||
|
||||
interface SecretBackendRow {
|
||||
id: string;
|
||||
name: string;
|
||||
type: string;
|
||||
isDefault: boolean;
|
||||
description: string;
|
||||
config?: Record<string, unknown>;
|
||||
}
|
||||
|
||||
const secretBackendColumns: Column<SecretBackendRow>[] = [
|
||||
{ header: 'NAME', key: 'name' },
|
||||
{ header: 'TYPE', key: 'type', width: 14 },
|
||||
{ header: 'DEFAULT', key: (r) => r.isDefault ? '*' : '', width: 8 },
|
||||
{ header: 'DESCRIPTION', key: (r) => r.description || '-', width: 30 },
|
||||
{ header: 'ID', key: 'id' },
|
||||
];
|
||||
|
||||
interface McpTokenRow {
|
||||
id: string;
|
||||
name: string;
|
||||
projectName: string;
|
||||
tokenPrefix: string;
|
||||
createdAt: string;
|
||||
lastUsedAt: string | null;
|
||||
expiresAt: string | null;
|
||||
status: 'active' | 'revoked' | 'expired';
|
||||
}
|
||||
|
||||
const mcpTokenColumns: Column<McpTokenRow>[] = [
|
||||
{ header: 'NAME', key: 'name', width: 24 },
|
||||
{ header: 'PROJECT', key: 'projectName', width: 20 },
|
||||
{ header: 'PREFIX', key: 'tokenPrefix', width: 18 },
|
||||
{ header: 'CREATED', key: (r) => new Date(r.createdAt).toLocaleString(), width: 20 },
|
||||
{ header: 'LAST USED', key: (r) => r.lastUsedAt ? new Date(r.lastUsedAt).toLocaleString() : '-', width: 20 },
|
||||
{ header: 'EXPIRES', key: (r) => r.expiresAt ? new Date(r.expiresAt).toLocaleString() : 'never', width: 20 },
|
||||
{ header: 'STATUS', key: 'status', width: 10 },
|
||||
];
|
||||
|
||||
const secretColumns: Column<SecretRow>[] = [
|
||||
{ header: 'NAME', key: 'name' },
|
||||
{ header: 'KEYS', key: (r) => Object.keys(r.data).join(', ') || '-', width: 40 },
|
||||
@@ -242,6 +280,10 @@ function getColumnsForResource(resource: string): Column<Record<string, unknown>
|
||||
return serverAttachmentColumns as unknown as Column<Record<string, unknown>>[];
|
||||
case 'proxymodels':
|
||||
return proxymodelColumns as unknown as Column<Record<string, unknown>>[];
|
||||
case 'mcptokens':
|
||||
return mcpTokenColumns as unknown as Column<Record<string, unknown>>[];
|
||||
case 'secretbackends':
|
||||
return secretBackendColumns as unknown as Column<Record<string, unknown>>[];
|
||||
default:
|
||||
return [
|
||||
{ header: 'ID', key: 'id' as keyof Record<string, unknown> },
|
||||
@@ -263,6 +305,8 @@ const RESOURCE_KIND: Record<string, string> = {
|
||||
prompts: 'prompt',
|
||||
promptrequests: 'promptrequest',
|
||||
serverattachments: 'serverattachment',
|
||||
mcptokens: 'mcptoken',
|
||||
secretbackends: 'secretbackend',
|
||||
};
|
||||
|
||||
/**
|
||||
|
||||
80
src/cli/src/commands/migrate.ts
Normal file
80
src/cli/src/commands/migrate.ts
Normal file
@@ -0,0 +1,80 @@
|
||||
import { Command } from 'commander';
|
||||
import type { ApiClient } from '../api-client.js';
|
||||
|
||||
// Dependencies injected into the migrate command (allows tests to stub the
// API client and capture log output).
export interface MigrateCommandDeps {
  client: ApiClient;
  log: (...args: unknown[]) => void;
}

// Response shape of POST /api/v1/secrets/migrate for a real (non-dry) run:
// per-secret outcome buckets.
interface MigrateResult {
  migrated: Array<{ name: string }>;
  skipped: Array<{ name: string; reason: string }>;
  failed: Array<{ name: string; error: string }>;
}

// Response shape when the request carried `dryRun: true`: only lists the
// secrets that would be moved, without touching them.
interface DryRunResult {
  dryRun: true;
  candidates: Array<{ id: string; name: string }>;
}
|
||||
|
||||
/**
|
||||
* Top-level `mcpctl migrate <subcommand>` verb.
|
||||
*
|
||||
* Today only `secrets` is implemented (SecretBackend → SecretBackend move),
|
||||
* but the command is structured so new migrations can slot in.
|
||||
*
|
||||
* Per-secret atomicity is handled server-side — if this command is interrupted
|
||||
* mid-run, re-running is idempotent (skips secrets already on the destination).
|
||||
*/
|
||||
export function createMigrateCommand(deps: MigrateCommandDeps): Command {
|
||||
const { client, log } = deps;
|
||||
|
||||
const cmd = new Command('migrate')
|
||||
.description('Move resources between backends (currently: secrets between SecretBackends)');
|
||||
|
||||
cmd.command('secrets')
|
||||
.description('Migrate secrets from one SecretBackend to another')
|
||||
.requiredOption('--from <name>', 'Source SecretBackend name')
|
||||
.requiredOption('--to <name>', 'Destination SecretBackend name')
|
||||
.option('--names <csv>', 'Comma-separated secret names (default: all)')
|
||||
.option('--keep-source', 'Leave the source copy intact (default: delete from source after write+commit)')
|
||||
.option('--dry-run', 'Show which secrets would be migrated without touching them')
|
||||
.action(async (opts) => {
|
||||
const body: Record<string, unknown> = { from: opts.from, to: opts.to };
|
||||
if (opts.names) body.names = (opts.names as string).split(',').map((s) => s.trim()).filter(Boolean);
|
||||
if (opts.keepSource) body.keepSource = true;
|
||||
if (opts.dryRun) body.dryRun = true;
|
||||
|
||||
if (opts.dryRun) {
|
||||
const res = await client.post<DryRunResult>('/api/v1/secrets/migrate', body);
|
||||
if (res.candidates.length === 0) {
|
||||
log(`No secrets to migrate from '${opts.from as string}' to '${opts.to as string}'.`);
|
||||
return;
|
||||
}
|
||||
log(`Dry run — ${String(res.candidates.length)} secret(s) would be migrated from '${opts.from as string}' → '${opts.to as string}':`);
|
||||
for (const c of res.candidates) log(` - ${c.name}`);
|
||||
return;
|
||||
}
|
||||
|
||||
const res = await client.post<MigrateResult>('/api/v1/secrets/migrate', body);
|
||||
|
||||
if (res.migrated.length > 0) {
|
||||
log(`Migrated ${String(res.migrated.length)} secret(s) from '${opts.from as string}' → '${opts.to as string}':`);
|
||||
for (const m of res.migrated) log(` ✓ ${m.name}`);
|
||||
}
|
||||
if (res.skipped.length > 0) {
|
||||
log(`Skipped ${String(res.skipped.length)}:`);
|
||||
for (const s of res.skipped) log(` - ${s.name}: ${s.reason}`);
|
||||
}
|
||||
if (res.failed.length > 0) {
|
||||
log(`Failed ${String(res.failed.length)}:`);
|
||||
for (const f of res.failed) log(` ✗ ${f.name}: ${f.error}`);
|
||||
process.exitCode = 1;
|
||||
}
|
||||
if (res.migrated.length === 0 && res.skipped.length === 0 && res.failed.length === 0) {
|
||||
log(`No secrets to migrate from '${opts.from as string}' to '${opts.to as string}'.`);
|
||||
}
|
||||
});
|
||||
|
||||
return cmd;
|
||||
}
|
||||
49
src/cli/src/commands/rbac-bindings.ts
Normal file
49
src/cli/src/commands/rbac-bindings.ts
Normal file
@@ -0,0 +1,49 @@
|
||||
/**
|
||||
* Parse one `--roleBindings <kv>` entry into a role-binding object the API accepts.
|
||||
*
|
||||
* Accepted forms:
|
||||
* role:view,resource:servers → resource binding (unscoped)
|
||||
* role:view,resource:servers,name:my-ha → resource binding (name-scoped)
|
||||
* action:logs → operation binding (role:run is implied)
|
||||
*
|
||||
* Whitespace around keys/values is trimmed. Keys must be one of: role, resource, name, action.
|
||||
*/
|
||||
export type RoleBindingEntry =
|
||||
| { role: string; resource: string; name?: string }
|
||||
| { role: 'run'; action: string };
|
||||
|
||||
export function parseRoleBinding(entry: string): RoleBindingEntry {
|
||||
const pairs: Record<string, string> = {};
|
||||
for (const part of entry.split(',')) {
|
||||
const colonIdx = part.indexOf(':');
|
||||
if (colonIdx === -1) {
|
||||
throw new Error(`Invalid roleBindings entry '${entry}': expected key:value pairs separated by commas`);
|
||||
}
|
||||
const key = part.slice(0, colonIdx).trim();
|
||||
const value = part.slice(colonIdx + 1).trim();
|
||||
if (!key || !value) {
|
||||
throw new Error(`Invalid roleBindings entry '${entry}': empty key or value`);
|
||||
}
|
||||
if (!['role', 'resource', 'name', 'action'].includes(key)) {
|
||||
throw new Error(`Invalid roleBindings key '${key}' in '${entry}': expected one of role, resource, name, action`);
|
||||
}
|
||||
pairs[key] = value;
|
||||
}
|
||||
|
||||
// Operation binding: presence of `action:` implies role:run
|
||||
if (pairs['action'] !== undefined) {
|
||||
if (pairs['resource'] !== undefined || pairs['name'] !== undefined) {
|
||||
throw new Error(`Invalid roleBindings entry '${entry}': 'action' cannot be combined with 'resource' or 'name'`);
|
||||
}
|
||||
return { role: 'run', action: pairs['action'] };
|
||||
}
|
||||
|
||||
// Resource binding
|
||||
if (pairs['role'] === undefined || pairs['resource'] === undefined) {
|
||||
throw new Error(`Invalid roleBindings entry '${entry}': need either 'action:…' or both 'role:…,resource:…'`);
|
||||
}
|
||||
if (pairs['name'] !== undefined) {
|
||||
return { role: pairs['role'], resource: pairs['resource'], name: pairs['name'] };
|
||||
}
|
||||
return { role: pairs['role'], resource: pairs['resource'] };
|
||||
}
|
||||
@@ -27,6 +27,13 @@ export const RESOURCE_ALIASES: Record<string, string> = {
|
||||
proxymodel: 'proxymodels',
|
||||
proxymodels: 'proxymodels',
|
||||
pm: 'proxymodels',
|
||||
mcptoken: 'mcptokens',
|
||||
mcptokens: 'mcptokens',
|
||||
token: 'mcptokens',
|
||||
tokens: 'mcptokens',
|
||||
secretbackend: 'secretbackends',
|
||||
secretbackends: 'secretbackends',
|
||||
sb: 'secretbackends',
|
||||
all: 'all',
|
||||
};
|
||||
|
||||
@@ -72,6 +79,21 @@ export function stripInternalFields(obj: Record<string, unknown>): Record<string
|
||||
delete result[key];
|
||||
}
|
||||
|
||||
// McpToken-specific: promote projectName → project; drop secret/derived fields
|
||||
if ('tokenHash' in result || 'tokenPrefix' in result) {
|
||||
delete result.tokenHash;
|
||||
delete result.tokenPrefix;
|
||||
delete result.lastUsedAt;
|
||||
delete result.revokedAt;
|
||||
delete result.status;
|
||||
delete result.ownerEmail;
|
||||
if (typeof result.projectName === 'string') {
|
||||
result.project = result.projectName;
|
||||
delete result.projectName;
|
||||
delete result.projectId;
|
||||
}
|
||||
}
|
||||
|
||||
// Rename linkTarget → link for cleaner YAML
|
||||
if ('linkTarget' in result) {
|
||||
result.link = result.linkTarget;
|
||||
|
||||
@@ -1,5 +1,11 @@
|
||||
import { Command } from 'commander';
|
||||
import http from 'node:http';
|
||||
import https from 'node:https';
|
||||
|
||||
/** Pick the http or https driver based on the URL scheme. */
|
||||
function httpDriverFor(url: string): typeof http | typeof https {
|
||||
return new URL(url).protocol === 'https:' ? https : http;
|
||||
}
|
||||
import { loadConfig } from '../config/index.js';
|
||||
import type { ConfigLoaderDeps } from '../config/index.js';
|
||||
import { loadCredentials } from '../auth/index.js';
|
||||
@@ -45,10 +51,16 @@ export interface StatusCommandDeps {
|
||||
|
||||
function defaultCheckHealth(url: string): Promise<boolean> {
|
||||
return new Promise((resolve) => {
|
||||
const req = http.get(`${url}/health`, { timeout: 3000 }, (res) => {
|
||||
resolve(res.statusCode !== undefined && res.statusCode >= 200 && res.statusCode < 400);
|
||||
res.resume();
|
||||
});
|
||||
let req: http.ClientRequest;
|
||||
try {
|
||||
req = httpDriverFor(url).get(`${url}/health`, { timeout: 3000 }, (res) => {
|
||||
resolve(res.statusCode !== undefined && res.statusCode >= 200 && res.statusCode < 400);
|
||||
res.resume();
|
||||
});
|
||||
} catch {
|
||||
resolve(false);
|
||||
return;
|
||||
}
|
||||
req.on('error', () => resolve(false));
|
||||
req.on('timeout', () => {
|
||||
req.destroy();
|
||||
@@ -63,26 +75,32 @@ function defaultCheckHealth(url: string): Promise<boolean> {
|
||||
*/
|
||||
function defaultCheckLlm(mcplocalUrl: string): Promise<string> {
|
||||
return new Promise((resolve) => {
|
||||
const req = http.get(`${mcplocalUrl}/llm/health`, { timeout: 45000 }, (res) => {
|
||||
const chunks: Buffer[] = [];
|
||||
res.on('data', (chunk: Buffer) => chunks.push(chunk));
|
||||
res.on('end', () => {
|
||||
try {
|
||||
const body = JSON.parse(Buffer.concat(chunks).toString('utf-8')) as { status: string; error?: string };
|
||||
if (body.status === 'ok') {
|
||||
resolve('ok');
|
||||
} else if (body.status === 'not configured') {
|
||||
resolve('not configured');
|
||||
} else if (body.error) {
|
||||
resolve(body.error.slice(0, 80));
|
||||
} else {
|
||||
resolve(body.status);
|
||||
let req: http.ClientRequest;
|
||||
try {
|
||||
req = httpDriverFor(mcplocalUrl).get(`${mcplocalUrl}/llm/health`, { timeout: 45000 }, (res) => {
|
||||
const chunks: Buffer[] = [];
|
||||
res.on('data', (chunk: Buffer) => chunks.push(chunk));
|
||||
res.on('end', () => {
|
||||
try {
|
||||
const body = JSON.parse(Buffer.concat(chunks).toString('utf-8')) as { status: string; error?: string };
|
||||
if (body.status === 'ok') {
|
||||
resolve('ok');
|
||||
} else if (body.status === 'not configured') {
|
||||
resolve('not configured');
|
||||
} else if (body.error) {
|
||||
resolve(body.error.slice(0, 80));
|
||||
} else {
|
||||
resolve(body.status);
|
||||
}
|
||||
} catch {
|
||||
resolve('invalid response');
|
||||
}
|
||||
} catch {
|
||||
resolve('invalid response');
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
} catch {
|
||||
resolve('mcplocal unreachable');
|
||||
return;
|
||||
}
|
||||
req.on('error', () => resolve('mcplocal unreachable'));
|
||||
req.on('timeout', () => { req.destroy(); resolve('timeout'); });
|
||||
});
|
||||
@@ -90,18 +108,24 @@ function defaultCheckLlm(mcplocalUrl: string): Promise<string> {
|
||||
|
||||
function defaultFetchModels(mcplocalUrl: string): Promise<string[]> {
|
||||
return new Promise((resolve) => {
|
||||
const req = http.get(`${mcplocalUrl}/llm/models`, { timeout: 5000 }, (res) => {
|
||||
const chunks: Buffer[] = [];
|
||||
res.on('data', (chunk: Buffer) => chunks.push(chunk));
|
||||
res.on('end', () => {
|
||||
try {
|
||||
const body = JSON.parse(Buffer.concat(chunks).toString('utf-8')) as { models?: string[] };
|
||||
resolve(body.models ?? []);
|
||||
} catch {
|
||||
resolve([]);
|
||||
}
|
||||
let req: http.ClientRequest;
|
||||
try {
|
||||
req = httpDriverFor(mcplocalUrl).get(`${mcplocalUrl}/llm/models`, { timeout: 5000 }, (res) => {
|
||||
const chunks: Buffer[] = [];
|
||||
res.on('data', (chunk: Buffer) => chunks.push(chunk));
|
||||
res.on('end', () => {
|
||||
try {
|
||||
const body = JSON.parse(Buffer.concat(chunks).toString('utf-8')) as { models?: string[] };
|
||||
resolve(body.models ?? []);
|
||||
} catch {
|
||||
resolve([]);
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
} catch {
|
||||
resolve([]);
|
||||
return;
|
||||
}
|
||||
req.on('error', () => resolve([]));
|
||||
req.on('timeout', () => { req.destroy(); resolve([]); });
|
||||
});
|
||||
@@ -109,18 +133,24 @@ function defaultFetchModels(mcplocalUrl: string): Promise<string[]> {
|
||||
|
||||
function defaultFetchProviders(mcplocalUrl: string): Promise<ProvidersInfo | null> {
|
||||
return new Promise((resolve) => {
|
||||
const req = http.get(`${mcplocalUrl}/llm/providers`, { timeout: 5000 }, (res) => {
|
||||
const chunks: Buffer[] = [];
|
||||
res.on('data', (chunk: Buffer) => chunks.push(chunk));
|
||||
res.on('end', () => {
|
||||
try {
|
||||
const body = JSON.parse(Buffer.concat(chunks).toString('utf-8')) as ProvidersInfo;
|
||||
resolve(body);
|
||||
} catch {
|
||||
resolve(null);
|
||||
}
|
||||
let req: http.ClientRequest;
|
||||
try {
|
||||
req = httpDriverFor(mcplocalUrl).get(`${mcplocalUrl}/llm/providers`, { timeout: 5000 }, (res) => {
|
||||
const chunks: Buffer[] = [];
|
||||
res.on('data', (chunk: Buffer) => chunks.push(chunk));
|
||||
res.on('end', () => {
|
||||
try {
|
||||
const body = JSON.parse(Buffer.concat(chunks).toString('utf-8')) as ProvidersInfo;
|
||||
resolve(body);
|
||||
} catch {
|
||||
resolve(null);
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
} catch {
|
||||
resolve(null);
|
||||
return;
|
||||
}
|
||||
req.on('error', () => resolve(null));
|
||||
req.on('timeout', () => { req.destroy(); resolve(null); });
|
||||
});
|
||||
|
||||
176
src/cli/src/commands/test-mcp.ts
Normal file
176
src/cli/src/commands/test-mcp.ts
Normal file
@@ -0,0 +1,176 @@
|
||||
import { Command } from 'commander';
|
||||
import { McpHttpSession, McpProtocolError, McpTransportError, deriveBaseUrl, mcpHealthCheck } from '@mcpctl/shared';
|
||||
|
||||
export interface TestMcpCommandDeps {
|
||||
log: (...args: unknown[]) => void;
|
||||
/**
|
||||
* Inject a session factory for testing. The default creates a real `McpHttpSession`.
|
||||
*/
|
||||
createSession?: (url: string, opts: { bearer?: string; timeoutMs?: number }) => {
|
||||
initialize(): Promise<unknown>;
|
||||
listTools(): Promise<Array<{ name: string }>>;
|
||||
callTool(name: string, args: Record<string, unknown>): Promise<unknown>;
|
||||
close(): Promise<void>;
|
||||
};
|
||||
healthCheck?: (baseUrl: string) => Promise<boolean>;
|
||||
}
|
||||
|
||||
export type TestMcpExitCode = 0 | 1 | 2;
|
||||
|
||||
export interface TestMcpReport {
|
||||
url: string;
|
||||
health: 'ok' | 'fail' | 'skipped';
|
||||
initialize: 'ok' | 'fail';
|
||||
tools: string[] | null;
|
||||
toolCall?: { name: string; result: unknown; isError?: boolean };
|
||||
missingTools?: string[];
|
||||
exitCode: TestMcpExitCode;
|
||||
error?: string;
|
||||
}
|
||||
|
||||
export function createTestCommand(deps: TestMcpCommandDeps): Command {
|
||||
const { log } = deps;
|
||||
const createSession = deps.createSession ?? ((url, opts) => new McpHttpSession(url, opts));
|
||||
const healthCheck = deps.healthCheck ?? mcpHealthCheck;
|
||||
|
||||
const test = new Command('test').description('Utilities for testing MCP endpoints and config');
|
||||
|
||||
test
|
||||
.command('mcp')
|
||||
.description('Verify a Streamable-HTTP MCP endpoint: health, initialize, tools/list, optionally call a tool.')
|
||||
.argument('<url>', 'Full URL of the MCP endpoint (e.g. https://mcp.example.com/projects/foo/mcp)')
|
||||
.option('--token <bearer>', 'Bearer token (also reads $MCPCTL_TOKEN)')
|
||||
.option('--tool <name>', 'Invoke a specific tool after listing')
|
||||
.option('--args <json>', 'JSON-encoded arguments for --tool', '{}')
|
||||
.option('--expect-tools <list>', 'Comma-separated tool names that MUST appear; fails otherwise')
|
||||
.option('--timeout <seconds>', 'Per-request timeout in seconds', '10')
|
||||
.option('-o, --output <format>', 'Output format: text or json', 'text')
|
||||
.option('--no-health', 'Skip the /healthz preflight check')
|
||||
.action(async (url: string, opts: {
|
||||
token?: string;
|
||||
tool?: string;
|
||||
args: string;
|
||||
expectTools?: string;
|
||||
timeout: string;
|
||||
output: string;
|
||||
health: boolean;
|
||||
}) => {
|
||||
const bearer = opts.token ?? process.env.MCPCTL_TOKEN;
|
||||
const timeoutMs = Number(opts.timeout) * 1000;
|
||||
if (!Number.isFinite(timeoutMs) || timeoutMs <= 0) {
|
||||
throw new Error(`--timeout must be a positive number of seconds (got '${opts.timeout}')`);
|
||||
}
|
||||
|
||||
const report: TestMcpReport = {
|
||||
url,
|
||||
health: 'skipped',
|
||||
initialize: 'fail',
|
||||
tools: null,
|
||||
exitCode: 1,
|
||||
};
|
||||
|
||||
// 1. Health preflight
|
||||
if (opts.health !== false) {
|
||||
const baseUrl = deriveBaseUrl(url);
|
||||
const ok = await healthCheck(baseUrl);
|
||||
report.health = ok ? 'ok' : 'fail';
|
||||
if (!ok) {
|
||||
report.error = `healthz preflight failed at ${baseUrl}/healthz`;
|
||||
return emit(report, opts.output, log);
|
||||
}
|
||||
}
|
||||
|
||||
const sessionOpts: { bearer?: string; timeoutMs: number } = { timeoutMs };
|
||||
if (bearer !== undefined) sessionOpts.bearer = bearer;
|
||||
const session = createSession(url, sessionOpts);
|
||||
|
||||
try {
|
||||
// 2. Initialize
|
||||
await session.initialize();
|
||||
report.initialize = 'ok';
|
||||
|
||||
// 3. tools/list
|
||||
const tools = await session.listTools();
|
||||
report.tools = tools.map((t) => t.name);
|
||||
|
||||
// 4. --expect-tools check
|
||||
if (opts.expectTools !== undefined && opts.expectTools.trim() !== '') {
|
||||
const expected = opts.expectTools.split(',').map((s) => s.trim()).filter(Boolean);
|
||||
const missing = expected.filter((name) => !report.tools!.includes(name));
|
||||
if (missing.length > 0) {
|
||||
report.missingTools = missing;
|
||||
report.exitCode = 2;
|
||||
report.error = `Missing tools: ${missing.join(', ')}`;
|
||||
return emit(report, opts.output, log);
|
||||
}
|
||||
}
|
||||
|
||||
// 5. Optional --tool call
|
||||
if (opts.tool !== undefined) {
|
||||
let parsedArgs: Record<string, unknown> = {};
|
||||
try {
|
||||
parsedArgs = JSON.parse(opts.args) as Record<string, unknown>;
|
||||
} catch {
|
||||
throw new Error(`--args must be valid JSON (got '${opts.args}')`);
|
||||
}
|
||||
const result = await session.callTool(opts.tool, parsedArgs);
|
||||
const toolCall: TestMcpReport['toolCall'] = { name: opts.tool, result };
|
||||
if (typeof result === 'object' && result !== null && 'isError' in result) {
|
||||
toolCall.isError = Boolean((result as { isError?: boolean }).isError);
|
||||
}
|
||||
report.toolCall = toolCall;
|
||||
if (toolCall.isError) {
|
||||
report.exitCode = 2;
|
||||
report.error = `Tool '${opts.tool}' returned isError=true`;
|
||||
return emit(report, opts.output, log);
|
||||
}
|
||||
}
|
||||
|
||||
report.exitCode = 0;
|
||||
} catch (err) {
|
||||
if (err instanceof McpProtocolError) {
|
||||
report.exitCode = 1;
|
||||
report.error = `protocol error ${err.code}: ${err.message}`;
|
||||
} else if (err instanceof McpTransportError) {
|
||||
report.exitCode = 1;
|
||||
report.error = `transport error (HTTP ${err.status}): ${err.message}`;
|
||||
} else {
|
||||
report.exitCode = 1;
|
||||
report.error = err instanceof Error ? err.message : String(err);
|
||||
}
|
||||
} finally {
|
||||
await session.close().catch(() => { /* best-effort */ });
|
||||
}
|
||||
|
||||
return emit(report, opts.output, log);
|
||||
});
|
||||
|
||||
return test;
|
||||
}
|
||||
|
||||
function emit(report: TestMcpReport, output: string, log: (...args: unknown[]) => void): void {
|
||||
if (output === 'json') {
|
||||
log(JSON.stringify(report, null, 2));
|
||||
} else {
|
||||
log(`URL: ${report.url}`);
|
||||
log(`Health: ${report.health}`);
|
||||
log(`Initialize: ${report.initialize}`);
|
||||
if (report.tools !== null) {
|
||||
log(`Tools (${report.tools.length}): ${report.tools.slice(0, 10).join(', ')}${report.tools.length > 10 ? `, …(+${report.tools.length - 10})` : ''}`);
|
||||
}
|
||||
if (report.missingTools !== undefined) {
|
||||
log(`Missing: ${report.missingTools.join(', ')}`);
|
||||
}
|
||||
if (report.toolCall !== undefined) {
|
||||
log(`Tool call: ${report.toolCall.name} → ${report.toolCall.isError ? 'ERROR' : 'ok'}`);
|
||||
}
|
||||
if (report.error !== undefined) {
|
||||
log(`Error: ${report.error}`);
|
||||
}
|
||||
log(`Result: ${report.exitCode === 0 ? 'PASS' : report.exitCode === 2 ? 'CONTRACT FAIL' : 'TRANSPORT/AUTH FAIL'}`);
|
||||
}
|
||||
|
||||
if (report.exitCode !== 0) {
|
||||
process.exitCode = report.exitCode;
|
||||
}
|
||||
}
|
||||
@@ -8,6 +8,7 @@ import { createDescribeCommand } from './commands/describe.js';
|
||||
import { createDeleteCommand } from './commands/delete.js';
|
||||
import { createLogsCommand } from './commands/logs.js';
|
||||
import { createApplyCommand } from './commands/apply.js';
|
||||
import { createTestCommand } from './commands/test-mcp.js';
|
||||
import { createCreateCommand } from './commands/create.js';
|
||||
import { createEditCommand } from './commands/edit.js';
|
||||
import { createBackupCommand } from './commands/backup.js';
|
||||
@@ -17,6 +18,7 @@ import { createMcpCommand } from './commands/mcp.js';
|
||||
import { createPatchCommand } from './commands/patch.js';
|
||||
import { createConsoleCommand } from './commands/console/index.js';
|
||||
import { createCacheCommand } from './commands/cache.js';
|
||||
import { createMigrateCommand } from './commands/migrate.js';
|
||||
import { ApiClient, ApiError } from './api-client.js';
|
||||
import { loadConfig } from './config/index.js';
|
||||
import { loadCredentials } from './auth/index.js';
|
||||
@@ -99,6 +101,25 @@ export function createProgram(): Command {
|
||||
}
|
||||
}
|
||||
|
||||
// --project scoping for mcptokens
|
||||
if (!nameOrId && resource === 'mcptokens' && projectName) {
|
||||
return client.get<unknown[]>(`/api/v1/mcptokens?projectName=${encodeURIComponent(projectName)}`);
|
||||
}
|
||||
|
||||
// Name-based lookup for mcptokens: names are unique only within a project
|
||||
if (nameOrId && resource === 'mcptokens' && !/^c[a-z0-9]{24}/.test(nameOrId)) {
|
||||
if (!projectName) {
|
||||
throw new Error('mcptoken names are scoped to a project — pass --project <name> or use the token id (cuid)');
|
||||
}
|
||||
const items = await client.get<Array<{ id: string; name: string }>>(
|
||||
`/api/v1/mcptokens?projectName=${encodeURIComponent(projectName)}`,
|
||||
);
|
||||
const match = items.find((i) => i.name === nameOrId);
|
||||
if (!match) throw new Error(`mcptoken '${nameOrId}' not found in project '${projectName}'`);
|
||||
const item = await client.get(`/api/v1/mcptokens/${match.id}`);
|
||||
return [item];
|
||||
}
|
||||
|
||||
if (nameOrId) {
|
||||
// Glob pattern — use query param filtering
|
||||
if (nameOrId.includes('*')) {
|
||||
@@ -132,6 +153,19 @@ export function createProgram(): Command {
|
||||
return client.get(`/api/v1/${resource}/${match.id as string}`);
|
||||
}
|
||||
|
||||
// Mcptokens: names are project-scoped. CUIDs pass straight through.
|
||||
if (resource === 'mcptokens' && !/^c[a-z0-9]{24}/.test(nameOrId)) {
|
||||
if (!projectName) {
|
||||
throw new Error('mcptoken names are scoped to a project — pass --project <name> or use the token id (cuid)');
|
||||
}
|
||||
const items = await client.get<Array<Record<string, unknown>>>(
|
||||
`/api/v1/mcptokens?projectName=${encodeURIComponent(projectName)}`,
|
||||
);
|
||||
const match = items.find((item) => item.name === nameOrId);
|
||||
if (!match) throw new Error(`mcptoken '${nameOrId}' not found in project '${projectName}'`);
|
||||
return client.get(`/api/v1/mcptokens/${match.id as string}`);
|
||||
}
|
||||
|
||||
let id: string;
|
||||
try {
|
||||
id = await resolveNameOrId(client, resource, nameOrId);
|
||||
@@ -212,6 +246,15 @@ export function createProgram(): Command {
|
||||
mcplocalUrl: config.mcplocalUrl,
|
||||
}));
|
||||
|
||||
program.addCommand(createTestCommand({
|
||||
log: (...args) => console.log(...args),
|
||||
}));
|
||||
|
||||
program.addCommand(createMigrateCommand({
|
||||
client,
|
||||
log: (...args) => console.log(...args),
|
||||
}));
|
||||
|
||||
return program;
|
||||
}
|
||||
|
||||
|
||||
@@ -318,8 +318,8 @@ describe('create command', () => {
|
||||
'rbac', 'developers',
|
||||
'--subject', 'User:alice@test.com',
|
||||
'--subject', 'Group:dev-team',
|
||||
'--binding', 'edit:servers',
|
||||
'--binding', 'view:instances',
|
||||
'--roleBindings', 'role:edit,resource:servers',
|
||||
'--roleBindings', 'role:view,resource:instances',
|
||||
], { from: 'user' });
|
||||
|
||||
expect(client.post).toHaveBeenCalledWith('/api/v1/rbac', {
|
||||
@@ -342,7 +342,7 @@ describe('create command', () => {
|
||||
await cmd.parseAsync([
|
||||
'rbac', 'admins',
|
||||
'--subject', 'User:admin@test.com',
|
||||
'--binding', 'edit:*',
|
||||
'--roleBindings', 'role:edit,resource:*',
|
||||
], { from: 'user' });
|
||||
|
||||
expect(client.post).toHaveBeenCalledWith('/api/v1/rbac', {
|
||||
@@ -371,18 +371,18 @@ describe('create command', () => {
|
||||
).rejects.toThrow('Invalid subject format');
|
||||
});
|
||||
|
||||
it('throws on invalid binding format', async () => {
|
||||
it('throws on invalid roleBindings format', async () => {
|
||||
const cmd = createCreateCommand({ client, log });
|
||||
await expect(
|
||||
cmd.parseAsync(['rbac', 'bad', '--binding', 'no-colon'], { from: 'user' }),
|
||||
).rejects.toThrow('Invalid binding format');
|
||||
cmd.parseAsync(['rbac', 'bad', '--roleBindings', 'no-colon'], { from: 'user' }),
|
||||
).rejects.toThrow(/Invalid roleBindings/);
|
||||
});
|
||||
|
||||
it('throws on 409 without --force', async () => {
|
||||
vi.mocked(client.post).mockRejectedValueOnce(new ApiError(409, '{"error":"RBAC already exists"}'));
|
||||
const cmd = createCreateCommand({ client, log });
|
||||
await expect(
|
||||
cmd.parseAsync(['rbac', 'developers', '--subject', 'User:a@b.com', '--binding', 'edit:servers'], { from: 'user' }),
|
||||
cmd.parseAsync(['rbac', 'developers', '--subject', 'User:a@b.com', '--roleBindings', 'role:edit,resource:servers'], { from: 'user' }),
|
||||
).rejects.toThrow('API error 409');
|
||||
});
|
||||
|
||||
@@ -393,7 +393,7 @@ describe('create command', () => {
|
||||
await cmd.parseAsync([
|
||||
'rbac', 'developers',
|
||||
'--subject', 'User:new@test.com',
|
||||
'--binding', 'edit:*',
|
||||
'--roleBindings', 'role:edit,resource:*',
|
||||
'--force',
|
||||
], { from: 'user' });
|
||||
|
||||
@@ -404,15 +404,15 @@ describe('create command', () => {
|
||||
expect(output.join('\n')).toContain("rbac 'developers' updated");
|
||||
});
|
||||
|
||||
it('creates an RBAC definition with operation bindings', async () => {
|
||||
it('creates an RBAC definition with operation bindings (action:… shorthand)', async () => {
|
||||
vi.mocked(client.post).mockResolvedValueOnce({ id: 'rbac-1', name: 'ops' });
|
||||
const cmd = createCreateCommand({ client, log });
|
||||
await cmd.parseAsync([
|
||||
'rbac', 'ops',
|
||||
'--subject', 'Group:ops-team',
|
||||
'--binding', 'edit:servers',
|
||||
'--operation', 'logs',
|
||||
'--operation', 'backup',
|
||||
'--roleBindings', 'role:edit,resource:servers',
|
||||
'--roleBindings', 'action:logs',
|
||||
'--roleBindings', 'action:backup',
|
||||
], { from: 'user' });
|
||||
|
||||
expect(client.post).toHaveBeenCalledWith('/api/v1/rbac', {
|
||||
@@ -433,7 +433,7 @@ describe('create command', () => {
|
||||
await cmd.parseAsync([
|
||||
'rbac', 'ha-viewer',
|
||||
'--subject', 'User:alice@test.com',
|
||||
'--binding', 'view:servers:my-ha',
|
||||
'--roleBindings', 'role:view,resource:servers,name:my-ha',
|
||||
], { from: 'user' });
|
||||
|
||||
expect(client.post).toHaveBeenCalledWith('/api/v1/rbac', {
|
||||
|
||||
54
src/cli/tests/commands/rbac-bindings.test.ts
Normal file
54
src/cli/tests/commands/rbac-bindings.test.ts
Normal file
@@ -0,0 +1,54 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { parseRoleBinding } from '../../src/commands/rbac-bindings.js';
|
||||
|
||||
describe('parseRoleBinding', () => {
|
||||
it('parses an unscoped resource binding', () => {
|
||||
expect(parseRoleBinding('role:view,resource:servers')).toEqual({
|
||||
role: 'view',
|
||||
resource: 'servers',
|
||||
});
|
||||
});
|
||||
|
||||
it('parses a name-scoped resource binding', () => {
|
||||
expect(parseRoleBinding('role:view,resource:servers,name:my-ha')).toEqual({
|
||||
role: 'view',
|
||||
resource: 'servers',
|
||||
name: 'my-ha',
|
||||
});
|
||||
});
|
||||
|
||||
it('parses an operation binding via the action shorthand', () => {
|
||||
expect(parseRoleBinding('action:logs')).toEqual({
|
||||
role: 'run',
|
||||
action: 'logs',
|
||||
});
|
||||
});
|
||||
|
||||
it('trims whitespace around keys and values', () => {
|
||||
expect(parseRoleBinding('role: edit , resource: * ')).toEqual({
|
||||
role: 'edit',
|
||||
resource: '*',
|
||||
});
|
||||
});
|
||||
|
||||
it('rejects a pair with no colon', () => {
|
||||
expect(() => parseRoleBinding('role=view')).toThrow(/key:value pairs/);
|
||||
});
|
||||
|
||||
it('rejects an unknown key', () => {
|
||||
expect(() => parseRoleBinding('role:view,resource:servers,scope:project')).toThrow(/Invalid roleBindings key 'scope'/);
|
||||
});
|
||||
|
||||
it('rejects an empty value', () => {
|
||||
expect(() => parseRoleBinding('role:view,resource:')).toThrow(/empty key or value/);
|
||||
});
|
||||
|
||||
it('rejects action combined with resource/name', () => {
|
||||
expect(() => parseRoleBinding('action:logs,resource:servers')).toThrow(/cannot be combined/);
|
||||
});
|
||||
|
||||
it('requires both role and resource when action is absent', () => {
|
||||
expect(() => parseRoleBinding('role:view')).toThrow(/need either 'action/);
|
||||
expect(() => parseRoleBinding('resource:servers')).toThrow(/need either 'action/);
|
||||
});
|
||||
});
|
||||
168
src/cli/tests/commands/test-mcp.test.ts
Normal file
168
src/cli/tests/commands/test-mcp.test.ts
Normal file
@@ -0,0 +1,168 @@
|
||||
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
|
||||
import { createTestCommand } from '../../src/commands/test-mcp.js';
|
||||
|
||||
function makeSession(overrides: Partial<{
|
||||
initialize: () => Promise<unknown>;
|
||||
listTools: () => Promise<Array<{ name: string }>>;
|
||||
callTool: (name: string, args: Record<string, unknown>) => Promise<unknown>;
|
||||
close: () => Promise<void>;
|
||||
}> = {}) {
|
||||
return {
|
||||
initialize: overrides.initialize ?? vi.fn(async () => ({ protocolVersion: '2024-11-05' })),
|
||||
listTools: overrides.listTools ?? vi.fn(async () => [{ name: 'echo' }, { name: 'search' }]),
|
||||
callTool: overrides.callTool ?? vi.fn(async () => ({ content: [{ type: 'text', text: 'hi' }] })),
|
||||
close: overrides.close ?? vi.fn(async () => { /* no-op */ }),
|
||||
};
|
||||
}
|
||||
|
||||
describe('mcpctl test mcp', () => {
|
||||
const output: string[] = [];
|
||||
const log = (...args: unknown[]) => {
|
||||
output.push(args.map(String).join(' '));
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
output.length = 0;
|
||||
process.exitCode = 0;
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
process.exitCode = 0;
|
||||
});
|
||||
|
||||
it('exits 0 on happy path (health + initialize + tools/list)', async () => {
|
||||
const session = makeSession();
|
||||
const cmd = createTestCommand({
|
||||
log,
|
||||
createSession: () => session,
|
||||
healthCheck: async () => true,
|
||||
});
|
||||
await cmd.parseAsync(['mcp', 'https://mcp.example.com/projects/foo/mcp'], { from: 'user' });
|
||||
expect(process.exitCode).toBe(0);
|
||||
expect(session.initialize).toHaveBeenCalled();
|
||||
expect(session.listTools).toHaveBeenCalled();
|
||||
expect(output.join('\n')).toContain('Result: PASS');
|
||||
});
|
||||
|
||||
it('exits 1 when the /healthz preflight fails', async () => {
|
||||
const cmd = createTestCommand({
|
||||
log,
|
||||
createSession: () => makeSession(),
|
||||
healthCheck: async () => false,
|
||||
});
|
||||
await cmd.parseAsync(['mcp', 'https://mcp.example.com/projects/foo/mcp'], { from: 'user' });
|
||||
expect(process.exitCode).toBe(1);
|
||||
expect(output.join('\n')).toContain('healthz preflight failed');
|
||||
});
|
||||
|
||||
it('exits 2 (contract fail) when --expect-tools are missing', async () => {
|
||||
const cmd = createTestCommand({
|
||||
log,
|
||||
createSession: () => makeSession({
|
||||
listTools: async () => [{ name: 'echo' }],
|
||||
}),
|
||||
healthCheck: async () => true,
|
||||
});
|
||||
await cmd.parseAsync(
|
||||
['mcp', 'https://mcp.example.com/projects/foo/mcp', '--expect-tools', 'echo,search'],
|
||||
{ from: 'user' },
|
||||
);
|
||||
expect(process.exitCode).toBe(2);
|
||||
expect(output.join('\n')).toContain('Missing: search');
|
||||
expect(output.join('\n')).toContain('CONTRACT FAIL');
|
||||
});
|
||||
|
||||
it('exits 0 when --expect-tools all match', async () => {
|
||||
const cmd = createTestCommand({
|
||||
log,
|
||||
createSession: () => makeSession({
|
||||
listTools: async () => [{ name: 'echo' }, { name: 'search' }, { name: 'x' }],
|
||||
}),
|
||||
healthCheck: async () => true,
|
||||
});
|
||||
await cmd.parseAsync(
|
||||
['mcp', 'https://mcp.example.com/projects/foo/mcp', '--expect-tools', 'echo,search'],
|
||||
{ from: 'user' },
|
||||
);
|
||||
expect(process.exitCode).toBe(0);
|
||||
});
|
||||
|
||||
it('exits 1 on transport/auth failure (initialize throws)', async () => {
|
||||
const cmd = createTestCommand({
|
||||
log,
|
||||
createSession: () => makeSession({
|
||||
initialize: async () => { throw new Error('HTTP 401: unauthorized'); },
|
||||
}),
|
||||
healthCheck: async () => true,
|
||||
});
|
||||
await cmd.parseAsync(['mcp', 'https://mcp.example.com/projects/foo/mcp'], { from: 'user' });
|
||||
expect(process.exitCode).toBe(1);
|
||||
expect(output.join('\n')).toContain('Error:');
|
||||
expect(output.join('\n')).toContain('TRANSPORT/AUTH FAIL');
|
||||
});
|
||||
|
||||
it('invokes --tool with --args and reports isError', async () => {
|
||||
const callTool = vi.fn(async () => ({ content: [{ type: 'text', text: 'oops' }], isError: true }));
|
||||
const cmd = createTestCommand({
|
||||
log,
|
||||
createSession: () => makeSession({ callTool }),
|
||||
healthCheck: async () => true,
|
||||
});
|
||||
await cmd.parseAsync(
|
||||
['mcp', 'https://mcp.example.com/projects/foo/mcp', '--tool', 'echo', '--args', '{"msg":"hi"}'],
|
||||
{ from: 'user' },
|
||||
);
|
||||
expect(callTool).toHaveBeenCalledWith('echo', { msg: 'hi' });
|
||||
expect(process.exitCode).toBe(2);
|
||||
});
|
||||
|
||||
it('outputs a JSON report with -o json', async () => {
|
||||
const cmd = createTestCommand({
|
||||
log,
|
||||
createSession: () => makeSession(),
|
||||
healthCheck: async () => true,
|
||||
});
|
||||
await cmd.parseAsync(
|
||||
['mcp', 'https://mcp.example.com/projects/foo/mcp', '-o', 'json'],
|
||||
{ from: 'user' },
|
||||
);
|
||||
const parsed = JSON.parse(output.join('\n')) as { exitCode: number; tools: string[] };
|
||||
expect(parsed.exitCode).toBe(0);
|
||||
expect(parsed.tools).toEqual(['echo', 'search']);
|
||||
});
|
||||
|
||||
it('reads $MCPCTL_TOKEN when --token is not given', async () => {
|
||||
let observedBearer: string | undefined;
|
||||
const cmd = createTestCommand({
|
||||
log,
|
||||
createSession: (_url, opts) => {
|
||||
observedBearer = opts.bearer;
|
||||
return makeSession();
|
||||
},
|
||||
healthCheck: async () => true,
|
||||
});
|
||||
const prev = process.env.MCPCTL_TOKEN;
|
||||
process.env.MCPCTL_TOKEN = 'mcpctl_pat_fromenv';
|
||||
try {
|
||||
await cmd.parseAsync(['mcp', 'https://mcp.example.com/projects/foo/mcp'], { from: 'user' });
|
||||
} finally {
|
||||
if (prev === undefined) delete process.env.MCPCTL_TOKEN;
|
||||
else process.env.MCPCTL_TOKEN = prev;
|
||||
}
|
||||
expect(observedBearer).toBe('mcpctl_pat_fromenv');
|
||||
});
|
||||
|
||||
it('rejects invalid --args as JSON', async () => {
|
||||
const cmd = createTestCommand({
|
||||
log,
|
||||
createSession: () => makeSession(),
|
||||
healthCheck: async () => true,
|
||||
});
|
||||
await cmd.parseAsync(
|
||||
['mcp', 'https://mcp.example.com/projects/foo/mcp', '--tool', 'echo', '--args', 'not-json'],
|
||||
{ from: 'user' },
|
||||
);
|
||||
expect(process.exitCode).toBe(1);
|
||||
expect(output.join('\n')).toContain('must be valid JSON');
|
||||
});
|
||||
});
|
||||
@@ -25,6 +25,7 @@ model User {
|
||||
auditLogs AuditLog[]
|
||||
ownedProjects Project[]
|
||||
groupMemberships GroupMember[]
|
||||
mcpTokens McpToken[]
|
||||
|
||||
@@index([email])
|
||||
}
|
||||
@@ -110,17 +111,48 @@ model McpTemplate {
|
||||
@@index([name])
|
||||
}
|
||||
|
||||
// ── Secret Backends ──
|
||||
//
|
||||
// Pluggable storage for Secret.data. Default is `plaintext` (data stored in
|
||||
// Secret.data JSON). Other drivers (e.g. `openbao`) store only a reference in
|
||||
// Secret.externalRef and fetch actual values from the external system at read
|
||||
// time. A `plaintext` row is seeded on first startup so the system always has
|
||||
// a viable backend; additional backends are user-managed via
|
||||
// `mcpctl create secretbackend`.
|
||||
|
||||
model SecretBackend {
|
||||
id String @id @default(cuid())
|
||||
name String @unique
|
||||
type String // plaintext | openbao | (future: vault, aws-sm, ...)
|
||||
config Json @default("{}") // type-specific: url, mount, namespace, tokenSecretRef
|
||||
isDefault Boolean @default(false) // exactly one row has isDefault=true
|
||||
description String @default("")
|
||||
version Int @default(1)
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
|
||||
secrets Secret[]
|
||||
|
||||
@@index([name])
|
||||
@@index([isDefault])
|
||||
}
|
||||
|
||||
// ── Secrets ──
|
||||
|
||||
model Secret {
|
||||
id String @id @default(cuid())
|
||||
name String @unique
|
||||
data Json @default("{}")
|
||||
version Int @default(1)
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
id String @id @default(cuid())
|
||||
name String @unique
|
||||
backendId String // FK to SecretBackend — dispatches read/write
|
||||
data Json @default("{}") // populated by plaintext backend only
|
||||
externalRef String @default("") // populated by non-plaintext backends (e.g. "mount/path#v3")
|
||||
version Int @default(1)
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
|
||||
backend SecretBackend @relation(fields: [backendId], references: [id])
|
||||
|
||||
@@index([name])
|
||||
@@index([backendId])
|
||||
}
|
||||
|
||||
// ── Groups ──
|
||||
@@ -187,6 +219,7 @@ model Project {
|
||||
servers ProjectServer[]
|
||||
prompts Prompt[]
|
||||
promptRequests PromptRequest[]
|
||||
mcpTokens McpToken[]
|
||||
|
||||
@@index([name])
|
||||
@@index([ownerId])
|
||||
@@ -204,6 +237,36 @@ model ProjectServer {
|
||||
@@unique([projectId, serverId])
|
||||
}
|
||||
|
||||
// ── MCP Tokens (bearer credentials for HTTP-mode mcplocal) ──
|
||||
//
|
||||
// Raw value format: `mcpctl_pat_<32 base62 chars>`. The raw value is shown
|
||||
// exactly once at create time; only the SHA-256 hash is persisted. Tokens are
|
||||
// scoped to exactly one project — they're only valid at
|
||||
// `/projects/<that-project>/mcp`. Creator's RBAC is the ceiling; the service
|
||||
// rejects bindings that exceed what the creator themselves can do.
|
||||
|
||||
model McpToken {
|
||||
id String @id @default(cuid())
|
||||
name String
|
||||
projectId String
|
||||
tokenHash String @unique
|
||||
tokenPrefix String
|
||||
ownerId String
|
||||
description String @default("")
|
||||
createdAt DateTime @default(now())
|
||||
expiresAt DateTime?
|
||||
lastUsedAt DateTime?
|
||||
revokedAt DateTime?
|
||||
|
||||
project Project @relation(fields: [projectId], references: [id], onDelete: Cascade)
|
||||
owner User @relation(fields: [ownerId], references: [id], onDelete: Cascade)
|
||||
|
||||
@@unique([name, projectId])
|
||||
@@index([tokenHash])
|
||||
@@index([projectId])
|
||||
@@index([ownerId])
|
||||
}
|
||||
|
||||
// ── MCP Instances (running containers) ──
|
||||
|
||||
model McpInstance {
|
||||
@@ -288,6 +351,8 @@ model AuditEvent {
|
||||
correlationId String?
|
||||
parentEventId String?
|
||||
userName String?
|
||||
tokenName String?
|
||||
tokenSha String?
|
||||
payload Json
|
||||
createdAt DateTime @default(now())
|
||||
|
||||
@@ -297,6 +362,7 @@ model AuditEvent {
|
||||
@@index([timestamp])
|
||||
@@index([eventKind])
|
||||
@@index([userName])
|
||||
@@index([tokenSha])
|
||||
}
|
||||
|
||||
// ── Backup Pending Queue ──
|
||||
|
||||
53
src/mcpd/src/bootstrap/secret-backends.ts
Normal file
53
src/mcpd/src/bootstrap/secret-backends.ts
Normal file
@@ -0,0 +1,53 @@
|
||||
/**
|
||||
* Bootstrap the `plaintext` SecretBackend + backfill existing Secret rows.
|
||||
*
|
||||
* Runs on every mcpd startup. Idempotent:
|
||||
* - if no SecretBackend exists, create `default` (type `plaintext`, isDefault=true)
|
||||
* - if any Secret has no backendId (fresh after schema migration), point it at `default`
|
||||
* - if no backend is currently flagged default, promote `default`
|
||||
*
|
||||
* Safe to run repeatedly; never destroys configuration.
|
||||
*/
|
||||
import type { PrismaClient } from '@prisma/client';
|
||||
|
||||
/** Well-known name for the always-present plaintext backend. */
|
||||
export const DEFAULT_PLAINTEXT_BACKEND_NAME = 'default';
|
||||
|
||||
export async function bootstrapSecretBackends(prisma: PrismaClient): Promise<void> {
|
||||
let plaintext = await prisma.secretBackend.findUnique({
|
||||
where: { name: DEFAULT_PLAINTEXT_BACKEND_NAME },
|
||||
});
|
||||
|
||||
if (plaintext === null) {
|
||||
plaintext = await prisma.secretBackend.create({
|
||||
data: {
|
||||
name: DEFAULT_PLAINTEXT_BACKEND_NAME,
|
||||
type: 'plaintext',
|
||||
isDefault: true,
|
||||
description: 'Default in-database plaintext backend. Seeded on first startup.',
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
const currentDefault = await prisma.secretBackend.findFirst({ where: { isDefault: true } });
|
||||
if (currentDefault === null) {
|
||||
await prisma.secretBackend.update({
|
||||
where: { id: plaintext.id },
|
||||
data: { isDefault: true },
|
||||
});
|
||||
}
|
||||
|
||||
// Backfill any secrets left with an empty backendId after the schema migration.
|
||||
// `findMany({ where: { backendId: '' } })` catches rows that existed before
|
||||
// the column was added and had a default-empty value assigned.
|
||||
const orphans = await prisma.secret.findMany({
|
||||
where: { backendId: '' },
|
||||
select: { id: true },
|
||||
});
|
||||
if (orphans.length > 0) {
|
||||
await prisma.secret.updateMany({
|
||||
where: { id: { in: orphans.map((o) => o.id) } },
|
||||
data: { backendId: plaintext.id },
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -18,7 +18,14 @@ import {
|
||||
UserRepository,
|
||||
GroupRepository,
|
||||
AuditEventRepository,
|
||||
McpTokenRepository,
|
||||
} from './repositories/index.js';
|
||||
import { SecretBackendRepository } from './repositories/secret-backend.repository.js';
|
||||
import { SecretBackendService } from './services/secret-backend.service.js';
|
||||
import { SecretMigrateService } from './services/secret-migrate.service.js';
|
||||
import { bootstrapSecretBackends } from './bootstrap/secret-backends.js';
|
||||
import { registerSecretBackendRoutes } from './routes/secret-backends.js';
|
||||
import { registerSecretMigrateRoutes } from './routes/secret-migrate.js';
|
||||
import { PromptRepository } from './repositories/prompt.repository.js';
|
||||
import { PromptRequestRepository } from './repositories/prompt-request.repository.js';
|
||||
import { bootstrapSystemProject } from './bootstrap/system-project.js';
|
||||
@@ -43,6 +50,7 @@ import {
|
||||
UserService,
|
||||
GroupService,
|
||||
AuditEventService,
|
||||
McpTokenService,
|
||||
} from './services/index.js';
|
||||
import type { RbacAction } from './services/index.js';
|
||||
import type { UpdateRbacDefinitionInput } from './validation/rbac-definition.schema.js';
|
||||
@@ -62,6 +70,7 @@ import {
|
||||
registerUserRoutes,
|
||||
registerGroupRoutes,
|
||||
registerAuditEventRoutes,
|
||||
registerMcpTokenRoutes,
|
||||
} from './routes/index.js';
|
||||
import { registerPromptRoutes } from './routes/prompts.js';
|
||||
import { registerGitBackupRoutes } from './routes/git-backup.js';
|
||||
@@ -90,11 +99,14 @@ function mapUrlToPermission(method: string, url: string): PermissionCheck {
|
||||
if (segment === 'backup') return { kind: 'operation', operation: 'backup' };
|
||||
if (segment === 'restore') return { kind: 'operation', operation: 'restore' };
|
||||
if (segment === 'audit-logs' && method === 'DELETE') return { kind: 'operation', operation: 'audit-purge' };
|
||||
// /api/v1/secrets/migrate is a bulk cross-backend operation — treat as op, not a plain secret write.
|
||||
if (url.startsWith('/api/v1/secrets/migrate')) return { kind: 'operation', operation: 'migrate-secrets' };
|
||||
|
||||
const resourceMap: Record<string, string | undefined> = {
|
||||
'servers': 'servers',
|
||||
'instances': 'instances',
|
||||
'secrets': 'secrets',
|
||||
'secretbackends': 'secretbackends',
|
||||
'projects': 'projects',
|
||||
'templates': 'templates',
|
||||
'users': 'users',
|
||||
@@ -104,6 +116,7 @@ function mapUrlToPermission(method: string, url: string): PermissionCheck {
|
||||
'mcp': 'servers',
|
||||
'prompts': 'prompts',
|
||||
'promptrequests': 'promptrequests',
|
||||
'mcptokens': 'mcptokens',
|
||||
};
|
||||
|
||||
const resource = resourceMap[segment];
|
||||
@@ -116,6 +129,12 @@ function mapUrlToPermission(method: string, url: string): PermissionCheck {
|
||||
return { kind: 'resource', resource: 'promptrequests', action: 'delete', resourceName: approveMatch[1] };
|
||||
}
|
||||
|
||||
// Special case: /api/v1/mcptokens/:id/revoke → treated as 'delete' on the token.
|
||||
const revokeMatch = url.match(/^\/api\/v1\/mcptokens\/([^/?]+)\/revoke/);
|
||||
if (revokeMatch?.[1]) {
|
||||
return { kind: 'resource', resource: 'mcptokens', action: 'delete', resourceName: revokeMatch[1] };
|
||||
}
|
||||
|
||||
// Special case: /api/v1/projects/:name/prompts/visible → view prompts
|
||||
const visiblePromptsMatch = url.match(/^\/api\/v1\/projects\/([^/?]+)\/prompts\/visible/);
|
||||
if (visiblePromptsMatch?.[1]) {
|
||||
@@ -251,6 +270,7 @@ async function main(): Promise<void> {
|
||||
// Repositories
|
||||
const serverRepo = new McpServerRepository(prisma);
|
||||
const secretRepo = new SecretRepository(prisma);
|
||||
const secretBackendRepo = new SecretBackendRepository(prisma);
|
||||
const instanceRepo = new McpInstanceRepository(prisma);
|
||||
const projectRepo = new ProjectRepository(prisma);
|
||||
const auditLogRepo = new AuditLogRepository(prisma);
|
||||
@@ -259,14 +279,21 @@ async function main(): Promise<void> {
|
||||
const rbacDefinitionRepo = new RbacDefinitionRepository(prisma);
|
||||
const userRepo = new UserRepository(prisma);
|
||||
const groupRepo = new GroupRepository(prisma);
|
||||
const mcpTokenRepo = new McpTokenRepository(prisma);
|
||||
|
||||
// SecretBackend bootstrap: ensure a `plaintext` default row exists and any
|
||||
// pre-existing `Secret` rows are pointed at it. Idempotent per run.
|
||||
await bootstrapSecretBackends(prisma);
|
||||
|
||||
// CUID detection for RBAC name resolution
|
||||
const CUID_RE = /^c[^\s-]{8,}$/i;
|
||||
const nameResolvers: Record<string, { findById(id: string): Promise<{ name: string } | null> }> = {
|
||||
servers: serverRepo,
|
||||
secrets: secretRepo,
|
||||
secretbackends: secretBackendRepo,
|
||||
projects: projectRepo,
|
||||
groups: groupRepo,
|
||||
mcptokens: mcpTokenRepo,
|
||||
};
|
||||
|
||||
// Migrate legacy 'admin' role → granular roles
|
||||
@@ -279,9 +306,29 @@ async function main(): Promise<void> {
|
||||
|
||||
// Services
|
||||
const serverService = new McpServerService(serverRepo);
|
||||
const instanceService = new InstanceService(instanceRepo, serverRepo, orchestrator, secretRepo);
|
||||
// SecretBackend service — needs a lazy bridge to the yet-to-be-constructed
|
||||
// SecretService because the OpenBao driver's auth token lives in a plaintext
|
||||
// Secret. The bridge defers the resolve until after `secretService` is
|
||||
// assigned, breaking the circular dependency at construction time.
|
||||
const secretResolverBridge = {
|
||||
resolve: async (name: string, key: string): Promise<string> => secretService.resolve(name, key),
|
||||
};
|
||||
const secretBackendService = new SecretBackendService(secretBackendRepo, {
|
||||
plaintext: {
|
||||
listAllPlaintext: async () => {
|
||||
const rows = await prisma.secret.findMany({
|
||||
where: { backend: { type: 'plaintext' } },
|
||||
select: { name: true, data: true },
|
||||
});
|
||||
return rows.map((r) => ({ name: r.name, data: r.data as Record<string, string> }));
|
||||
},
|
||||
},
|
||||
secretRefResolver: secretResolverBridge,
|
||||
});
|
||||
const secretService = new SecretService(secretRepo, secretBackendService);
|
||||
const secretMigrateService = new SecretMigrateService(secretRepo, secretBackendService);
|
||||
const instanceService = new InstanceService(instanceRepo, serverRepo, orchestrator, secretService);
|
||||
serverService.setInstanceService(instanceService);
|
||||
const secretService = new SecretService(secretRepo);
|
||||
const projectService = new ProjectService(projectRepo, serverRepo);
|
||||
const auditLogService = new AuditLogService(auditLogRepo);
|
||||
const auditEventService = new AuditEventService(auditEventRepo);
|
||||
@@ -292,6 +339,7 @@ async function main(): Promise<void> {
|
||||
const mcpProxyService = new McpProxyService(instanceRepo, serverRepo, orchestrator);
|
||||
const rbacDefinitionService = new RbacDefinitionService(rbacDefinitionRepo);
|
||||
const rbacService = new RbacService(rbacDefinitionRepo, prisma);
|
||||
const mcpTokenService = new McpTokenService(mcpTokenRepo, projectRepo, rbacDefinitionRepo, rbacService);
|
||||
const userService = new UserService(userRepo);
|
||||
const groupService = new GroupService(groupRepo, userRepo);
|
||||
const promptRepo = new PromptRepository(prisma);
|
||||
@@ -300,12 +348,30 @@ async function main(): Promise<void> {
|
||||
promptRuleRegistry.register(systemPromptVarsRule);
|
||||
const promptService = new PromptService(promptRepo, promptRequestRepo, projectRepo, promptRuleRegistry);
|
||||
const backupService = new BackupService(serverRepo, projectRepo, secretRepo, userRepo, groupRepo, rbacDefinitionRepo, promptRepo, templateRepo);
|
||||
const restoreService = new RestoreService(serverRepo, projectRepo, secretRepo, userRepo, groupRepo, rbacDefinitionRepo, promptRepo, templateRepo);
|
||||
const restoreService = new RestoreService(serverRepo, projectRepo, secretRepo, secretService, userRepo, groupRepo, rbacDefinitionRepo, promptRepo, templateRepo);
|
||||
|
||||
// Auth middleware for global hooks
|
||||
const authMiddleware = createAuthMiddleware({
|
||||
findSession: (token) => authService.findSession(token),
|
||||
});
|
||||
// Shared auth dependencies. Both the global auth hook and the per-route
|
||||
// preHandler on /api/v1/mcp/proxy must know how to resolve both session
|
||||
// bearers AND mcpctl_pat_ bearers, or mcplocal→mcpd proxy calls with a
|
||||
// McpToken will 401 at the route layer even though the global hook accepts them.
|
||||
const authDeps = {
|
||||
findSession: (token: string) => authService.findSession(token),
|
||||
findMcpToken: async (tokenHash: string) => {
|
||||
const row = await mcpTokenRepo.findByHash(tokenHash);
|
||||
if (row === null) return null;
|
||||
return {
|
||||
tokenId: row.id,
|
||||
tokenName: row.name,
|
||||
tokenSha: row.tokenHash,
|
||||
projectId: row.projectId,
|
||||
projectName: row.project.name,
|
||||
ownerId: row.ownerId,
|
||||
expiresAt: row.expiresAt,
|
||||
revokedAt: row.revokedAt,
|
||||
};
|
||||
},
|
||||
};
|
||||
const authMiddleware = createAuthMiddleware(authDeps);
|
||||
|
||||
// Server
|
||||
const app = await createServer(config, {
|
||||
@@ -329,6 +395,8 @@ async function main(): Promise<void> {
|
||||
const url = request.url;
|
||||
// Skip auth for health, auth, and root
|
||||
if (url.startsWith('/api/v1/auth/') || url === '/healthz' || url === '/health') return;
|
||||
// Introspection authenticates via the McpToken bearer itself — route handles its own auth.
|
||||
if (url.startsWith('/api/v1/mcptokens/introspect')) return;
|
||||
if (!url.startsWith('/api/v1/')) return;
|
||||
|
||||
// Run auth middleware
|
||||
@@ -351,9 +419,28 @@ async function main(): Promise<void> {
|
||||
const saHeader = request.headers['x-service-account'];
|
||||
const serviceAccountName = typeof saHeader === 'string' ? saHeader : undefined;
|
||||
|
||||
// McpToken principal (set by authMiddleware when the bearer was mcpctl_pat_…)
|
||||
const mcpTokenSha = request.mcpToken?.tokenSha;
|
||||
|
||||
// Second layer of project-scope enforcement: a McpToken principal can only
|
||||
// hit resources inside its bound project.
|
||||
if (request.mcpToken !== undefined) {
|
||||
const projectMatch = url.match(/^\/api\/v1\/projects\/([^/?]+)/);
|
||||
if (projectMatch?.[1]) {
|
||||
let targetProjectName = projectMatch[1];
|
||||
if (CUID_RE.test(targetProjectName)) {
|
||||
const entity = await projectRepo.findById(targetProjectName);
|
||||
if (entity) targetProjectName = entity.name;
|
||||
}
|
||||
if (targetProjectName !== request.mcpToken.projectName) {
|
||||
return reply.code(403).send({ error: 'Token is not valid for this project' });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let allowed: boolean;
|
||||
if (check.kind === 'operation') {
|
||||
allowed = await rbacService.canRunOperation(request.userId, check.operation, serviceAccountName);
|
||||
allowed = await rbacService.canRunOperation(request.userId, check.operation, serviceAccountName, mcpTokenSha);
|
||||
} else {
|
||||
// Resolve CUID → human name for name-scoped RBAC bindings
|
||||
if (check.resourceName !== undefined && CUID_RE.test(check.resourceName)) {
|
||||
@@ -363,10 +450,10 @@ async function main(): Promise<void> {
|
||||
if (entity) check.resourceName = entity.name;
|
||||
}
|
||||
}
|
||||
allowed = await rbacService.canAccess(request.userId, check.action, check.resource, check.resourceName, serviceAccountName);
|
||||
allowed = await rbacService.canAccess(request.userId, check.action, check.resource, check.resourceName, serviceAccountName, mcpTokenSha);
|
||||
// Compute scope for list filtering (used by preSerialization hook)
|
||||
if (allowed && check.resourceName === undefined) {
|
||||
request.rbacScope = await rbacService.getAllowedScope(request.userId, check.action, check.resource, serviceAccountName);
|
||||
request.rbacScope = await rbacService.getAllowedScope(request.userId, check.action, check.resource, serviceAccountName, mcpTokenSha);
|
||||
}
|
||||
}
|
||||
if (!allowed) {
|
||||
@@ -378,6 +465,8 @@ async function main(): Promise<void> {
|
||||
registerMcpServerRoutes(app, serverService, instanceService);
|
||||
registerTemplateRoutes(app, templateService);
|
||||
registerSecretRoutes(app, secretService);
|
||||
registerSecretBackendRoutes(app, secretBackendService);
|
||||
registerSecretMigrateRoutes(app, secretMigrateService);
|
||||
registerInstanceRoutes(app, instanceService);
|
||||
registerProjectRoutes(app, projectService);
|
||||
registerAuditLogRoutes(app, auditLogService);
|
||||
@@ -388,11 +477,12 @@ async function main(): Promise<void> {
|
||||
registerMcpProxyRoutes(app, {
|
||||
mcpProxyService,
|
||||
auditLogService,
|
||||
authDeps: { findSession: (token) => authService.findSession(token) },
|
||||
authDeps,
|
||||
});
|
||||
registerRbacRoutes(app, rbacDefinitionService);
|
||||
registerUserRoutes(app, userService);
|
||||
registerGroupRoutes(app, groupService);
|
||||
registerMcpTokenRoutes(app, { tokenService: mcpTokenService, projectRepo });
|
||||
registerPromptRoutes(app, promptService, projectRepo);
|
||||
|
||||
// ── Git-based backup ──
|
||||
|
||||
@@ -1,13 +1,41 @@
|
||||
import type { FastifyRequest, FastifyReply } from 'fastify';
|
||||
import { isMcpToken, hashToken } from '@mcpctl/shared';
|
||||
|
||||
/** Request-scoped principal attached by the auth hook when the bearer was a `mcpctl_pat_…` token. */
export interface McpTokenPrincipal {
  tokenId: string;
  tokenName: string;
  /** SHA-256 hash of the raw token — the persisted identity used for audit/RBAC attribution. */
  tokenSha: string;
  projectId: string;
  projectName: string;
  /** User id of the token's creator; requests run as this user. */
  ownerId: string;
}

/** Row shape returned by the McpToken lookup: principal fields plus validity timestamps. */
export interface McpTokenLookup {
  tokenId: string;
  tokenName: string;
  tokenSha: string;
  projectId: string;
  projectName: string;
  ownerId: string;
  /** Non-null and in the past ⇒ token rejected as expired. */
  expiresAt: Date | null;
  /** Non-null ⇒ token rejected as revoked. */
  revokedAt: Date | null;
}

/** Dependencies the auth middleware needs; kept as an interface so callers can stub them. */
export interface AuthDeps {
  /** Resolve a session bearer to its user; null when the token is unknown. */
  findSession: (token: string) => Promise<{ userId: string; expiresAt: Date } | null>;
  /**
   * Look up an McpToken by SHA-256 hash. Optional — when absent, Bearer tokens
   * that look like `mcpctl_pat_…` are rejected (401).
   */
  findMcpToken?: (tokenHash: string) => Promise<McpTokenLookup | null>;
}

declare module 'fastify' {
  interface FastifyRequest {
    userId?: string;
    rbacScope?: { wildcard: boolean; names: Set<string> };
    /** Set by the auth hook when the caller authenticated via a McpToken bearer (prefix `mcpctl_pat_`). */
    mcpToken?: McpTokenPrincipal;
  }
}
|
||||
|
||||
@@ -25,6 +53,37 @@ export function createAuthMiddleware(deps: AuthDeps) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Dispatch on the prefix: `mcpctl_pat_…` → McpToken path; anything else → session path.
|
||||
if (isMcpToken(token)) {
|
||||
if (deps.findMcpToken === undefined) {
|
||||
reply.code(401).send({ error: 'McpToken auth not enabled' });
|
||||
return;
|
||||
}
|
||||
const row = await deps.findMcpToken(hashToken(token));
|
||||
if (row === null) {
|
||||
reply.code(401).send({ error: 'Invalid token' });
|
||||
return;
|
||||
}
|
||||
if (row.revokedAt !== null) {
|
||||
reply.code(401).send({ error: 'Token revoked' });
|
||||
return;
|
||||
}
|
||||
if (row.expiresAt !== null && row.expiresAt < new Date()) {
|
||||
reply.code(401).send({ error: 'Token expired' });
|
||||
return;
|
||||
}
|
||||
request.userId = row.ownerId;
|
||||
request.mcpToken = {
|
||||
tokenId: row.tokenId,
|
||||
tokenName: row.tokenName,
|
||||
tokenSha: row.tokenSha,
|
||||
projectId: row.projectId,
|
||||
projectName: row.projectName,
|
||||
ownerId: row.ownerId,
|
||||
};
|
||||
return;
|
||||
}
|
||||
|
||||
const session = await deps.findSession(token);
|
||||
if (session === null) {
|
||||
reply.code(401).send({ error: 'Invalid token' });
|
||||
|
||||
@@ -30,6 +30,8 @@ export class AuditEventRepository implements IAuditEventRepository {
|
||||
correlationId: e.correlationId ?? null,
|
||||
parentEventId: e.parentEventId ?? null,
|
||||
userName: e.userName ?? null,
|
||||
tokenName: e.tokenName ?? null,
|
||||
tokenSha: e.tokenSha ?? null,
|
||||
payload: e.payload as Prisma.InputJsonValue,
|
||||
}));
|
||||
const result = await this.prisma.auditEvent.createMany({ data });
|
||||
@@ -132,6 +134,8 @@ function buildWhere(filter?: AuditEventFilter): Prisma.AuditEventWhereInput {
|
||||
if (filter.serverName !== undefined) where.serverName = filter.serverName;
|
||||
if (filter.correlationId !== undefined) where.correlationId = filter.correlationId;
|
||||
if (filter.userName !== undefined) where.userName = filter.userName;
|
||||
if (filter.tokenName !== undefined) where.tokenName = filter.tokenName;
|
||||
if (filter.tokenSha !== undefined) where.tokenSha = filter.tokenSha;
|
||||
|
||||
if (filter.from !== undefined || filter.to !== undefined) {
|
||||
const timestamp: Prisma.DateTimeFilter = {};
|
||||
|
||||
@@ -15,3 +15,5 @@ export type { IGroupRepository, GroupWithMembers } from './group.repository.js';
|
||||
export { GroupRepository } from './group.repository.js';
|
||||
export type { IAuditEventRepository, AuditEventFilter, AuditEventCreateInput } from './interfaces.js';
|
||||
export { AuditEventRepository } from './audit-event.repository.js';
|
||||
export type { IMcpTokenRepository, McpTokenFilter, McpTokenWithRelations, CreateMcpTokenRepoInput } from './interfaces.js';
|
||||
export { McpTokenRepository } from './mcp-token.repository.js';
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import type { McpServer, McpInstance, AuditLog, AuditEvent, Secret, InstanceStatus } from '@prisma/client';
|
||||
import type { McpServer, McpInstance, AuditLog, AuditEvent, McpToken, Secret, InstanceStatus } from '@prisma/client';
|
||||
import type { CreateMcpServerInput, UpdateMcpServerInput } from '../validation/mcp-server.schema.js';
|
||||
import type { CreateSecretInput, UpdateSecretInput } from '../validation/secret.schema.js';
|
||||
import type { SecretRepoCreateInput, SecretRepoUpdateInput } from './secret.repository.js';
|
||||
|
||||
export interface IMcpServerRepository {
|
||||
findAll(): Promise<McpServer[]>;
|
||||
@@ -24,8 +24,9 @@ export interface ISecretRepository {
|
||||
findAll(): Promise<Secret[]>;
|
||||
findById(id: string): Promise<Secret | null>;
|
||||
findByName(name: string): Promise<Secret | null>;
|
||||
create(data: CreateSecretInput): Promise<Secret>;
|
||||
update(id: string, data: UpdateSecretInput): Promise<Secret>;
|
||||
findByBackend(backendId: string): Promise<Secret[]>;
|
||||
create(data: SecretRepoCreateInput): Promise<Secret>;
|
||||
update(id: string, data: SecretRepoUpdateInput): Promise<Secret>;
|
||||
delete(id: string): Promise<void>;
|
||||
}
|
||||
|
||||
@@ -57,6 +58,8 @@ export interface AuditEventFilter {
|
||||
serverName?: string;
|
||||
correlationId?: string;
|
||||
userName?: string;
|
||||
tokenName?: string;
|
||||
tokenSha?: string;
|
||||
from?: Date;
|
||||
to?: Date;
|
||||
limit?: number;
|
||||
@@ -74,6 +77,8 @@ export interface AuditEventCreateInput {
|
||||
correlationId?: string;
|
||||
parentEventId?: string;
|
||||
userName?: string;
|
||||
tokenName?: string;
|
||||
tokenSha?: string;
|
||||
payload: Record<string, unknown>;
|
||||
}
|
||||
|
||||
@@ -95,3 +100,37 @@ export interface IAuditEventRepository {
|
||||
listSessions(filter?: { projectName?: string; userName?: string; from?: Date; to?: Date; limit?: number; offset?: number }): Promise<AuditSessionSummary[]>;
|
||||
countSessions(filter?: { projectName?: string; userName?: string; from?: Date; to?: Date }): Promise<number>;
|
||||
}
|
||||
|
||||
// ── MCP Tokens ──
|
||||
|
||||
export interface McpTokenFilter {
|
||||
projectId?: string;
|
||||
ownerId?: string;
|
||||
includeRevoked?: boolean;
|
||||
}
|
||||
|
||||
export interface CreateMcpTokenRepoInput {
|
||||
name: string;
|
||||
projectId: string;
|
||||
ownerId: string;
|
||||
tokenHash: string;
|
||||
tokenPrefix: string;
|
||||
description?: string;
|
||||
expiresAt?: Date | null;
|
||||
}
|
||||
|
||||
export type McpTokenWithRelations = McpToken & {
|
||||
project: { id: string; name: string };
|
||||
owner: { id: string; email: string };
|
||||
};
|
||||
|
||||
export interface IMcpTokenRepository {
|
||||
findAll(filter?: McpTokenFilter): Promise<McpTokenWithRelations[]>;
|
||||
findById(id: string): Promise<McpTokenWithRelations | null>;
|
||||
findByHash(tokenHash: string): Promise<McpTokenWithRelations | null>;
|
||||
findByNameAndProject(name: string, projectId: string): Promise<McpTokenWithRelations | null>;
|
||||
create(data: CreateMcpTokenRepoInput): Promise<McpTokenWithRelations>;
|
||||
revoke(id: string): Promise<McpTokenWithRelations>;
|
||||
touchLastUsed(id: string): Promise<void>;
|
||||
delete(id: string): Promise<void>;
|
||||
}
|
||||
|
||||
83
src/mcpd/src/repositories/mcp-token.repository.ts
Normal file
83
src/mcpd/src/repositories/mcp-token.repository.ts
Normal file
@@ -0,0 +1,83 @@
|
||||
import type { PrismaClient } from '@prisma/client';
|
||||
import type {
|
||||
IMcpTokenRepository,
|
||||
McpTokenFilter,
|
||||
McpTokenWithRelations,
|
||||
CreateMcpTokenRepoInput,
|
||||
} from './interfaces.js';
|
||||
|
||||
const INCLUDE_RELATIONS = {
|
||||
project: { select: { id: true, name: true } },
|
||||
owner: { select: { id: true, email: true } },
|
||||
} as const;
|
||||
|
||||
export class McpTokenRepository implements IMcpTokenRepository {
|
||||
constructor(private readonly prisma: PrismaClient) {}
|
||||
|
||||
async findAll(filter?: McpTokenFilter): Promise<McpTokenWithRelations[]> {
|
||||
const where: Record<string, unknown> = {};
|
||||
if (filter?.projectId !== undefined) where['projectId'] = filter.projectId;
|
||||
if (filter?.ownerId !== undefined) where['ownerId'] = filter.ownerId;
|
||||
if (!filter?.includeRevoked) where['revokedAt'] = null;
|
||||
return this.prisma.mcpToken.findMany({
|
||||
where,
|
||||
include: INCLUDE_RELATIONS,
|
||||
orderBy: { createdAt: 'desc' },
|
||||
}) as Promise<McpTokenWithRelations[]>;
|
||||
}
|
||||
|
||||
async findById(id: string): Promise<McpTokenWithRelations | null> {
|
||||
return this.prisma.mcpToken.findUnique({
|
||||
where: { id },
|
||||
include: INCLUDE_RELATIONS,
|
||||
}) as Promise<McpTokenWithRelations | null>;
|
||||
}
|
||||
|
||||
async findByHash(tokenHash: string): Promise<McpTokenWithRelations | null> {
|
||||
return this.prisma.mcpToken.findUnique({
|
||||
where: { tokenHash },
|
||||
include: INCLUDE_RELATIONS,
|
||||
}) as Promise<McpTokenWithRelations | null>;
|
||||
}
|
||||
|
||||
async findByNameAndProject(name: string, projectId: string): Promise<McpTokenWithRelations | null> {
|
||||
return this.prisma.mcpToken.findUnique({
|
||||
where: { name_projectId: { name, projectId } },
|
||||
include: INCLUDE_RELATIONS,
|
||||
}) as Promise<McpTokenWithRelations | null>;
|
||||
}
|
||||
|
||||
async create(data: CreateMcpTokenRepoInput): Promise<McpTokenWithRelations> {
|
||||
return this.prisma.mcpToken.create({
|
||||
data: {
|
||||
name: data.name,
|
||||
projectId: data.projectId,
|
||||
ownerId: data.ownerId,
|
||||
tokenHash: data.tokenHash,
|
||||
tokenPrefix: data.tokenPrefix,
|
||||
description: data.description ?? '',
|
||||
expiresAt: data.expiresAt ?? null,
|
||||
},
|
||||
include: INCLUDE_RELATIONS,
|
||||
}) as Promise<McpTokenWithRelations>;
|
||||
}
|
||||
|
||||
async revoke(id: string): Promise<McpTokenWithRelations> {
|
||||
return this.prisma.mcpToken.update({
|
||||
where: { id },
|
||||
data: { revokedAt: new Date() },
|
||||
include: INCLUDE_RELATIONS,
|
||||
}) as Promise<McpTokenWithRelations>;
|
||||
}
|
||||
|
||||
async touchLastUsed(id: string): Promise<void> {
|
||||
await this.prisma.mcpToken.update({
|
||||
where: { id },
|
||||
data: { lastUsedAt: new Date() },
|
||||
});
|
||||
}
|
||||
|
||||
async delete(id: string): Promise<void> {
|
||||
await this.prisma.mcpToken.delete({ where: { id } });
|
||||
}
|
||||
}
|
||||
103
src/mcpd/src/repositories/secret-backend.repository.ts
Normal file
103
src/mcpd/src/repositories/secret-backend.repository.ts
Normal file
@@ -0,0 +1,103 @@
|
||||
import type { PrismaClient, SecretBackend, Prisma } from '@prisma/client';
|
||||
|
||||
/** Input for creating a SecretBackend row. `config` is driver-specific JSON. */
export interface CreateSecretBackendInput {
  name: string;
  /** Driver discriminator, e.g. 'plaintext' or 'openbao'. */
  type: string;
  config?: Record<string, unknown>;
  /** When true, every other backend's default flag is cleared in the same transaction. */
  isDefault?: boolean;
  description?: string;
}

/** Partial update for a SecretBackend; `name` and `type` are immutable after creation. */
export interface UpdateSecretBackendInput {
  config?: Record<string, unknown>;
  isDefault?: boolean;
  description?: string;
}

/** Persistence contract for SecretBackend rows. */
export interface ISecretBackendRepository {
  findAll(): Promise<SecretBackend[]>;
  findById(id: string): Promise<SecretBackend | null>;
  findByName(name: string): Promise<SecretBackend | null>;
  /** The single row flagged isDefault=true, or null if the invariant is broken. */
  findDefault(): Promise<SecretBackend | null>;
  create(data: CreateSecretBackendInput): Promise<SecretBackend>;
  update(id: string, data: UpdateSecretBackendInput): Promise<SecretBackend>;
  /**
   * Atomically clear `isDefault` on every row except the one named, then set
   * the given row as default. Used by `setDefault`.
   */
  setAsDefault(id: string): Promise<SecretBackend>;
  delete(id: string): Promise<void>;
  /** Count secrets that still reference this backend — used to guard delete. */
  countReferencingSecrets(backendId: string): Promise<number>;
}
|
||||
|
||||
export class SecretBackendRepository implements ISecretBackendRepository {
|
||||
constructor(private readonly prisma: PrismaClient) {}
|
||||
|
||||
async findAll(): Promise<SecretBackend[]> {
|
||||
return this.prisma.secretBackend.findMany({ orderBy: { name: 'asc' } });
|
||||
}
|
||||
|
||||
async findById(id: string): Promise<SecretBackend | null> {
|
||||
return this.prisma.secretBackend.findUnique({ where: { id } });
|
||||
}
|
||||
|
||||
async findByName(name: string): Promise<SecretBackend | null> {
|
||||
return this.prisma.secretBackend.findUnique({ where: { name } });
|
||||
}
|
||||
|
||||
async findDefault(): Promise<SecretBackend | null> {
|
||||
return this.prisma.secretBackend.findFirst({ where: { isDefault: true } });
|
||||
}
|
||||
|
||||
async create(data: CreateSecretBackendInput): Promise<SecretBackend> {
|
||||
return this.prisma.$transaction(async (tx) => {
|
||||
if (data.isDefault === true) {
|
||||
await tx.secretBackend.updateMany({ where: { isDefault: true }, data: { isDefault: false } });
|
||||
}
|
||||
return tx.secretBackend.create({
|
||||
data: {
|
||||
name: data.name,
|
||||
type: data.type,
|
||||
config: (data.config ?? {}) as Prisma.InputJsonValue,
|
||||
isDefault: data.isDefault ?? false,
|
||||
description: data.description ?? '',
|
||||
},
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
async update(id: string, data: UpdateSecretBackendInput): Promise<SecretBackend> {
|
||||
return this.prisma.$transaction(async (tx) => {
|
||||
if (data.isDefault === true) {
|
||||
await tx.secretBackend.updateMany({
|
||||
where: { isDefault: true, NOT: { id } },
|
||||
data: { isDefault: false },
|
||||
});
|
||||
}
|
||||
const updateData: Prisma.SecretBackendUpdateInput = {};
|
||||
if (data.config !== undefined) updateData.config = data.config as Prisma.InputJsonValue;
|
||||
if (data.isDefault !== undefined) updateData.isDefault = data.isDefault;
|
||||
if (data.description !== undefined) updateData.description = data.description;
|
||||
return tx.secretBackend.update({ where: { id }, data: updateData });
|
||||
});
|
||||
}
|
||||
|
||||
async setAsDefault(id: string): Promise<SecretBackend> {
|
||||
return this.prisma.$transaction(async (tx) => {
|
||||
await tx.secretBackend.updateMany({
|
||||
where: { isDefault: true, NOT: { id } },
|
||||
data: { isDefault: false },
|
||||
});
|
||||
return tx.secretBackend.update({ where: { id }, data: { isDefault: true } });
|
||||
});
|
||||
}
|
||||
|
||||
async delete(id: string): Promise<void> {
|
||||
await this.prisma.secretBackend.delete({ where: { id } });
|
||||
}
|
||||
|
||||
async countReferencingSecrets(backendId: string): Promise<number> {
|
||||
return this.prisma.secret.count({ where: { backendId } });
|
||||
}
|
||||
}
|
||||
@@ -1,6 +1,18 @@
|
||||
import { type PrismaClient, type Secret } from '@prisma/client';
|
||||
import { type PrismaClient, type Secret, type Prisma } from '@prisma/client';
|
||||
import type { ISecretRepository } from './interfaces.js';
|
||||
import type { CreateSecretInput, UpdateSecretInput } from '../validation/secret.schema.js';
|
||||
|
||||
export interface SecretRepoCreateInput {
|
||||
name: string;
|
||||
backendId: string;
|
||||
data?: Record<string, string>;
|
||||
externalRef?: string;
|
||||
}
|
||||
|
||||
export interface SecretRepoUpdateInput {
|
||||
data?: Record<string, string>;
|
||||
externalRef?: string;
|
||||
backendId?: string;
|
||||
}
|
||||
|
||||
export class SecretRepository implements ISecretRepository {
|
||||
constructor(private readonly prisma: PrismaClient) {}
|
||||
@@ -17,20 +29,29 @@ export class SecretRepository implements ISecretRepository {
|
||||
return this.prisma.secret.findUnique({ where: { name } });
|
||||
}
|
||||
|
||||
async create(data: CreateSecretInput): Promise<Secret> {
|
||||
async findByBackend(backendId: string): Promise<Secret[]> {
|
||||
return this.prisma.secret.findMany({ where: { backendId }, orderBy: { name: 'asc' } });
|
||||
}
|
||||
|
||||
async create(data: SecretRepoCreateInput): Promise<Secret> {
|
||||
return this.prisma.secret.create({
|
||||
data: {
|
||||
name: data.name,
|
||||
data: data.data,
|
||||
backendId: data.backendId,
|
||||
data: (data.data ?? {}) as Prisma.InputJsonValue,
|
||||
externalRef: data.externalRef ?? '',
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
async update(id: string, data: UpdateSecretInput): Promise<Secret> {
|
||||
return this.prisma.secret.update({
|
||||
where: { id },
|
||||
data: { data: data.data },
|
||||
});
|
||||
async update(id: string, data: SecretRepoUpdateInput): Promise<Secret> {
|
||||
const updateData: Prisma.SecretUpdateInput = {};
|
||||
if (data.data !== undefined) updateData.data = data.data as Prisma.InputJsonValue;
|
||||
if (data.externalRef !== undefined) updateData.externalRef = data.externalRef;
|
||||
if (data.backendId !== undefined) {
|
||||
updateData.backend = { connect: { id: data.backendId } };
|
||||
}
|
||||
return this.prisma.secret.update({ where: { id }, data: updateData });
|
||||
}
|
||||
|
||||
async delete(id: string): Promise<void> {
|
||||
|
||||
@@ -18,3 +18,5 @@ export { registerRbacRoutes } from './rbac-definitions.js';
|
||||
export { registerUserRoutes } from './users.js';
|
||||
export { registerGroupRoutes } from './groups.js';
|
||||
export { registerAuditEventRoutes } from './audit-events.js';
|
||||
export { registerMcpTokenRoutes } from './mcp-tokens.js';
|
||||
export type { McpTokenRouteDeps } from './mcp-tokens.js';
|
||||
|
||||
142
src/mcpd/src/routes/mcp-tokens.ts
Normal file
142
src/mcpd/src/routes/mcp-tokens.ts
Normal file
@@ -0,0 +1,142 @@
|
||||
import type { FastifyInstance, FastifyReply, FastifyRequest } from 'fastify';
|
||||
import { isMcpToken } from '@mcpctl/shared';
|
||||
import type { McpTokenService } from '../services/mcp-token.service.js';
|
||||
import { PermissionCeilingError } from '../services/mcp-token.service.js';
|
||||
import { NotFoundError, ConflictError } from '../services/mcp-server.service.js';
|
||||
import type { IProjectRepository } from '../repositories/project.repository.js';
|
||||
|
||||
export interface McpTokenRouteDeps {
|
||||
tokenService: McpTokenService;
|
||||
projectRepo: IProjectRepository;
|
||||
}
|
||||
|
||||
export function registerMcpTokenRoutes(app: FastifyInstance, deps: McpTokenRouteDeps): void {
|
||||
const { tokenService, projectRepo } = deps;
|
||||
|
||||
// ── List ─────────────────────────────────────────────────────────────
|
||||
app.get<{ Querystring: { projectId?: string; projectName?: string; includeRevoked?: string } }>(
|
||||
'/api/v1/mcptokens',
|
||||
async (request) => {
|
||||
const { projectId, projectName, includeRevoked } = request.query;
|
||||
|
||||
// Allow filtering by project name for CLI ergonomics.
|
||||
let resolvedProjectId = projectId;
|
||||
if (resolvedProjectId === undefined && projectName !== undefined) {
|
||||
const project = await projectRepo.findByName(projectName);
|
||||
if (project === null) throw new NotFoundError(`Project not found: ${projectName}`);
|
||||
resolvedProjectId = project.id;
|
||||
}
|
||||
|
||||
const filter: { projectId?: string; includeRevoked?: boolean } = {};
|
||||
if (resolvedProjectId !== undefined) filter.projectId = resolvedProjectId;
|
||||
if (includeRevoked === 'true') filter.includeRevoked = true;
|
||||
|
||||
const rows = await tokenService.list(filter);
|
||||
return rows.map(toListResponse);
|
||||
},
|
||||
);
|
||||
|
||||
// ── Describe ─────────────────────────────────────────────────────────
|
||||
app.get<{ Params: { id: string } }>('/api/v1/mcptokens/:id', async (request) => {
|
||||
const row = await tokenService.getById(request.params.id);
|
||||
return toListResponse(row);
|
||||
});
|
||||
|
||||
// ── Create ───────────────────────────────────────────────────────────
|
||||
app.post('/api/v1/mcptokens', async (request, reply) => {
|
||||
const userId = request.userId;
|
||||
if (userId === undefined) {
|
||||
reply.code(401);
|
||||
return { error: 'Not authenticated' };
|
||||
}
|
||||
|
||||
try {
|
||||
// Accept projectName OR projectId for CLI ergonomics.
|
||||
const body = (request.body ?? {}) as Record<string, unknown>;
|
||||
if (typeof body['projectName'] === 'string' && typeof body['projectId'] !== 'string') {
|
||||
const project = await projectRepo.findByName(body['projectName']);
|
||||
if (project === null) throw new NotFoundError(`Project not found: ${body['projectName']}`);
|
||||
body['projectId'] = project.id;
|
||||
}
|
||||
|
||||
const result = await tokenService.create(userId, body);
|
||||
reply.code(201);
|
||||
return {
|
||||
...toListResponse(result.mcpToken),
|
||||
token: result.raw,
|
||||
};
|
||||
} catch (err) {
|
||||
if (err instanceof NotFoundError) {
|
||||
reply.code(404);
|
||||
return { error: err.message };
|
||||
}
|
||||
if (err instanceof ConflictError) {
|
||||
reply.code(409);
|
||||
return { error: err.message };
|
||||
}
|
||||
if (err instanceof PermissionCeilingError) {
|
||||
reply.code(403);
|
||||
return { error: err.message };
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
});
|
||||
|
||||
// ── Revoke (soft-delete) ────────────────────────────────────────────
|
||||
app.post<{ Params: { id: string } }>('/api/v1/mcptokens/:id/revoke', async (request) => {
|
||||
const row = await tokenService.revoke(request.params.id);
|
||||
return toListResponse(row);
|
||||
});
|
||||
|
||||
// ── Delete (hard) ────────────────────────────────────────────────────
|
||||
app.delete<{ Params: { id: string } }>('/api/v1/mcptokens/:id', async (request, reply) => {
|
||||
await tokenService.delete(request.params.id);
|
||||
reply.code(204);
|
||||
});
|
||||
|
||||
// ── Introspect ───────────────────────────────────────────────────────
|
||||
// Called by mcplocal's HTTP-mode auth preHandler to resolve a raw bearer
|
||||
// to principal info. Accepts a McpToken bearer directly — bypasses the
|
||||
// session-auth path.
|
||||
app.get('/api/v1/mcptokens/introspect', async (request: FastifyRequest, reply: FastifyReply) => {
|
||||
const header = request.headers.authorization;
|
||||
if (header === undefined || !header.startsWith('Bearer ')) {
|
||||
reply.code(401);
|
||||
return { ok: false, error: 'Missing Authorization' };
|
||||
}
|
||||
const token = header.slice(7);
|
||||
if (!isMcpToken(token)) {
|
||||
reply.code(401);
|
||||
return { ok: false, error: 'Not a mcptoken bearer' };
|
||||
}
|
||||
const result = await tokenService.introspectRaw(token);
|
||||
if (!result.ok) {
|
||||
reply.code(401);
|
||||
}
|
||||
return result;
|
||||
});
|
||||
}
|
||||
|
||||
function toListResponse(row: import('../repositories/interfaces.js').McpTokenWithRelations): Record<string, unknown> {
|
||||
return {
|
||||
id: row.id,
|
||||
name: row.name,
|
||||
projectId: row.projectId,
|
||||
projectName: row.project.name,
|
||||
tokenPrefix: row.tokenPrefix,
|
||||
ownerId: row.ownerId,
|
||||
ownerEmail: row.owner.email,
|
||||
description: row.description,
|
||||
createdAt: row.createdAt,
|
||||
expiresAt: row.expiresAt,
|
||||
lastUsedAt: row.lastUsedAt,
|
||||
revokedAt: row.revokedAt,
|
||||
status: statusOf(row),
|
||||
};
|
||||
}
|
||||
|
||||
function statusOf(row: import('../repositories/interfaces.js').McpTokenWithRelations): 'active' | 'revoked' | 'expired' {
|
||||
if (row.revokedAt !== null) return 'revoked';
|
||||
if (row.expiresAt !== null && row.expiresAt < new Date()) return 'expired';
|
||||
return 'active';
|
||||
}
|
||||
89
src/mcpd/src/routes/secret-backends.ts
Normal file
89
src/mcpd/src/routes/secret-backends.ts
Normal file
@@ -0,0 +1,89 @@
|
||||
import type { FastifyInstance } from 'fastify';
|
||||
import type { SecretBackendService } from '../services/secret-backend.service.js';
|
||||
import { SecretBackendInUseError } from '../services/secret-backend.service.js';
|
||||
import { NotFoundError, ConflictError } from '../services/mcp-server.service.js';
|
||||
|
||||
export function registerSecretBackendRoutes(
|
||||
app: FastifyInstance,
|
||||
service: SecretBackendService,
|
||||
): void {
|
||||
app.get('/api/v1/secretbackends', async () => {
|
||||
const rows = await service.list();
|
||||
return rows.map(redactConfig);
|
||||
});
|
||||
|
||||
app.get<{ Params: { id: string } }>('/api/v1/secretbackends/:id', async (request) => {
|
||||
const row = await service.getById(request.params.id);
|
||||
return redactConfig(row);
|
||||
});
|
||||
|
||||
app.post('/api/v1/secretbackends', async (request, reply) => {
|
||||
try {
|
||||
const row = await service.create(request.body as {
|
||||
name: string;
|
||||
type: string;
|
||||
config?: Record<string, unknown>;
|
||||
isDefault?: boolean;
|
||||
description?: string;
|
||||
});
|
||||
reply.code(201);
|
||||
return redactConfig(row);
|
||||
} catch (err) {
|
||||
if (err instanceof ConflictError) {
|
||||
reply.code(409);
|
||||
return { error: err.message };
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
});
|
||||
|
||||
app.put<{ Params: { id: string } }>('/api/v1/secretbackends/:id', async (request) => {
|
||||
const row = await service.update(request.params.id, request.body as {
|
||||
config?: Record<string, unknown>;
|
||||
isDefault?: boolean;
|
||||
description?: string;
|
||||
});
|
||||
return redactConfig(row);
|
||||
});
|
||||
|
||||
app.post<{ Params: { id: string } }>('/api/v1/secretbackends/:id/default', async (request) => {
|
||||
const row = await service.setDefault(request.params.id);
|
||||
return redactConfig(row);
|
||||
});
|
||||
|
||||
app.delete<{ Params: { id: string } }>('/api/v1/secretbackends/:id', async (request, reply) => {
|
||||
try {
|
||||
await service.delete(request.params.id);
|
||||
reply.code(204);
|
||||
return null;
|
||||
} catch (err) {
|
||||
if (err instanceof SecretBackendInUseError) {
|
||||
reply.code(409);
|
||||
return { error: err.message };
|
||||
}
|
||||
if (err instanceof NotFoundError) {
|
||||
reply.code(404);
|
||||
return { error: err.message };
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Strip any value from `config` whose key looks like a credential, and replace
|
||||
* tokenSecretRef with a short description. Prevents accidental exposure via
|
||||
* GET responses.
|
||||
*/
|
||||
function redactConfig<T extends { config: unknown }>(row: T): T {
|
||||
const config = (row.config ?? {}) as Record<string, unknown>;
|
||||
const cleaned: Record<string, unknown> = {};
|
||||
for (const [k, v] of Object.entries(config)) {
|
||||
if (/token|secret|password|key/i.test(k) && typeof v === 'string') {
|
||||
cleaned[k] = '***';
|
||||
} else {
|
||||
cleaned[k] = v;
|
||||
}
|
||||
}
|
||||
return { ...row, config: cleaned };
|
||||
}
|
||||
41
src/mcpd/src/routes/secret-migrate.ts
Normal file
41
src/mcpd/src/routes/secret-migrate.ts
Normal file
@@ -0,0 +1,41 @@
|
||||
import type { FastifyInstance } from 'fastify';
|
||||
import type { SecretMigrateService } from '../services/secret-migrate.service.js';
|
||||
|
||||
export function registerSecretMigrateRoutes(
|
||||
app: FastifyInstance,
|
||||
service: SecretMigrateService,
|
||||
): void {
|
||||
/**
|
||||
* POST /api/v1/secrets/migrate
|
||||
* body: { from: string, to: string, names?: string[], keepSource?: boolean, dryRun?: boolean }
|
||||
* RBAC: operation `migrate-secrets` (role:run).
|
||||
*/
|
||||
app.post<{
|
||||
Body: {
|
||||
from: string;
|
||||
to: string;
|
||||
names?: string[];
|
||||
keepSource?: boolean;
|
||||
dryRun?: boolean;
|
||||
};
|
||||
}>('/api/v1/secrets/migrate', async (request, reply) => {
|
||||
const { from, to, names, keepSource, dryRun } = request.body;
|
||||
if (!from || !to) {
|
||||
reply.code(400);
|
||||
return { error: 'from and to are required' };
|
||||
}
|
||||
|
||||
if (dryRun === true) {
|
||||
const options: Parameters<SecretMigrateService['dryRun']>[0] = { from, to };
|
||||
if (names !== undefined) options.names = names;
|
||||
if (keepSource !== undefined) options.keepSource = keepSource;
|
||||
const secrets = await service.dryRun(options);
|
||||
return { dryRun: true, candidates: secrets.map((s) => ({ id: s.id, name: s.name })) };
|
||||
}
|
||||
|
||||
const options: Parameters<SecretMigrateService['migrate']>[0] = { from, to };
|
||||
if (names !== undefined) options.names = names;
|
||||
if (keepSource !== undefined) options.keepSource = keepSource;
|
||||
return service.migrate(options);
|
||||
});
|
||||
}
|
||||
@@ -9,6 +9,8 @@ export interface AuditEventQueryParams {
|
||||
serverName?: string;
|
||||
correlationId?: string;
|
||||
userName?: string;
|
||||
tokenName?: string;
|
||||
tokenSha?: string;
|
||||
from?: string;
|
||||
to?: string;
|
||||
limit?: number;
|
||||
@@ -71,6 +73,8 @@ export class AuditEventService {
|
||||
if (params.serverName !== undefined) filter.serverName = params.serverName;
|
||||
if (params.correlationId !== undefined) filter.correlationId = params.correlationId;
|
||||
if (params.userName !== undefined) filter.userName = params.userName;
|
||||
if (params.tokenName !== undefined) filter.tokenName = params.tokenName;
|
||||
if (params.tokenSha !== undefined) filter.tokenSha = params.tokenSha;
|
||||
if (params.from !== undefined) filter.from = new Date(params.from);
|
||||
if (params.to !== undefined) filter.to = new Date(params.to);
|
||||
if (params.limit !== undefined) filter.limit = params.limit;
|
||||
|
||||
@@ -6,6 +6,7 @@ import type { IRbacDefinitionRepository } from '../../repositories/rbac-definiti
|
||||
import type { IPromptRepository } from '../../repositories/prompt.repository.js';
|
||||
import type { ITemplateRepository } from '../../repositories/template.repository.js';
|
||||
import type { RbacRoleBinding } from '../../validation/rbac-definition.schema.js';
|
||||
import type { SecretService } from '../secret.service.js';
|
||||
import { decrypt } from './crypto.js';
|
||||
import type { BackupBundle } from './backup-service.js';
|
||||
|
||||
@@ -41,6 +42,7 @@ export class RestoreService {
|
||||
private serverRepo: IMcpServerRepository,
|
||||
private projectRepo: IProjectRepository,
|
||||
private secretRepo: ISecretRepository,
|
||||
private secretService: SecretService,
|
||||
private userRepo?: IUserRepository,
|
||||
private groupRepo?: IGroupRepository,
|
||||
private rbacRepo?: IRbacDefinitionRepository,
|
||||
@@ -125,16 +127,13 @@ export class RestoreService {
|
||||
result.secretsSkipped++;
|
||||
continue;
|
||||
}
|
||||
// overwrite
|
||||
await this.secretRepo.update(existing.id, { data: secret.data });
|
||||
// overwrite — route through SecretService so backend dispatch applies.
|
||||
await this.secretService.update(existing.id, { data: secret.data });
|
||||
result.secretsCreated++;
|
||||
continue;
|
||||
}
|
||||
|
||||
await this.secretRepo.create({
|
||||
name: secret.name,
|
||||
data: secret.data,
|
||||
});
|
||||
await this.secretService.create({ name: secret.name, data: secret.data });
|
||||
result.secretsCreated++;
|
||||
} catch (err) {
|
||||
result.errors.push(`Failed to restore secret "${secret.name}": ${err instanceof Error ? err.message : String(err)}`);
|
||||
|
||||
@@ -1,42 +1,44 @@
|
||||
import type { McpServer } from '@prisma/client';
|
||||
import type { ISecretRepository } from '../repositories/interfaces.js';
|
||||
import type { ServerEnvEntry } from '../validation/mcp-server.schema.js';
|
||||
|
||||
/**
|
||||
* Minimal dependency surface for the env resolver: anything that can turn a
|
||||
* (secretName, key) pair into a string. Matches `SecretService.resolve()` so
|
||||
* resolution now flows through the configured SecretBackend driver instead
|
||||
* of reading `Secret.data` directly.
|
||||
*/
|
||||
export interface SecretResolver {
|
||||
resolve(secretName: string, key: string): Promise<string>;
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve a server's env entries into a flat key-value map.
|
||||
* - Inline `value` entries are used directly.
|
||||
* - `valueFrom.secretRef` entries are looked up from the secret repository.
|
||||
* - `valueFrom.secretRef` entries are looked up through the resolver.
|
||||
* Throws if a referenced secret or key is missing.
|
||||
*/
|
||||
export async function resolveServerEnv(
|
||||
server: McpServer,
|
||||
secretRepo: ISecretRepository,
|
||||
resolver: SecretResolver,
|
||||
): Promise<Record<string, string>> {
|
||||
const entries = server.env as ServerEnvEntry[];
|
||||
if (!entries || entries.length === 0) return {};
|
||||
|
||||
const result: Record<string, string> = {};
|
||||
const secretCache = new Map<string, Record<string, string>>();
|
||||
|
||||
for (const entry of entries) {
|
||||
if (entry.value !== undefined) {
|
||||
result[entry.name] = entry.value;
|
||||
} else if (entry.valueFrom?.secretRef) {
|
||||
const { name: secretName, key } = entry.valueFrom.secretRef;
|
||||
|
||||
if (!secretCache.has(secretName)) {
|
||||
const secret = await secretRepo.findByName(secretName);
|
||||
if (!secret) {
|
||||
throw new Error(`Secret '${secretName}' not found (referenced by server '${server.name}' env '${entry.name}')`);
|
||||
}
|
||||
secretCache.set(secretName, secret.data as Record<string, string>);
|
||||
try {
|
||||
result[entry.name] = await resolver.resolve(secretName, key);
|
||||
} catch (err) {
|
||||
const msg = err instanceof Error ? err.message : String(err);
|
||||
throw new Error(
|
||||
`Cannot resolve secret for server '${server.name}' env '${entry.name}': ${msg}`,
|
||||
);
|
||||
}
|
||||
|
||||
const data = secretCache.get(secretName)!;
|
||||
if (!(key in data)) {
|
||||
throw new Error(`Key '${key}' not found in secret '${secretName}' (referenced by server '${server.name}' env '${entry.name}')`);
|
||||
}
|
||||
result[entry.name] = data[key]!;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -34,3 +34,5 @@ export { UserService } from './user.service.js';
|
||||
export { GroupService } from './group.service.js';
|
||||
export { AuditEventService } from './audit-event.service.js';
|
||||
export type { AuditEventQueryParams } from './audit-event.service.js';
|
||||
export { McpTokenService, PermissionCeilingError } from './mcp-token.service.js';
|
||||
export type { CreateMcpTokenResult, IntrospectResult } from './mcp-token.service.js';
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
import type { McpInstance } from '@prisma/client';
|
||||
import type { IMcpInstanceRepository, IMcpServerRepository, ISecretRepository } from '../repositories/interfaces.js';
|
||||
import type { IMcpInstanceRepository, IMcpServerRepository } from '../repositories/interfaces.js';
|
||||
import type { McpOrchestrator, ContainerSpec, ContainerInfo } from './orchestrator.js';
|
||||
import { NotFoundError } from './mcp-server.service.js';
|
||||
import { resolveServerEnv } from './env-resolver.js';
|
||||
import { resolveServerEnv, type SecretResolver } from './env-resolver.js';
|
||||
|
||||
/** Runner images for package-based MCP servers, keyed by runtime name. */
|
||||
const RUNNER_IMAGES: Record<string, string> = {
|
||||
@@ -26,7 +26,7 @@ export class InstanceService {
|
||||
private instanceRepo: IMcpInstanceRepository,
|
||||
private serverRepo: IMcpServerRepository,
|
||||
private orchestrator: McpOrchestrator,
|
||||
private secretRepo?: ISecretRepository,
|
||||
private secretResolver?: SecretResolver,
|
||||
) {}
|
||||
|
||||
async list(serverId?: string): Promise<McpInstance[]> {
|
||||
@@ -284,9 +284,9 @@ export class InstanceService {
|
||||
}
|
||||
|
||||
// Resolve env vars from inline values and secret refs
|
||||
if (this.secretRepo) {
|
||||
if (this.secretResolver) {
|
||||
try {
|
||||
const resolvedEnv = await resolveServerEnv(server, this.secretRepo);
|
||||
const resolvedEnv = await resolveServerEnv(server, this.secretResolver);
|
||||
if (Object.keys(resolvedEnv).length > 0) {
|
||||
spec.env = resolvedEnv;
|
||||
}
|
||||
|
||||
222
src/mcpd/src/services/mcp-token.service.ts
Normal file
222
src/mcpd/src/services/mcp-token.service.ts
Normal file
@@ -0,0 +1,222 @@
|
||||
import { generateToken, hashToken } from '@mcpctl/shared';
|
||||
import type { McpToken } from '@prisma/client';
|
||||
import type { IMcpTokenRepository, McpTokenWithRelations, McpTokenFilter } from '../repositories/interfaces.js';
|
||||
import type { IRbacDefinitionRepository } from '../repositories/rbac-definition.repository.js';
|
||||
import type { IProjectRepository } from '../repositories/project.repository.js';
|
||||
import { CreateMcpTokenSchema } from '../validation/mcp-token.schema.js';
|
||||
import { isResourceBinding, type RbacRoleBinding, type RbacSubject } from '../validation/rbac-definition.schema.js';
|
||||
import type { RbacService, Permission } from './rbac.service.js';
|
||||
import { ROLE_ACTIONS_FOR_CEILING } from './rbac.service.js';
|
||||
import { NotFoundError, ConflictError } from './mcp-server.service.js';
|
||||
|
||||
/** Thrown when the requesting user tries to mint a token with bindings they cannot grant themselves. */
|
||||
export class PermissionCeilingError extends Error {
|
||||
constructor(message: string) {
|
||||
super(message);
|
||||
this.name = 'PermissionCeilingError';
|
||||
}
|
||||
}
|
||||
|
||||
export interface CreateMcpTokenResult {
|
||||
/** The database row (with project/owner relations). */
|
||||
mcpToken: McpTokenWithRelations;
|
||||
/** The raw bearer token — shown exactly once. */
|
||||
raw: string;
|
||||
}
|
||||
|
||||
export interface IntrospectResult {
|
||||
ok: boolean;
|
||||
tokenId?: string;
|
||||
tokenName?: string;
|
||||
tokenSha?: string;
|
||||
projectId?: string;
|
||||
projectName?: string;
|
||||
ownerId?: string;
|
||||
expired?: boolean;
|
||||
revoked?: boolean;
|
||||
}
|
||||
|
||||
export class McpTokenService {
|
||||
constructor(
|
||||
private readonly tokenRepo: IMcpTokenRepository,
|
||||
private readonly projectRepo: IProjectRepository,
|
||||
private readonly rbacRepo: IRbacDefinitionRepository,
|
||||
private readonly rbacService: RbacService,
|
||||
) {}
|
||||
|
||||
async list(filter?: McpTokenFilter): Promise<McpTokenWithRelations[]> {
|
||||
return this.tokenRepo.findAll(filter);
|
||||
}
|
||||
|
||||
async getById(id: string): Promise<McpTokenWithRelations> {
|
||||
const row = await this.tokenRepo.findById(id);
|
||||
if (row === null) throw new NotFoundError(`McpToken not found: ${id}`);
|
||||
return row;
|
||||
}
|
||||
|
||||
/** Hash + lookup a raw bearer. Returns the row if valid and active; null if missing, revoked, or expired. */
|
||||
async introspectRaw(raw: string): Promise<IntrospectResult> {
|
||||
const hash = hashToken(raw);
|
||||
const row = await this.tokenRepo.findByHash(hash);
|
||||
if (row === null) return { ok: false };
|
||||
|
||||
const now = new Date();
|
||||
const revoked = row.revokedAt !== null;
|
||||
const expired = row.expiresAt !== null && row.expiresAt < now;
|
||||
|
||||
if (revoked || expired) {
|
||||
return {
|
||||
ok: false,
|
||||
tokenId: row.id,
|
||||
tokenName: row.name,
|
||||
tokenSha: row.tokenHash,
|
||||
revoked,
|
||||
expired,
|
||||
};
|
||||
}
|
||||
|
||||
// Best-effort last-used tracking (don't block on this).
|
||||
this.tokenRepo.touchLastUsed(row.id).catch(() => { /* ignore */ });
|
||||
|
||||
return {
|
||||
ok: true,
|
||||
tokenId: row.id,
|
||||
tokenName: row.name,
|
||||
tokenSha: row.tokenHash,
|
||||
projectId: row.projectId,
|
||||
projectName: row.project.name,
|
||||
ownerId: row.ownerId,
|
||||
expired: false,
|
||||
revoked: false,
|
||||
};
|
||||
}
|
||||
|
||||
async create(creatorUserId: string, input: unknown): Promise<CreateMcpTokenResult> {
|
||||
const data = CreateMcpTokenSchema.parse(input);
|
||||
|
||||
const project = await this.projectRepo.findById(data.projectId);
|
||||
if (project === null) throw new NotFoundError(`Project not found: ${data.projectId}`);
|
||||
|
||||
const existing = await this.tokenRepo.findByNameAndProject(data.name, data.projectId);
|
||||
if (existing !== null && existing.revokedAt === null) {
|
||||
throw new ConflictError(`McpToken already exists: ${data.name} in project ${project.name}`);
|
||||
}
|
||||
|
||||
// Resolve the effective bindings:
|
||||
// base = rbacMode === 'clone' ? snapshot(creator) : []
|
||||
// effective = base + explicit bindings
|
||||
const basePerms = data.rbacMode === 'clone'
|
||||
? await this.rbacService.getPermissions(creatorUserId)
|
||||
: [];
|
||||
const baseBindings = basePerms.map(permissionToBinding);
|
||||
const effectiveBindings: RbacRoleBinding[] = [...baseBindings, ...data.bindings];
|
||||
|
||||
// Creator ceiling: every effective binding must be within what creator can do.
|
||||
// Cloned bindings are trivially satisfied; explicit ones may not be.
|
||||
for (const binding of data.bindings) {
|
||||
const violation = await this.checkCeiling(creatorUserId, binding);
|
||||
if (violation !== null) throw new PermissionCeilingError(violation);
|
||||
}
|
||||
|
||||
// Generate the token
|
||||
const { raw, hash, prefix } = generateToken();
|
||||
|
||||
// Normalize expiresAt
|
||||
let expiresAt: Date | null = null;
|
||||
if (data.expiresAt !== undefined && data.expiresAt !== null) {
|
||||
expiresAt = typeof data.expiresAt === 'string' ? new Date(data.expiresAt) : data.expiresAt;
|
||||
}
|
||||
|
||||
const createArgs: {
|
||||
name: string;
|
||||
projectId: string;
|
||||
ownerId: string;
|
||||
tokenHash: string;
|
||||
tokenPrefix: string;
|
||||
description?: string;
|
||||
expiresAt: Date | null;
|
||||
} = {
|
||||
name: data.name,
|
||||
projectId: data.projectId,
|
||||
ownerId: creatorUserId,
|
||||
tokenHash: hash,
|
||||
tokenPrefix: prefix,
|
||||
expiresAt,
|
||||
};
|
||||
if (data.description !== undefined) createArgs.description = data.description;
|
||||
const row = await this.tokenRepo.create(createArgs);
|
||||
|
||||
// If the token has bindings, auto-create an RbacDefinition so the token is a real RBAC principal.
|
||||
if (effectiveBindings.length > 0) {
|
||||
const subject: RbacSubject = { kind: 'McpToken', name: hash };
|
||||
await this.rbacRepo.create({
|
||||
name: rbacDefNameFor(row),
|
||||
subjects: [subject],
|
||||
roleBindings: effectiveBindings,
|
||||
});
|
||||
}
|
||||
|
||||
return { mcpToken: row, raw };
|
||||
}
|
||||
|
||||
async revoke(id: string): Promise<McpTokenWithRelations> {
|
||||
const existing = await this.getById(id);
|
||||
const row = await this.tokenRepo.revoke(id);
|
||||
// Remove the RBAC definition so the token's bindings stop resolving immediately.
|
||||
await this.deleteRbacDefinitionFor(existing).catch(() => { /* ignore */ });
|
||||
return row;
|
||||
}
|
||||
|
||||
async delete(id: string): Promise<void> {
|
||||
const existing = await this.getById(id);
|
||||
await this.deleteRbacDefinitionFor(existing).catch(() => { /* ignore */ });
|
||||
await this.tokenRepo.delete(id);
|
||||
}
|
||||
|
||||
private async deleteRbacDefinitionFor(row: McpToken): Promise<void> {
|
||||
const name = rbacDefNameFor(row);
|
||||
const existing = await this.rbacRepo.findByName(name);
|
||||
if (existing === null) return;
|
||||
await this.rbacRepo.delete(existing.id);
|
||||
}
|
||||
|
||||
/**
|
||||
* For a single requested binding, return null if the creator can grant it,
|
||||
* or a human-readable reason string if they cannot.
|
||||
*/
|
||||
private async checkCeiling(creatorUserId: string, binding: RbacRoleBinding): Promise<string | null> {
|
||||
if (isResourceBinding(binding)) {
|
||||
const grantedActions = ROLE_ACTIONS_FOR_CEILING[binding.role] ?? [];
|
||||
for (const action of grantedActions) {
|
||||
const ok = await this.rbacService.canAccess(
|
||||
creatorUserId,
|
||||
action,
|
||||
binding.resource,
|
||||
binding.name,
|
||||
);
|
||||
if (!ok) {
|
||||
return `Ceiling violation: you do not have permission '${action}' on ${binding.resource}${binding.name !== undefined ? `/${binding.name}` : ''}`;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
// Operation binding
|
||||
const ok = await this.rbacService.canRunOperation(creatorUserId, binding.action);
|
||||
if (!ok) return `Ceiling violation: you cannot run operation '${binding.action}'`;
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
function permissionToBinding(p: Permission): RbacRoleBinding {
|
||||
if ('resource' in p) {
|
||||
return p.name !== undefined
|
||||
? { role: p.role as RbacRoleBinding extends { role: infer R } ? R : never, resource: p.resource, name: p.name } as RbacRoleBinding
|
||||
: { role: p.role, resource: p.resource } as RbacRoleBinding;
|
||||
}
|
||||
return { role: 'run', action: p.action };
|
||||
}
|
||||
|
||||
function rbacDefNameFor(row: { id: string }): string {
|
||||
// Must match the regex in CreateRbacDefinitionSchema (lowercase alphanumeric with hyphens).
|
||||
return `mcptoken-${row.id.toLowerCase()}`;
|
||||
}
|
||||
@@ -38,6 +38,9 @@ const ROLE_ACTIONS: Record<string, readonly RbacAction[]> = {
|
||||
expose: ['expose', 'view'],
|
||||
};
|
||||
|
||||
/** Exported alias for permission-ceiling checks elsewhere (e.g. McpTokenService). */
|
||||
export const ROLE_ACTIONS_FOR_CEILING = ROLE_ACTIONS;
|
||||
|
||||
export class RbacService {
|
||||
constructor(
|
||||
private readonly rbacRepo: IRbacDefinitionRepository,
|
||||
@@ -50,8 +53,8 @@ export class RbacService {
|
||||
* If provided, name-scoped bindings only match when their name equals this.
|
||||
* If omitted (listing), name-scoped bindings still grant access.
|
||||
*/
|
||||
async canAccess(userId: string, action: RbacAction, resource: string, resourceName?: string, serviceAccountName?: string): Promise<boolean> {
|
||||
const permissions = await this.getPermissions(userId, serviceAccountName);
|
||||
async canAccess(userId: string, action: RbacAction, resource: string, resourceName?: string, serviceAccountName?: string, mcpTokenSha?: string): Promise<boolean> {
|
||||
const permissions = await this.getPermissions(userId, serviceAccountName, mcpTokenSha);
|
||||
const normalized = normalizeResource(resource);
|
||||
|
||||
for (const perm of permissions) {
|
||||
@@ -73,8 +76,8 @@ export class RbacService {
|
||||
* Check whether a user is allowed to perform a named operation.
|
||||
* Operations require an explicit 'run' role binding with a matching action.
|
||||
*/
|
||||
async canRunOperation(userId: string, operation: string, serviceAccountName?: string): Promise<boolean> {
|
||||
const permissions = await this.getPermissions(userId, serviceAccountName);
|
||||
async canRunOperation(userId: string, operation: string, serviceAccountName?: string, mcpTokenSha?: string): Promise<boolean> {
|
||||
const permissions = await this.getPermissions(userId, serviceAccountName, mcpTokenSha);
|
||||
|
||||
for (const perm of permissions) {
|
||||
if ('action' in perm && perm.role === 'run' && perm.action === operation) {
|
||||
@@ -90,8 +93,8 @@ export class RbacService {
|
||||
* Returns wildcard:true if any matching binding is unscoped (no name constraint).
|
||||
* Returns wildcard:false with a set of allowed names if all bindings are name-scoped.
|
||||
*/
|
||||
async getAllowedScope(userId: string, action: RbacAction, resource: string, serviceAccountName?: string): Promise<AllowedScope> {
|
||||
const permissions = await this.getPermissions(userId, serviceAccountName);
|
||||
async getAllowedScope(userId: string, action: RbacAction, resource: string, serviceAccountName?: string, mcpTokenSha?: string): Promise<AllowedScope> {
|
||||
const permissions = await this.getPermissions(userId, serviceAccountName, mcpTokenSha);
|
||||
const normalized = normalizeResource(resource);
|
||||
const names = new Set<string>();
|
||||
|
||||
@@ -113,13 +116,13 @@ export class RbacService {
|
||||
/**
|
||||
* Collect all permissions for a user across all matching RbacDefinitions.
|
||||
*/
|
||||
async getPermissions(userId: string, serviceAccountName?: string): Promise<Permission[]> {
|
||||
async getPermissions(userId: string, serviceAccountName?: string, mcpTokenSha?: string): Promise<Permission[]> {
|
||||
// 1. Resolve user email
|
||||
const user = await this.prisma.user.findUnique({
|
||||
where: { id: userId },
|
||||
select: { email: true },
|
||||
});
|
||||
if (user === null && serviceAccountName === undefined) return [];
|
||||
if (user === null && serviceAccountName === undefined && mcpTokenSha === undefined) return [];
|
||||
|
||||
// 2. Resolve group names the user belongs to
|
||||
let groupNames: string[] = [];
|
||||
@@ -142,6 +145,7 @@ export class RbacService {
|
||||
if (s.kind === 'User') return user !== null && s.name === user.email;
|
||||
if (s.kind === 'Group') return groupNames.includes(s.name);
|
||||
if (s.kind === 'ServiceAccount') return serviceAccountName !== undefined && s.name === serviceAccountName;
|
||||
if (s.kind === 'McpToken') return mcpTokenSha !== undefined && s.name === mcpTokenSha;
|
||||
return false;
|
||||
});
|
||||
|
||||
|
||||
88
src/mcpd/src/services/secret-backend.service.ts
Normal file
88
src/mcpd/src/services/secret-backend.service.ts
Normal file
@@ -0,0 +1,88 @@
|
||||
import type { SecretBackend } from '@prisma/client';
|
||||
import type { ISecretBackendRepository } from '../repositories/secret-backend.repository.js';
|
||||
import type { SecretBackendDriver } from './secret-backends/types.js';
|
||||
import { createDriver, type DriverFactoryDeps } from './secret-backends/factory.js';
|
||||
import { NotFoundError, ConflictError } from './mcp-server.service.js';
|
||||
|
||||
export class SecretBackendInUseError extends Error {
|
||||
constructor(backendName: string, count: number) {
|
||||
super(`SecretBackend '${backendName}' is still referenced by ${String(count)} secret(s); migrate them first`);
|
||||
this.name = 'SecretBackendInUseError';
|
||||
}
|
||||
}
|
||||
|
||||
export class SecretBackendService {
|
||||
private driverCache = new Map<string, SecretBackendDriver>(); // keyed by backend id
|
||||
|
||||
constructor(
|
||||
private readonly repo: ISecretBackendRepository,
|
||||
private readonly driverDeps: DriverFactoryDeps,
|
||||
) {}
|
||||
|
||||
async list(): Promise<SecretBackend[]> {
|
||||
return this.repo.findAll();
|
||||
}
|
||||
|
||||
async getById(id: string): Promise<SecretBackend> {
|
||||
const row = await this.repo.findById(id);
|
||||
if (row === null) throw new NotFoundError(`SecretBackend not found: ${id}`);
|
||||
return row;
|
||||
}
|
||||
|
||||
async getByName(name: string): Promise<SecretBackend> {
|
||||
const row = await this.repo.findByName(name);
|
||||
if (row === null) throw new NotFoundError(`SecretBackend not found: ${name}`);
|
||||
return row;
|
||||
}
|
||||
|
||||
async getDefault(): Promise<SecretBackend> {
|
||||
const row = await this.repo.findDefault();
|
||||
if (row === null) {
|
||||
throw new Error('No default SecretBackend configured. This shouldn\'t happen — the plaintext row should have been seeded on startup.');
|
||||
}
|
||||
return row;
|
||||
}
|
||||
|
||||
async create(input: {
|
||||
name: string;
|
||||
type: string;
|
||||
config?: Record<string, unknown>;
|
||||
isDefault?: boolean;
|
||||
description?: string;
|
||||
}): Promise<SecretBackend> {
|
||||
if (!input.name || !input.type) throw new Error('name and type are required');
|
||||
const existing = await this.repo.findByName(input.name);
|
||||
if (existing !== null) throw new ConflictError(`SecretBackend already exists: ${input.name}`);
|
||||
return this.repo.create(input);
|
||||
}
|
||||
|
||||
async update(id: string, input: { config?: Record<string, unknown>; isDefault?: boolean; description?: string }): Promise<SecretBackend> {
|
||||
await this.getById(id);
|
||||
const row = await this.repo.update(id, input);
|
||||
this.driverCache.delete(id); // config may have changed; rebuild lazily
|
||||
return row;
|
||||
}
|
||||
|
||||
async setDefault(id: string): Promise<SecretBackend> {
|
||||
await this.getById(id);
|
||||
return this.repo.setAsDefault(id);
|
||||
}
|
||||
|
||||
async delete(id: string): Promise<void> {
|
||||
const row = await this.getById(id);
|
||||
const count = await this.repo.countReferencingSecrets(id);
|
||||
if (count > 0) throw new SecretBackendInUseError(row.name, count);
|
||||
if (row.isDefault) throw new Error(`Cannot delete the default SecretBackend '${row.name}'; promote another one first`);
|
||||
await this.repo.delete(id);
|
||||
this.driverCache.delete(id);
|
||||
}
|
||||
|
||||
/** Get the driver for a given backend id, creating + caching on first call. */
|
||||
driverFor(backend: SecretBackend): SecretBackendDriver {
|
||||
const cached = this.driverCache.get(backend.id);
|
||||
if (cached) return cached;
|
||||
const driver = createDriver(backend, this.driverDeps);
|
||||
this.driverCache.set(backend.id, driver);
|
||||
return driver;
|
||||
}
|
||||
}
|
||||
43
src/mcpd/src/services/secret-backends/factory.ts
Normal file
43
src/mcpd/src/services/secret-backends/factory.ts
Normal file
@@ -0,0 +1,43 @@
|
||||
/**
|
||||
* Build a `SecretBackendDriver` from a `SecretBackend` row.
|
||||
*
|
||||
* Lives separate from the service because it's the only place aware of every
|
||||
* driver type — adding a new backend means adding one case here and one
|
||||
* driver file. Everything else (service, routes, CLI) is type-agnostic.
|
||||
*/
|
||||
import type { SecretBackend } from '@prisma/client';
|
||||
import type { SecretBackendDriver, SecretRefResolver } from './types.js';
|
||||
import { PlaintextDriver, type PlaintextDriverDeps } from './plaintext.js';
|
||||
import { OpenBaoDriver, type OpenBaoConfig } from './openbao.js';
|
||||
|
||||
export interface DriverFactoryDeps {
|
||||
plaintext: PlaintextDriverDeps;
|
||||
/** Resolves `{secretName, key}` against the plaintext backend — used by remote drivers' auth. */
|
||||
secretRefResolver: SecretRefResolver;
|
||||
/** Overridable for tests. */
|
||||
fetch?: typeof globalThis.fetch;
|
||||
}
|
||||
|
||||
export function createDriver(row: SecretBackend, deps: DriverFactoryDeps): SecretBackendDriver {
|
||||
switch (row.type) {
|
||||
case 'plaintext':
|
||||
return new PlaintextDriver(deps.plaintext);
|
||||
|
||||
case 'openbao': {
|
||||
const cfg = row.config as unknown as OpenBaoConfig;
|
||||
if (!cfg.url || !cfg.tokenSecretRef?.name || !cfg.tokenSecretRef?.key) {
|
||||
throw new Error(
|
||||
`SecretBackend '${row.name}' (openbao): config must provide url + tokenSecretRef {name, key}`,
|
||||
);
|
||||
}
|
||||
const driverDeps: { fetch?: typeof globalThis.fetch; secretRefResolver: SecretRefResolver } = {
|
||||
secretRefResolver: deps.secretRefResolver,
|
||||
};
|
||||
if (deps.fetch !== undefined) driverDeps.fetch = deps.fetch;
|
||||
return new OpenBaoDriver(cfg, driverDeps);
|
||||
}
|
||||
|
||||
default:
|
||||
throw new Error(`Unknown SecretBackend type: ${row.type}`);
|
||||
}
|
||||
}
|
||||
133
src/mcpd/src/services/secret-backends/openbao.ts
Normal file
133
src/mcpd/src/services/secret-backends/openbao.ts
Normal file
@@ -0,0 +1,133 @@
|
||||
/**
|
||||
* OpenBao (MPL 2.0 fork of HashiCorp Vault) driver for the KV v2 secrets engine.
|
||||
*
|
||||
* Uses the plain HTTP API — no third-party client — so we don't pick up a
|
||||
* Vault SDK licensing headache. Endpoints touched:
|
||||
*
|
||||
* POST <url>/v1/<mount>/data/<path> -- write
|
||||
* GET <url>/v1/<mount>/data/<path> -- read latest
|
||||
* DELETE <url>/v1/<mount>/metadata/<path> -- full delete (all versions)
|
||||
* LIST <url>/v1/<mount>/metadata/ -- for migration
|
||||
*
|
||||
* Auth: static token for v1. The token is stored in a `Secret` on the
|
||||
* plaintext backend (see `config.tokenSecretRef = { name, key }`); the driver
|
||||
* resolves it on construction via the injected `SecretRefResolver`. Follow-up
|
||||
* work (not here) adds Kubernetes ServiceAccount auth.
|
||||
*
|
||||
* Path layout inside OpenBao:
|
||||
* <mount>/<pathPrefix>/<secretName>
|
||||
* `mount` and `pathPrefix` come from the backend's `config` JSON; defaults are
|
||||
* `secret` and `mcpctl/`.
|
||||
*/
|
||||
import type { SecretBackendDriver, SecretData, ExternalRef, SecretRefResolver } from './types.js';
|
||||
|
||||
export interface OpenBaoConfig {
|
||||
url: string;
|
||||
mount?: string;
|
||||
pathPrefix?: string;
|
||||
namespace?: string;
|
||||
tokenSecretRef: { name: string; key: string };
|
||||
}
|
||||
|
||||
export interface OpenBaoDriverDeps {
|
||||
/** Injected HTTP fetcher — mockable in tests. */
|
||||
fetch?: typeof globalThis.fetch;
|
||||
secretRefResolver: SecretRefResolver;
|
||||
}
|
||||
|
||||
export class OpenBaoDriver implements SecretBackendDriver {
|
||||
readonly kind = 'openbao';
|
||||
|
||||
private readonly url: string;
|
||||
private readonly mount: string;
|
||||
private readonly pathPrefix: string;
|
||||
private readonly namespace: string | undefined;
|
||||
private readonly tokenSecretRef: { name: string; key: string };
|
||||
private readonly fetchImpl: typeof globalThis.fetch;
|
||||
private readonly resolver: SecretRefResolver;
|
||||
private cachedToken: string | undefined;
|
||||
|
||||
constructor(config: OpenBaoConfig, deps: OpenBaoDriverDeps) {
|
||||
this.url = config.url.replace(/\/+$/, '');
|
||||
this.mount = (config.mount ?? 'secret').replace(/^\/|\/$/g, '');
|
||||
this.pathPrefix = (config.pathPrefix ?? 'mcpctl').replace(/^\/|\/$/g, '');
|
||||
if (config.namespace !== undefined) this.namespace = config.namespace;
|
||||
this.tokenSecretRef = config.tokenSecretRef;
|
||||
this.fetchImpl = deps.fetch ?? globalThis.fetch;
|
||||
this.resolver = deps.secretRefResolver;
|
||||
}
|
||||
|
||||
async read(input: { name: string; externalRef: ExternalRef; data: SecretData }): Promise<SecretData> {
|
||||
const path = this.pathFor(input.name);
|
||||
const res = await this.request('GET', `/v1/${this.mount}/data/${path}`);
|
||||
if (res.status === 404) {
|
||||
throw new Error(`OpenBao: secret '${input.name}' not found at ${path}`);
|
||||
}
|
||||
if (!res.ok) throw new Error(`OpenBao read ${path}: HTTP ${res.status}`);
|
||||
const body = await res.json() as { data?: { data?: SecretData } };
|
||||
return body.data?.data ?? {};
|
||||
}
|
||||
|
||||
async write(input: { name: string; data: SecretData }): Promise<{ externalRef: ExternalRef; storedData: SecretData }> {
|
||||
const path = this.pathFor(input.name);
|
||||
const res = await this.request('POST', `/v1/${this.mount}/data/${path}`, { data: input.data });
|
||||
if (!res.ok) throw new Error(`OpenBao write ${path}: HTTP ${res.status}`);
|
||||
return { externalRef: `${this.mount}/${path}`, storedData: {} };
|
||||
}
|
||||
|
||||
async delete(input: { name: string; externalRef: ExternalRef }): Promise<void> {
|
||||
const path = this.pathFor(input.name);
|
||||
const res = await this.request('DELETE', `/v1/${this.mount}/metadata/${path}`);
|
||||
if (!res.ok && res.status !== 404) {
|
||||
throw new Error(`OpenBao delete ${path}: HTTP ${res.status}`);
|
||||
}
|
||||
}
|
||||
|
||||
async list(): Promise<Array<{ name: string; externalRef: ExternalRef }>> {
|
||||
const listPath = this.pathPrefix === '' ? '' : `${this.pathPrefix}/`;
|
||||
const res = await this.request('LIST', `/v1/${this.mount}/metadata/${listPath}`);
|
||||
if (res.status === 404) return [];
|
||||
if (!res.ok) throw new Error(`OpenBao list: HTTP ${res.status}`);
|
||||
const body = await res.json() as { data?: { keys?: string[] } };
|
||||
const keys = body.data?.keys ?? [];
|
||||
return keys
|
||||
.filter((k) => !k.endsWith('/'))
|
||||
.map((k) => ({
|
||||
name: k,
|
||||
externalRef: `${this.mount}/${this.pathPrefix === '' ? '' : `${this.pathPrefix}/`}${k}`,
|
||||
}));
|
||||
}
|
||||
|
||||
async healthCheck(): Promise<{ ok: boolean; detail?: string }> {
|
||||
try {
|
||||
const res = await this.request('GET', '/v1/sys/health');
|
||||
return { ok: res.ok, detail: `HTTP ${res.status}` };
|
||||
} catch (err) {
|
||||
return { ok: false, detail: err instanceof Error ? err.message : String(err) };
|
||||
}
|
||||
}
|
||||
|
||||
private pathFor(name: string): string {
|
||||
const safe = encodeURIComponent(name);
|
||||
return this.pathPrefix === '' ? safe : `${this.pathPrefix}/${safe}`;
|
||||
}
|
||||
|
||||
private async getToken(): Promise<string> {
|
||||
if (this.cachedToken !== undefined) return this.cachedToken;
|
||||
const token = await this.resolver.resolve(this.tokenSecretRef.name, this.tokenSecretRef.key);
|
||||
this.cachedToken = token;
|
||||
return token;
|
||||
}
|
||||
|
||||
private async request(method: string, path: string, body?: unknown): Promise<Response> {
|
||||
const token = await this.getToken();
|
||||
const headers: Record<string, string> = { 'X-Vault-Token': token };
|
||||
if (this.namespace !== undefined) headers['X-Vault-Namespace'] = this.namespace;
|
||||
if (body !== undefined) headers['Content-Type'] = 'application/json';
|
||||
|
||||
const init: RequestInit = { method, headers };
|
||||
if (body !== undefined) init.body = JSON.stringify(body);
|
||||
|
||||
return this.fetchImpl(`${this.url}${path}`, init);
|
||||
}
|
||||
}
|
||||
44
src/mcpd/src/services/secret-backends/plaintext.ts
Normal file
44
src/mcpd/src/services/secret-backends/plaintext.ts
Normal file
@@ -0,0 +1,44 @@
|
||||
/**
|
||||
* Plaintext backend driver — stores Secret.data directly in the DB column.
|
||||
*
|
||||
* This is the bootstrap/default backend. It always exists (seeded on startup)
|
||||
* so the system can hold its own backends' auth credentials (e.g. OpenBao
|
||||
* token) somewhere before the real backend is configured.
|
||||
*
|
||||
* The driver is deliberately almost a no-op: the service writes to and reads
|
||||
* from `Secret.data` directly. We still route through the driver interface so
|
||||
* the service layer can stay uniform.
|
||||
*/
|
||||
import type { SecretBackendDriver, SecretData, ExternalRef } from './types.js';
|
||||
|
||||
export interface PlaintextDriverDeps {
|
||||
/** Queries `prisma.secret.findMany(...)` for the `list` method (migration path). */
|
||||
listAllPlaintext: () => Promise<Array<{ name: string; data: SecretData }>>;
|
||||
}
|
||||
|
||||
export class PlaintextDriver implements SecretBackendDriver {
|
||||
readonly kind = 'plaintext';
|
||||
|
||||
constructor(private readonly deps: PlaintextDriverDeps) {}
|
||||
|
||||
async read(input: { name: string; externalRef: ExternalRef; data: SecretData }): Promise<SecretData> {
|
||||
return input.data;
|
||||
}
|
||||
|
||||
async write(input: { name: string; data: SecretData }): Promise<{ externalRef: ExternalRef; storedData: SecretData }> {
|
||||
return { externalRef: '', storedData: input.data };
|
||||
}
|
||||
|
||||
async delete(_input: { name: string; externalRef: ExternalRef }): Promise<void> {
|
||||
// The row deletion itself is the secret service's job; nothing remote to clean up here.
|
||||
}
|
||||
|
||||
async list(): Promise<Array<{ name: string; externalRef: ExternalRef }>> {
|
||||
const rows = await this.deps.listAllPlaintext();
|
||||
return rows.map((r) => ({ name: r.name, externalRef: '' }));
|
||||
}
|
||||
|
||||
async healthCheck(): Promise<{ ok: boolean; detail?: string }> {
|
||||
return { ok: true, detail: 'plaintext backend (DB)' };
|
||||
}
|
||||
}
|
||||
68
src/mcpd/src/services/secret-backends/types.ts
Normal file
68
src/mcpd/src/services/secret-backends/types.ts
Normal file
@@ -0,0 +1,68 @@
|
||||
/**
 * SecretBackend driver interface.
 *
 * The plaintext backend stores `data` in the DB column directly.
 * Remote backends (openbao, vault, cloud KV) store an opaque `externalRef`
 * and fetch the actual data on demand.
 *
 * Drivers are stateless factories keyed on a `SecretBackend` config row.
 * Secret management (CRUD, naming) stays in the service layer; drivers
 * handle only the storage I/O.
 */

/**
 * Opaque reference written by a driver on `write` and read back on `read`.
 *
 * For the plaintext driver this is unused — the data itself lives in
 * `Secret.data`. For openbao it's a string like `secret/data/mcpctl/mysecret`
 * that tells the driver where to fetch on next `read`.
 */
export type ExternalRef = string;

/** The shape of secret data — a flat map of key → value. */
export type SecretData = Record<string, string>;

export interface SecretBackendDriver {
  /** Human-readable identifier, included in errors. */
  readonly kind: string;

  /**
   * Read the stored secret. For plaintext this is a no-op — the data is
   * already in the Secret row and passed in here for symmetry. For remote
   * backends this makes the network call.
   */
  read(input: { name: string; externalRef: ExternalRef; data: SecretData }): Promise<SecretData>;

  /**
   * Store a new secret (or a new version of an existing one). Returns the
   * reference (or an empty string for plaintext) + the `data` object that
   * should be persisted on the Secret row (empty for remote backends).
   */
  write(input: { name: string; data: SecretData }): Promise<{ externalRef: ExternalRef; storedData: SecretData }>;

  /** Remove the secret from the backend. Idempotent — missing is OK. */
  delete(input: { name: string; externalRef: ExternalRef }): Promise<void>;

  /** List everything the backend knows about. Used for migration + drift detection. */
  list(): Promise<Array<{ name: string; externalRef: ExternalRef }>>;

  /** Optional: health probe. Used by `mcpctl describe secretbackend`. */
  healthCheck?(): Promise<{ ok: boolean; detail?: string }>;
}

/**
 * Stored config for a SecretBackend row; dispatched on `type`.
 * NOTE(review): the factory in this change set dispatches on the Prisma
 * `SecretBackend` type, not on this shape — confirm `BackendRow` still has
 * callers, or remove it.
 */
export interface BackendRow {
  id: string;
  name: string;
  type: string;
  config: Record<string, unknown>;
}

/**
 * Dependency passed to the openbao driver so it can resolve its own auth
 * token (stored in the plaintext backend — chicken-and-egg bootstrap).
 * Implemented by the SecretService so we don't have a circular import.
 */
export interface SecretRefResolver {
  resolve(secretName: string, key: string): Promise<string>;
}
|
||||
113
src/mcpd/src/services/secret-migrate.service.ts
Normal file
113
src/mcpd/src/services/secret-migrate.service.ts
Normal file
@@ -0,0 +1,113 @@
|
||||
/**
|
||||
* Move secrets from one SecretBackend to another.
|
||||
*
|
||||
* Per-secret atomicity: for each secret we
|
||||
* 1. resolve the data via the source driver,
|
||||
* 2. write it to the destination driver,
|
||||
* 3. update the Secret row (flip backendId + set new externalRef, clear data),
|
||||
* 4. optionally delete from source.
|
||||
*
|
||||
* If the process dies between 2 and 3, the destination has an orphan entry
|
||||
* but the row still points at the source — restart is idempotent (skips rows
|
||||
* already on destination). We never run a batch-wide transaction because each
|
||||
* remote driver write is a real HTTP call that can't roll back.
|
||||
*/
|
||||
import type { Secret } from '@prisma/client';
|
||||
import type { ISecretRepository } from '../repositories/interfaces.js';
|
||||
import type { SecretBackendService } from './secret-backend.service.js';
|
||||
|
||||
export interface MigrateOptions {
|
||||
/** Source backend name. */
|
||||
from: string;
|
||||
/** Destination backend name. */
|
||||
to: string;
|
||||
/** If provided, only migrate secrets with these names. Otherwise migrate all. */
|
||||
names?: string[];
|
||||
/** Leave the source copy intact after migration. Default false. */
|
||||
keepSource?: boolean;
|
||||
}
|
||||
|
||||
export interface MigrateResult {
|
||||
migrated: Array<{ name: string }>;
|
||||
skipped: Array<{ name: string; reason: string }>;
|
||||
failed: Array<{ name: string; error: string }>;
|
||||
}
|
||||
|
||||
export class SecretMigrateService {
|
||||
constructor(
|
||||
private readonly secretRepo: ISecretRepository,
|
||||
private readonly backends: SecretBackendService,
|
||||
) {}
|
||||
|
||||
async migrate(opts: MigrateOptions): Promise<MigrateResult> {
|
||||
const source = await this.backends.getByName(opts.from);
|
||||
const dest = await this.backends.getByName(opts.to);
|
||||
if (source.id === dest.id) {
|
||||
return { migrated: [], skipped: [], failed: [{ name: '*', error: 'source and destination are the same backend' }] };
|
||||
}
|
||||
|
||||
const sourceDriver = this.backends.driverFor(source);
|
||||
const destDriver = this.backends.driverFor(dest);
|
||||
|
||||
let secrets = await this.secretRepo.findByBackend(source.id);
|
||||
if (opts.names && opts.names.length > 0) {
|
||||
const wanted = new Set(opts.names);
|
||||
secrets = secrets.filter((s) => wanted.has(s.name));
|
||||
}
|
||||
|
||||
const result: MigrateResult = { migrated: [], skipped: [], failed: [] };
|
||||
for (const secret of secrets) {
|
||||
try {
|
||||
// Skip if somehow already on destination (re-run safety).
|
||||
if (secret.backendId === dest.id) {
|
||||
result.skipped.push({ name: secret.name, reason: 'already on destination' });
|
||||
continue;
|
||||
}
|
||||
|
||||
const data = await sourceDriver.read({
|
||||
name: secret.name,
|
||||
externalRef: secret.externalRef,
|
||||
data: secret.data as Record<string, string>,
|
||||
});
|
||||
const written = await destDriver.write({ name: secret.name, data });
|
||||
|
||||
await this.secretRepo.update(secret.id, {
|
||||
backendId: dest.id,
|
||||
data: written.storedData,
|
||||
externalRef: written.externalRef,
|
||||
});
|
||||
|
||||
if (opts.keepSource !== true) {
|
||||
await sourceDriver.delete({ name: secret.name, externalRef: secret.externalRef })
|
||||
.catch((err: unknown) => {
|
||||
// Destination is intact; best-effort source cleanup. Log + continue.
|
||||
const msg = err instanceof Error ? err.message : String(err);
|
||||
result.skipped.push({ name: secret.name, reason: `migrated OK; source cleanup failed: ${msg}` });
|
||||
});
|
||||
}
|
||||
|
||||
result.migrated.push({ name: secret.name });
|
||||
} catch (err) {
|
||||
const msg = err instanceof Error ? err.message : String(err);
|
||||
result.failed.push({ name: secret.name, error: msg });
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/** Track which secrets would be touched by a migrate run, without performing it. */
|
||||
async dryRun(opts: MigrateOptions): Promise<Array<Secret>> {
|
||||
const source = await this.backends.getByName(opts.from);
|
||||
let secrets = await this.secretRepo.findByBackend(source.id);
|
||||
if (opts.names && opts.names.length > 0) {
|
||||
const wanted = new Set(opts.names);
|
||||
secrets = secrets.filter((s) => wanted.has(s.name));
|
||||
}
|
||||
return secrets;
|
||||
}
|
||||
}
|
||||
|
||||
export interface SecretMigrateRouteDeps {
|
||||
migrateService: SecretMigrateService;
|
||||
}
|
||||
@@ -1,10 +1,23 @@
|
||||
/**
|
||||
* SecretService — CRUD over `Secret` rows.
|
||||
*
|
||||
* Dispatches storage I/O through the `SecretBackendService`: on create/update
|
||||
* the default backend's driver writes, and the resulting {externalRef,
|
||||
* storedData} is persisted on the row. On read (`resolveData`) the row's
|
||||
* `backendId` selects the driver, which fetches the actual data.
|
||||
*/
|
||||
import type { Secret } from '@prisma/client';
|
||||
import type { ISecretRepository } from '../repositories/interfaces.js';
|
||||
import type { SecretBackendService } from './secret-backend.service.js';
|
||||
import { CreateSecretSchema, UpdateSecretSchema } from '../validation/secret.schema.js';
|
||||
import { NotFoundError, ConflictError } from './mcp-server.service.js';
|
||||
import type { SecretRefResolver } from './secret-backends/types.js';
|
||||
|
||||
export class SecretService {
|
||||
constructor(private readonly repo: ISecretRepository) {}
|
||||
export class SecretService implements SecretRefResolver {
|
||||
constructor(
|
||||
private readonly repo: ISecretRepository,
|
||||
private readonly backends: SecretBackendService,
|
||||
) {}
|
||||
|
||||
async list(): Promise<Secret[]> {
|
||||
return this.repo.findAll();
|
||||
@@ -26,47 +39,79 @@ export class SecretService {
|
||||
return secret;
|
||||
}
|
||||
|
||||
/** Return the secret's actual data by dispatching through its backend driver. */
|
||||
async resolveData(secret: Secret): Promise<Record<string, string>> {
|
||||
const backend = await this.backends.getById(secret.backendId);
|
||||
const driver = this.backends.driverFor(backend);
|
||||
return driver.read({
|
||||
name: secret.name,
|
||||
externalRef: secret.externalRef,
|
||||
data: secret.data as Record<string, string>,
|
||||
});
|
||||
}
|
||||
|
||||
/** Convenience: resolve {secretName, key} → string. Implements SecretRefResolver. */
|
||||
async resolve(secretName: string, key: string): Promise<string> {
|
||||
const secret = await this.getByName(secretName);
|
||||
const data = await this.resolveData(secret);
|
||||
const value = data[key];
|
||||
if (value === undefined) {
|
||||
throw new NotFoundError(`Secret '${secretName}' has no key '${key}'`);
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
async create(input: unknown): Promise<Secret> {
|
||||
const data = CreateSecretSchema.parse(input);
|
||||
|
||||
const existing = await this.repo.findByName(data.name);
|
||||
if (existing !== null) {
|
||||
throw new ConflictError(`Secret already exists: ${data.name}`);
|
||||
}
|
||||
|
||||
return this.repo.create(data);
|
||||
const backend = await this.backends.getDefault();
|
||||
const driver = this.backends.driverFor(backend);
|
||||
const written = await driver.write({ name: data.name, data: data.data });
|
||||
return this.repo.create({
|
||||
name: data.name,
|
||||
backendId: backend.id,
|
||||
data: written.storedData,
|
||||
externalRef: written.externalRef,
|
||||
});
|
||||
}
|
||||
|
||||
async update(id: string, input: unknown): Promise<Secret> {
|
||||
const data = UpdateSecretSchema.parse(input);
|
||||
|
||||
// Verify exists
|
||||
await this.getById(id);
|
||||
|
||||
return this.repo.update(id, data);
|
||||
const existing = await this.getById(id);
|
||||
const backend = await this.backends.getById(existing.backendId);
|
||||
const driver = this.backends.driverFor(backend);
|
||||
const written = await driver.write({ name: existing.name, data: data.data });
|
||||
return this.repo.update(id, {
|
||||
data: written.storedData,
|
||||
externalRef: written.externalRef,
|
||||
});
|
||||
}
|
||||
|
||||
async delete(id: string): Promise<void> {
|
||||
// Verify exists
|
||||
await this.getById(id);
|
||||
const existing = await this.getById(id);
|
||||
const backend = await this.backends.getById(existing.backendId);
|
||||
const driver = this.backends.driverFor(backend);
|
||||
await driver.delete({ name: existing.name, externalRef: existing.externalRef });
|
||||
await this.repo.delete(id);
|
||||
}
|
||||
|
||||
// ── Backup/restore helpers ──
|
||||
// ── Backup/restore helpers (preserved) ──
|
||||
|
||||
async upsertByName(data: Record<string, unknown>): Promise<Secret> {
|
||||
const name = data['name'] as string;
|
||||
const existing = await this.repo.findByName(name);
|
||||
if (existing !== null) {
|
||||
const { name: _, ...updateFields } = data;
|
||||
return this.repo.update(existing.id, updateFields as Parameters<ISecretRepository['update']>[1]);
|
||||
return this.update(existing.id, data);
|
||||
}
|
||||
return this.repo.create(data as Parameters<ISecretRepository['create']>[0]);
|
||||
return this.create(data);
|
||||
}
|
||||
|
||||
async deleteByName(name: string): Promise<void> {
|
||||
const existing = await this.repo.findByName(name);
|
||||
if (existing === null) return;
|
||||
await this.repo.delete(existing.id);
|
||||
await this.delete(existing.id);
|
||||
}
|
||||
}
|
||||
|
||||
21
src/mcpd/src/validation/mcp-token.schema.ts
Normal file
21
src/mcpd/src/validation/mcp-token.schema.ts
Normal file
@@ -0,0 +1,21 @@
|
||||
import { z } from 'zod';
|
||||
import { RbacRoleBindingSchema } from './rbac-definition.schema.js';
|
||||
|
||||
/** How the new token's RBAC starts out: no bindings, or a clone of the creator's. */
export const McpTokenRbacMode = z.enum(['empty', 'clone']);
export type McpTokenRbacMode = z.infer<typeof McpTokenRbacMode>;

/** Request payload for creating an McpToken (project-scoped access token). */
export const CreateMcpTokenSchema = z.object({
  name: z
    .string()
    .min(1)
    .max(100)
    .regex(/^[a-z0-9-]+$/, 'Name must be lowercase alphanumeric with hyphens'),
  projectId: z.string().min(1),
  description: z.string().optional(),
  // Accepts an ISO datetime string, a Date, or explicit null (no expiry).
  // NOTE(review): string vs Date is passed through as-is — confirm downstream normalizes.
  expiresAt: z.union([z.string().datetime(), z.date(), z.null()]).optional(),
  rbacMode: McpTokenRbacMode.default('empty'),
  /** Explicit bindings, added on top of the `rbacMode` base (empty or clone). */
  bindings: z.array(RbacRoleBindingSchema).default([]),
});

export type CreateMcpTokenInput = z.infer<typeof CreateMcpTokenSchema>;
|
||||
@@ -1,7 +1,7 @@
|
||||
import { z } from 'zod';
|
||||
|
||||
export const RBAC_ROLES = ['edit', 'view', 'create', 'delete', 'run', 'expose'] as const;
|
||||
export const RBAC_RESOURCES = ['*', 'servers', 'instances', 'secrets', 'projects', 'templates', 'users', 'groups', 'rbac', 'prompts', 'promptrequests'] as const;
|
||||
export const RBAC_RESOURCES = ['*', 'servers', 'instances', 'secrets', 'secretbackends', 'projects', 'templates', 'users', 'groups', 'rbac', 'prompts', 'promptrequests', 'mcptokens'] as const;
|
||||
|
||||
/** Singular→plural map for resource names. */
|
||||
const RESOURCE_ALIASES: Record<string, string> = {
|
||||
@@ -14,6 +14,8 @@ const RESOURCE_ALIASES: Record<string, string> = {
|
||||
group: 'groups',
|
||||
prompt: 'prompts',
|
||||
promptrequest: 'promptrequests',
|
||||
mcptoken: 'mcptokens',
|
||||
secretbackend: 'secretbackends',
|
||||
};
|
||||
|
||||
/** Normalize a resource name to its canonical plural form. */
|
||||
@@ -22,7 +24,7 @@ export function normalizeResource(resource: string): string {
|
||||
}
|
||||
|
||||
export const RbacSubjectSchema = z.object({
|
||||
kind: z.enum(['User', 'Group', 'ServiceAccount']),
|
||||
kind: z.enum(['User', 'Group', 'ServiceAccount', 'McpToken']),
|
||||
name: z.string().min(1),
|
||||
});
|
||||
|
||||
|
||||
@@ -99,3 +99,76 @@ describe('auth middleware', () => {
|
||||
expect(findSession).toHaveBeenCalledWith('my-token');
|
||||
});
|
||||
});
|
||||
|
||||
describe('auth middleware — McpToken dispatch', () => {
|
||||
async function setupAppWithMcpToken(deps: Parameters<typeof createAuthMiddleware>[0]) {
|
||||
app = Fastify({ logger: false });
|
||||
const authMiddleware = createAuthMiddleware(deps);
|
||||
app.addHook('preHandler', authMiddleware);
|
||||
app.get('/protected', async (request) => ({
|
||||
userId: request.userId,
|
||||
mcpToken: request.mcpToken,
|
||||
}));
|
||||
return app.ready();
|
||||
}
|
||||
|
||||
it('routes mcpctl_pat_ bearers to findMcpToken and skips findSession', async () => {
|
||||
const findSession = vi.fn(async () => null);
|
||||
const findMcpToken = vi.fn(async () => ({
|
||||
tokenId: 'ctok1',
|
||||
tokenName: 'mytok',
|
||||
tokenSha: 'deadbeef',
|
||||
projectId: 'cproj1',
|
||||
projectName: 'myproj',
|
||||
ownerId: 'cuser1',
|
||||
expiresAt: null,
|
||||
revokedAt: null,
|
||||
}));
|
||||
await setupAppWithMcpToken({ findSession, findMcpToken });
|
||||
const res = await app.inject({
|
||||
method: 'GET',
|
||||
url: '/protected',
|
||||
headers: { authorization: 'Bearer mcpctl_pat_abcdefghij' },
|
||||
});
|
||||
expect(res.statusCode).toBe(200);
|
||||
expect(findSession).not.toHaveBeenCalled();
|
||||
expect(findMcpToken).toHaveBeenCalledTimes(1);
|
||||
const body = res.json<{ userId: string; mcpToken: { tokenName: string; projectName: string } }>();
|
||||
expect(body.userId).toBe('cuser1');
|
||||
expect(body.mcpToken.tokenName).toBe('mytok');
|
||||
expect(body.mcpToken.projectName).toBe('myproj');
|
||||
});
|
||||
|
||||
it('returns 401 for a revoked McpToken', async () => {
|
||||
await setupAppWithMcpToken({
|
||||
findSession: async () => null,
|
||||
findMcpToken: async () => ({
|
||||
tokenId: 'ctok1',
|
||||
tokenName: 'mytok',
|
||||
tokenSha: 'x',
|
||||
projectId: 'p',
|
||||
projectName: 'p',
|
||||
ownerId: 'u',
|
||||
expiresAt: null,
|
||||
revokedAt: new Date(),
|
||||
}),
|
||||
});
|
||||
const res = await app.inject({
|
||||
method: 'GET',
|
||||
url: '/protected',
|
||||
headers: { authorization: 'Bearer mcpctl_pat_revoked' },
|
||||
});
|
||||
expect(res.statusCode).toBe(401);
|
||||
expect(res.json<{ error: string }>().error).toContain('revoked');
|
||||
});
|
||||
|
||||
it('returns 401 when a mcpctl_pat_ bearer arrives but findMcpToken is not configured', async () => {
|
||||
await setupAppWithMcpToken({ findSession: async () => null });
|
||||
const res = await app.inject({
|
||||
method: 'GET',
|
||||
url: '/protected',
|
||||
headers: { authorization: 'Bearer mcpctl_pat_no-lookup-wired' },
|
||||
});
|
||||
expect(res.statusCode).toBe(401);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -9,6 +9,25 @@ import type { IProjectRepository } from '../src/repositories/project.repository.
|
||||
import type { IUserRepository } from '../src/repositories/user.repository.js';
|
||||
import type { IGroupRepository } from '../src/repositories/group.repository.js';
|
||||
import type { IRbacDefinitionRepository } from '../src/repositories/rbac-definition.repository.js';
|
||||
import type { SecretService } from '../src/services/secret.service.js';
|
||||
|
||||
/**
|
||||
* Minimal SecretService shim over a mock repo — just the `.create()` / `.update()`
|
||||
* methods that RestoreService calls. We don't need the backend-dispatch path
|
||||
* here since the restore happy-path tests don't exercise remote backends.
|
||||
*/
|
||||
function mockSecretService(repo: ISecretRepository): SecretService {
|
||||
return {
|
||||
create: vi.fn(async (input: unknown) => {
|
||||
const data = input as { name: string; data: Record<string, string> };
|
||||
return repo.create({ name: data.name, backendId: 'backend-plaintext', data: data.data, externalRef: '' });
|
||||
}),
|
||||
update: vi.fn(async (id: string, input: unknown) => {
|
||||
const data = input as { data: Record<string, string> };
|
||||
return repo.update(id, { data: data.data });
|
||||
}),
|
||||
} as unknown as SecretService;
|
||||
}
|
||||
|
||||
// Mock data
|
||||
const mockServers = [
|
||||
@@ -295,7 +314,7 @@ describe('RestoreService', () => {
|
||||
(userRepo.findByEmail as ReturnType<typeof vi.fn>).mockResolvedValue(null);
|
||||
(groupRepo.findByName as ReturnType<typeof vi.fn>).mockResolvedValue(null);
|
||||
(rbacRepo.findByName as ReturnType<typeof vi.fn>).mockResolvedValue(null);
|
||||
restoreService = new RestoreService(serverRepo, projectRepo, secretRepo, userRepo, groupRepo, rbacRepo);
|
||||
restoreService = new RestoreService(serverRepo, projectRepo, secretRepo, mockSecretService(secretRepo), userRepo, groupRepo, rbacRepo);
|
||||
});
|
||||
|
||||
const validBundle = {
|
||||
@@ -576,7 +595,7 @@ describe('Backup Routes', () => {
|
||||
(rGroupRepo.findByName as ReturnType<typeof vi.fn>).mockResolvedValue(null);
|
||||
const rRbacRepo = mockRbacRepo();
|
||||
(rRbacRepo.findByName as ReturnType<typeof vi.fn>).mockResolvedValue(null);
|
||||
restoreService = new RestoreService(rSRepo, rPrRepo, rSecRepo, rUserRepo, rGroupRepo, rRbacRepo);
|
||||
restoreService = new RestoreService(rSRepo, rPrRepo, rSecRepo, mockSecretService(rSecRepo), rUserRepo, rGroupRepo, rRbacRepo);
|
||||
});
|
||||
|
||||
async function buildApp() {
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import { describe, it, expect, vi } from 'vitest';
|
||||
import { resolveServerEnv } from '../src/services/env-resolver.js';
|
||||
import type { ISecretRepository } from '../src/repositories/interfaces.js';
|
||||
import { resolveServerEnv, type SecretResolver } from '../src/services/env-resolver.js';
|
||||
import type { McpServer } from '@prisma/client';
|
||||
|
||||
function makeServer(env: unknown[]): McpServer {
|
||||
@@ -23,18 +22,16 @@ function makeServer(env: unknown[]): McpServer {
|
||||
} as McpServer;
|
||||
}
|
||||
|
||||
function mockSecretRepo(secrets: Record<string, Record<string, string>>): ISecretRepository {
|
||||
/** A SecretResolver backed by a {secretName: {key: value}} map. */
|
||||
function mockResolver(secrets: Record<string, Record<string, string>>): SecretResolver {
|
||||
return {
|
||||
findAll: vi.fn(async () => []),
|
||||
findById: vi.fn(async () => null),
|
||||
findByName: vi.fn(async (name: string) => {
|
||||
resolve: vi.fn(async (name: string, key: string): Promise<string> => {
|
||||
const data = secrets[name];
|
||||
if (!data) return null;
|
||||
return { id: `sec-${name}`, name, data, version: 1, createdAt: new Date(), updatedAt: new Date() };
|
||||
if (!data) throw new Error(`Secret '${name}' not found`);
|
||||
const value = data[key];
|
||||
if (value === undefined) throw new Error(`Key '${key}' not found in secret '${name}'`);
|
||||
return value;
|
||||
}),
|
||||
create: vi.fn(async () => ({} as never)),
|
||||
update: vi.fn(async () => ({} as never)),
|
||||
delete: vi.fn(async () => {}),
|
||||
};
|
||||
}
|
||||
|
||||
@@ -44,8 +41,7 @@ describe('resolveServerEnv', () => {
|
||||
{ name: 'FOO', value: 'bar' },
|
||||
{ name: 'BAZ', value: 'qux' },
|
||||
]);
|
||||
const repo = mockSecretRepo({});
|
||||
const result = await resolveServerEnv(server, repo);
|
||||
const result = await resolveServerEnv(server, mockResolver({}));
|
||||
expect(result).toEqual({ FOO: 'bar', BAZ: 'qux' });
|
||||
});
|
||||
|
||||
@@ -53,10 +49,8 @@ describe('resolveServerEnv', () => {
|
||||
const server = makeServer([
|
||||
{ name: 'TOKEN', valueFrom: { secretRef: { name: 'ha-creds', key: 'HOMEASSISTANT_TOKEN' } } },
|
||||
]);
|
||||
const repo = mockSecretRepo({
|
||||
'ha-creds': { HOMEASSISTANT_TOKEN: 'secret-token-123' },
|
||||
});
|
||||
const result = await resolveServerEnv(server, repo);
|
||||
const resolver = mockResolver({ 'ha-creds': { HOMEASSISTANT_TOKEN: 'secret-token-123' } });
|
||||
const result = await resolveServerEnv(server, resolver);
|
||||
expect(result).toEqual({ TOKEN: 'secret-token-123' });
|
||||
});
|
||||
|
||||
@@ -65,48 +59,42 @@ describe('resolveServerEnv', () => {
|
||||
{ name: 'URL', value: 'https://ha.local' },
|
||||
{ name: 'TOKEN', valueFrom: { secretRef: { name: 'creds', key: 'TOKEN' } } },
|
||||
]);
|
||||
const repo = mockSecretRepo({
|
||||
creds: { TOKEN: 'my-token' },
|
||||
});
|
||||
const result = await resolveServerEnv(server, repo);
|
||||
const resolver = mockResolver({ creds: { TOKEN: 'my-token' } });
|
||||
const result = await resolveServerEnv(server, resolver);
|
||||
expect(result).toEqual({ URL: 'https://ha.local', TOKEN: 'my-token' });
|
||||
});
|
||||
|
||||
it('caches secret lookups', async () => {
|
||||
it('calls the resolver once per distinct ref', async () => {
|
||||
const server = makeServer([
|
||||
{ name: 'A', valueFrom: { secretRef: { name: 'shared', key: 'KEY_A' } } },
|
||||
{ name: 'B', valueFrom: { secretRef: { name: 'shared', key: 'KEY_B' } } },
|
||||
]);
|
||||
const repo = mockSecretRepo({
|
||||
shared: { KEY_A: 'val-a', KEY_B: 'val-b' },
|
||||
});
|
||||
const result = await resolveServerEnv(server, repo);
|
||||
const resolver = mockResolver({ shared: { KEY_A: 'val-a', KEY_B: 'val-b' } });
|
||||
const result = await resolveServerEnv(server, resolver);
|
||||
expect(result).toEqual({ A: 'val-a', B: 'val-b' });
|
||||
expect(repo.findByName).toHaveBeenCalledTimes(1);
|
||||
// Resolver is called per-entry now — caching moved to the SecretService layer,
|
||||
// which is where downstream drivers can be hit at most once per (name, key) pair.
|
||||
expect(resolver.resolve).toHaveBeenCalledTimes(2);
|
||||
});
|
||||
|
||||
it('throws when secret not found', async () => {
|
||||
const server = makeServer([
|
||||
{ name: 'TOKEN', valueFrom: { secretRef: { name: 'missing', key: 'TOKEN' } } },
|
||||
]);
|
||||
const repo = mockSecretRepo({});
|
||||
await expect(resolveServerEnv(server, repo)).rejects.toThrow("Secret 'missing' not found");
|
||||
await expect(resolveServerEnv(server, mockResolver({}))).rejects.toThrow(/Secret 'missing' not found/);
|
||||
});
|
||||
|
||||
it('throws when secret key not found', async () => {
|
||||
const server = makeServer([
|
||||
{ name: 'TOKEN', valueFrom: { secretRef: { name: 'creds', key: 'NONEXISTENT' } } },
|
||||
]);
|
||||
const repo = mockSecretRepo({
|
||||
creds: { OTHER_KEY: 'val' },
|
||||
});
|
||||
await expect(resolveServerEnv(server, repo)).rejects.toThrow("Key 'NONEXISTENT' not found in secret 'creds'");
|
||||
const resolver = mockResolver({ creds: { OTHER_KEY: 'val' } });
|
||||
await expect(resolveServerEnv(server, resolver)).rejects.toThrow(/Key 'NONEXISTENT' not found/);
|
||||
});
|
||||
|
||||
it('returns empty map for empty env', async () => {
|
||||
const server = makeServer([]);
|
||||
const repo = mockSecretRepo({});
|
||||
const result = await resolveServerEnv(server, repo);
|
||||
const result = await resolveServerEnv(server, mockResolver({}));
|
||||
expect(result).toEqual({});
|
||||
});
|
||||
});
|
||||
|
||||
246
src/mcpd/tests/mcp-token-service.test.ts
Normal file
246
src/mcpd/tests/mcp-token-service.test.ts
Normal file
@@ -0,0 +1,246 @@
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import { McpTokenService, PermissionCeilingError } from '../src/services/mcp-token.service.js';
|
||||
import { NotFoundError, ConflictError } from '../src/services/mcp-server.service.js';
|
||||
import type { IMcpTokenRepository, McpTokenWithRelations } from '../src/repositories/interfaces.js';
|
||||
import type { IProjectRepository } from '../src/repositories/project.repository.js';
|
||||
import type { IRbacDefinitionRepository } from '../src/repositories/rbac-definition.repository.js';
|
||||
import type { RbacService } from '../src/services/rbac.service.js';
|
||||
import { hashToken, isMcpToken, TOKEN_PREFIX } from '@mcpctl/shared';
|
||||
|
||||
const PROJECT = { id: 'cproj1', name: 'myproj' };
|
||||
|
||||
function makeRow(overrides: Partial<McpTokenWithRelations> = {}): McpTokenWithRelations {
|
||||
return {
|
||||
id: 'ctok1',
|
||||
name: 'mytok',
|
||||
projectId: PROJECT.id,
|
||||
tokenHash: 'deadbeef',
|
||||
tokenPrefix: 'mcpctl_pat_abcd',
|
||||
ownerId: 'cuser1',
|
||||
description: '',
|
||||
createdAt: new Date(),
|
||||
expiresAt: null,
|
||||
lastUsedAt: null,
|
||||
revokedAt: null,
|
||||
project: PROJECT,
|
||||
owner: { id: 'cuser1', email: 'alice@example.com' },
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
function mockTokenRepo(): IMcpTokenRepository {
|
||||
return {
|
||||
findAll: vi.fn(async () => []),
|
||||
findById: vi.fn(async () => null),
|
||||
findByHash: vi.fn(async () => null),
|
||||
findByNameAndProject: vi.fn(async () => null),
|
||||
create: vi.fn(async (input) => makeRow({
|
||||
name: input.name,
|
||||
projectId: input.projectId,
|
||||
tokenHash: input.tokenHash,
|
||||
tokenPrefix: input.tokenPrefix,
|
||||
ownerId: input.ownerId,
|
||||
description: input.description ?? '',
|
||||
expiresAt: input.expiresAt ?? null,
|
||||
})),
|
||||
revoke: vi.fn(async (id) => makeRow({ id, revokedAt: new Date() })),
|
||||
touchLastUsed: vi.fn(async () => {}),
|
||||
delete: vi.fn(async () => {}),
|
||||
};
|
||||
}
|
||||
|
||||
function mockProjectRepo(): IProjectRepository {
|
||||
return {
|
||||
findById: vi.fn(async (id) => (id === PROJECT.id ? PROJECT : null)),
|
||||
findByName: vi.fn(async (name) => (name === PROJECT.name ? PROJECT : null)),
|
||||
// minimal stubs for the rest — not exercised in these tests
|
||||
findAll: vi.fn(async () => []),
|
||||
create: vi.fn(),
|
||||
update: vi.fn(),
|
||||
delete: vi.fn(),
|
||||
attachServer: vi.fn(),
|
||||
detachServer: vi.fn(),
|
||||
listServers: vi.fn(async () => []),
|
||||
} as unknown as IProjectRepository;
|
||||
}
|
||||
|
||||
function mockRbacRepo(): IRbacDefinitionRepository {
|
||||
return {
|
||||
findAll: vi.fn(async () => []),
|
||||
findById: vi.fn(async () => null),
|
||||
findByName: vi.fn(async () => null),
|
||||
create: vi.fn(async () => ({ id: 'rbac-1', name: 'x', subjects: [], roleBindings: [], version: 1, createdAt: new Date(), updatedAt: new Date() })),
|
||||
update: vi.fn(),
|
||||
delete: vi.fn(async () => {}),
|
||||
};
|
||||
}
|
||||
|
||||
function mockRbacService(overrides: Partial<RbacService> = {}): RbacService {
|
||||
return {
|
||||
canAccess: vi.fn(async () => true),
|
||||
canRunOperation: vi.fn(async () => true),
|
||||
getAllowedScope: vi.fn(async () => ({ wildcard: true, names: new Set() })),
|
||||
getPermissions: vi.fn(async () => []),
|
||||
...overrides,
|
||||
} as unknown as RbacService;
|
||||
}
|
||||
|
||||
describe('McpTokenService.create', () => {
|
||||
let tokenRepo: ReturnType<typeof mockTokenRepo>;
|
||||
let projectRepo: IProjectRepository;
|
||||
let rbacRepo: ReturnType<typeof mockRbacRepo>;
|
||||
let rbacService: RbacService;
|
||||
let service: McpTokenService;
|
||||
|
||||
beforeEach(() => {
|
||||
tokenRepo = mockTokenRepo();
|
||||
projectRepo = mockProjectRepo();
|
||||
rbacRepo = mockRbacRepo();
|
||||
rbacService = mockRbacService();
|
||||
service = new McpTokenService(tokenRepo, projectRepo, rbacRepo, rbacService);
|
||||
});
|
||||
|
||||
it('creates a token with no bindings (rbacMode=empty, default)', async () => {
|
||||
const result = await service.create('cuser1', {
|
||||
name: 'mytok',
|
||||
projectId: PROJECT.id,
|
||||
});
|
||||
expect(result.raw).toMatch(new RegExp(`^${TOKEN_PREFIX}`));
|
||||
expect(isMcpToken(result.raw)).toBe(true);
|
||||
expect(tokenRepo.create).toHaveBeenCalledTimes(1);
|
||||
// Hash must be persisted, never raw
|
||||
const args = vi.mocked(tokenRepo.create).mock.calls[0]![0];
|
||||
expect(args.tokenHash).toBe(hashToken(result.raw));
|
||||
expect(args.tokenPrefix).toBe(result.raw.slice(0, 16));
|
||||
// No RBAC definition should be created when there are no bindings
|
||||
expect(rbacRepo.create).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('creates an RbacDefinition with subject McpToken:<sha> when bindings are given', async () => {
|
||||
const result = await service.create('cuser1', {
|
||||
name: 'mytok',
|
||||
projectId: PROJECT.id,
|
||||
bindings: [{ role: 'view', resource: 'servers' }],
|
||||
});
|
||||
expect(rbacRepo.create).toHaveBeenCalledTimes(1);
|
||||
const defArgs = vi.mocked(rbacRepo.create).mock.calls[0]![0];
|
||||
const subjects = defArgs.subjects as Array<{ kind: string; name: string }>;
|
||||
expect(subjects).toEqual([{ kind: 'McpToken', name: hashToken(result.raw) }]);
|
||||
expect(defArgs.roleBindings).toEqual([{ role: 'view', resource: 'servers' }]);
|
||||
});
|
||||
|
||||
it('rejects bindings the creator does not have (ceiling violation)', async () => {
|
||||
rbacService = mockRbacService({
|
||||
canAccess: vi.fn(async () => false),
|
||||
} as Partial<RbacService>);
|
||||
service = new McpTokenService(tokenRepo, projectRepo, rbacRepo, rbacService);
|
||||
|
||||
await expect(
|
||||
service.create('cuser1', {
|
||||
name: 'mytok',
|
||||
projectId: PROJECT.id,
|
||||
bindings: [{ role: 'edit', resource: 'servers' }],
|
||||
}),
|
||||
).rejects.toThrow(PermissionCeilingError);
|
||||
expect(tokenRepo.create).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('clones the creator\'s permissions when rbacMode=clone', async () => {
|
||||
rbacService = mockRbacService({
|
||||
getPermissions: vi.fn(async () => [
|
||||
{ role: 'view', resource: 'servers' },
|
||||
{ role: 'run', action: 'logs' },
|
||||
]),
|
||||
} as Partial<RbacService>);
|
||||
service = new McpTokenService(tokenRepo, projectRepo, rbacRepo, rbacService);
|
||||
|
||||
await service.create('cuser1', {
|
||||
name: 'mytok',
|
||||
projectId: PROJECT.id,
|
||||
rbacMode: 'clone',
|
||||
});
|
||||
expect(rbacRepo.create).toHaveBeenCalledTimes(1);
|
||||
const defArgs = vi.mocked(rbacRepo.create).mock.calls[0]![0];
|
||||
expect(defArgs.roleBindings).toEqual([
|
||||
{ role: 'view', resource: 'servers' },
|
||||
{ role: 'run', action: 'logs' },
|
||||
]);
|
||||
});
|
||||
|
||||
it('throws NotFoundError if project does not exist', async () => {
|
||||
await expect(
|
||||
service.create('cuser1', { name: 'mytok', projectId: 'nope' }),
|
||||
).rejects.toThrow(NotFoundError);
|
||||
});
|
||||
|
||||
it('throws ConflictError if active token with same name in same project exists', async () => {
|
||||
vi.mocked(tokenRepo.findByNameAndProject).mockResolvedValueOnce(makeRow());
|
||||
await expect(
|
||||
service.create('cuser1', { name: 'mytok', projectId: PROJECT.id }),
|
||||
).rejects.toThrow(ConflictError);
|
||||
});
|
||||
});
|
||||
|
||||
describe('McpTokenService.introspectRaw', () => {
|
||||
let tokenRepo: ReturnType<typeof mockTokenRepo>;
|
||||
let service: McpTokenService;
|
||||
|
||||
beforeEach(() => {
|
||||
tokenRepo = mockTokenRepo();
|
||||
service = new McpTokenService(tokenRepo, mockProjectRepo(), mockRbacRepo(), mockRbacService());
|
||||
});
|
||||
|
||||
it('returns ok=false for unknown tokens', async () => {
|
||||
const result = await service.introspectRaw(`${TOKEN_PREFIX}unknown`);
|
||||
expect(result.ok).toBe(false);
|
||||
expect(result.tokenName).toBeUndefined();
|
||||
});
|
||||
|
||||
it('returns ok=true and principal info for active tokens, and updates lastUsedAt', async () => {
|
||||
const raw = `${TOKEN_PREFIX}aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa`;
|
||||
const hash = hashToken(raw);
|
||||
vi.mocked(tokenRepo.findByHash).mockResolvedValueOnce(makeRow({ tokenHash: hash }));
|
||||
const result = await service.introspectRaw(raw);
|
||||
expect(result.ok).toBe(true);
|
||||
expect(result.projectName).toBe(PROJECT.name);
|
||||
expect(result.tokenName).toBe('mytok');
|
||||
expect(tokenRepo.touchLastUsed).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('rejects revoked tokens', async () => {
|
||||
const raw = `${TOKEN_PREFIX}bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb`;
|
||||
vi.mocked(tokenRepo.findByHash).mockResolvedValueOnce(makeRow({ tokenHash: hashToken(raw), revokedAt: new Date() }));
|
||||
const result = await service.introspectRaw(raw);
|
||||
expect(result.ok).toBe(false);
|
||||
expect(result.revoked).toBe(true);
|
||||
});
|
||||
|
||||
it('rejects expired tokens', async () => {
|
||||
const raw = `${TOKEN_PREFIX}cccccccccccccccccccccccccccccccc`;
|
||||
const past = new Date(Date.now() - 60_000);
|
||||
vi.mocked(tokenRepo.findByHash).mockResolvedValueOnce(makeRow({ tokenHash: hashToken(raw), expiresAt: past }));
|
||||
const result = await service.introspectRaw(raw);
|
||||
expect(result.ok).toBe(false);
|
||||
expect(result.expired).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('McpTokenService.revoke', () => {
|
||||
it('marks revokedAt and removes the auto-created RbacDefinition', async () => {
|
||||
const tokenRepo = mockTokenRepo();
|
||||
const rbacRepo = mockRbacRepo();
|
||||
const service = new McpTokenService(tokenRepo, mockProjectRepo(), rbacRepo, mockRbacService());
|
||||
|
||||
const row = makeRow();
|
||||
vi.mocked(tokenRepo.findById).mockResolvedValue(row);
|
||||
vi.mocked(rbacRepo.findByName).mockResolvedValue({
|
||||
id: 'rbac-ctok1', name: 'mcptoken-ctok1', subjects: [], roleBindings: [], version: 1, createdAt: new Date(), updatedAt: new Date(),
|
||||
});
|
||||
|
||||
await service.revoke('ctok1');
|
||||
|
||||
expect(tokenRepo.revoke).toHaveBeenCalledWith('ctok1');
|
||||
expect(rbacRepo.findByName).toHaveBeenCalledWith('mcptoken-ctok1');
|
||||
expect(rbacRepo.delete).toHaveBeenCalledWith('rbac-ctok1');
|
||||
});
|
||||
});
|
||||
132
src/mcpd/tests/secret-backends.test.ts
Normal file
132
src/mcpd/tests/secret-backends.test.ts
Normal file
@@ -0,0 +1,132 @@
|
||||
import { describe, it, expect, vi } from 'vitest';
|
||||
import { PlaintextDriver } from '../src/services/secret-backends/plaintext.js';
|
||||
import { OpenBaoDriver } from '../src/services/secret-backends/openbao.js';
|
||||
|
||||
describe('PlaintextDriver', () => {
|
||||
const driver = new PlaintextDriver({ listAllPlaintext: async () => [{ name: 'a', data: { k: 'v' } }] });
|
||||
|
||||
it('read returns the data passed in', async () => {
|
||||
const result = await driver.read({ name: 's', externalRef: '', data: { token: 'abc' } });
|
||||
expect(result).toEqual({ token: 'abc' });
|
||||
});
|
||||
|
||||
it('write returns storedData = input, externalRef = empty', async () => {
|
||||
const result = await driver.write({ name: 's', data: { k: 'v' } });
|
||||
expect(result).toEqual({ externalRef: '', storedData: { k: 'v' } });
|
||||
});
|
||||
|
||||
it('list delegates to the injected dep', async () => {
|
||||
const list = await driver.list();
|
||||
expect(list).toEqual([{ name: 'a', externalRef: '' }]);
|
||||
});
|
||||
|
||||
it('delete is a no-op', async () => {
|
||||
await expect(driver.delete({ name: 's', externalRef: '' })).resolves.toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('OpenBaoDriver', () => {
|
||||
function makeFetch(responses: Array<{ url: RegExp; status: number; body?: unknown }>): ReturnType<typeof vi.fn> {
|
||||
return vi.fn(async (url: string | URL, _init?: RequestInit) => {
|
||||
const urlStr = String(url);
|
||||
const match = responses.find((r) => r.url.test(urlStr));
|
||||
if (!match) throw new Error(`unexpected fetch: ${urlStr}`);
|
||||
return new Response(match.body ? JSON.stringify(match.body) : '', { status: match.status });
|
||||
});
|
||||
}
|
||||
|
||||
const resolver = { resolve: vi.fn(async () => 'test-vault-token') };
|
||||
|
||||
it('write sends POST to .../data/<path> with {data: ...}', async () => {
|
||||
const fetchFn = makeFetch([{ url: /\/v1\/secret\/data\/mcpctl\/mytoken$/, status: 200 }]);
|
||||
const driver = new OpenBaoDriver(
|
||||
{ url: 'http://bao.example:8200', tokenSecretRef: { name: 'bao', key: 'token' } },
|
||||
{ fetch: fetchFn as unknown as typeof fetch, secretRefResolver: resolver },
|
||||
);
|
||||
const result = await driver.write({ name: 'mytoken', data: { api_key: 'secret-xyz' } });
|
||||
expect(result.externalRef).toBe('secret/mcpctl/mytoken');
|
||||
expect(result.storedData).toEqual({});
|
||||
expect(fetchFn).toHaveBeenCalledTimes(1);
|
||||
const [, init] = fetchFn.mock.calls[0] as [unknown, RequestInit];
|
||||
expect(init.method).toBe('POST');
|
||||
expect(JSON.parse(init.body as string)).toEqual({ data: { api_key: 'secret-xyz' } });
|
||||
const headers = init.headers as Record<string, string>;
|
||||
expect(headers['X-Vault-Token']).toBe('test-vault-token');
|
||||
});
|
||||
|
||||
it('read returns body.data.data', async () => {
|
||||
const fetchFn = makeFetch([{
|
||||
url: /\/v1\/secret\/data\/mcpctl\/mytoken$/,
|
||||
status: 200,
|
||||
body: { data: { data: { api_key: 'secret-xyz' } } },
|
||||
}]);
|
||||
const driver = new OpenBaoDriver(
|
||||
{ url: 'http://bao.example:8200', tokenSecretRef: { name: 'bao', key: 'token' } },
|
||||
{ fetch: fetchFn as unknown as typeof fetch, secretRefResolver: resolver },
|
||||
);
|
||||
const result = await driver.read({ name: 'mytoken', externalRef: 'secret/mcpctl/mytoken', data: {} });
|
||||
expect(result).toEqual({ api_key: 'secret-xyz' });
|
||||
});
|
||||
|
||||
it('read throws when the path 404s', async () => {
|
||||
const fetchFn = makeFetch([{ url: /\/data\//, status: 404 }]);
|
||||
const driver = new OpenBaoDriver(
|
||||
{ url: 'http://bao.example:8200', tokenSecretRef: { name: 'bao', key: 'token' } },
|
||||
{ fetch: fetchFn as unknown as typeof fetch, secretRefResolver: resolver },
|
||||
);
|
||||
await expect(driver.read({ name: 'missing', externalRef: '', data: {} })).rejects.toThrow(/not found/);
|
||||
});
|
||||
|
||||
it('delete swallows 404', async () => {
|
||||
const fetchFn = makeFetch([{ url: /\/metadata\//, status: 404 }]);
|
||||
const driver = new OpenBaoDriver(
|
||||
{ url: 'http://bao.example:8200', tokenSecretRef: { name: 'bao', key: 'token' } },
|
||||
{ fetch: fetchFn as unknown as typeof fetch, secretRefResolver: resolver },
|
||||
);
|
||||
await expect(driver.delete({ name: 'gone', externalRef: '' })).resolves.toBeUndefined();
|
||||
});
|
||||
|
||||
it('list returns names from the metadata LIST call', async () => {
|
||||
const fetchFn = makeFetch([{
|
||||
url: /\/v1\/secret\/metadata\/mcpctl\/$/,
|
||||
status: 200,
|
||||
body: { data: { keys: ['token1', 'token2', 'sub-folder/'] } },
|
||||
}]);
|
||||
const driver = new OpenBaoDriver(
|
||||
{ url: 'http://bao.example:8200', tokenSecretRef: { name: 'bao', key: 'token' } },
|
||||
{ fetch: fetchFn as unknown as typeof fetch, secretRefResolver: resolver },
|
||||
);
|
||||
const result = await driver.list();
|
||||
// Sub-folders (trailing slash) are excluded; only leaf keys are returned.
|
||||
expect(result).toEqual([
|
||||
{ name: 'token1', externalRef: 'secret/mcpctl/token1' },
|
||||
{ name: 'token2', externalRef: 'secret/mcpctl/token2' },
|
||||
]);
|
||||
});
|
||||
|
||||
it('caches the vault token after first resolve', async () => {
|
||||
const fetchFn = makeFetch([
|
||||
{ url: /\/v1\/secret\/data\/mcpctl\//, status: 200, body: { data: { data: { k: 'v' } } } },
|
||||
]);
|
||||
const singleResolver = { resolve: vi.fn(async () => 'test-vault-token') };
|
||||
const driver = new OpenBaoDriver(
|
||||
{ url: 'http://bao.example:8200', tokenSecretRef: { name: 'bao', key: 'token' } },
|
||||
{ fetch: fetchFn as unknown as typeof fetch, secretRefResolver: singleResolver },
|
||||
);
|
||||
await driver.read({ name: 'a', externalRef: '', data: {} });
|
||||
await driver.read({ name: 'a', externalRef: '', data: {} });
|
||||
expect(singleResolver.resolve).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('propagates X-Vault-Namespace when configured', async () => {
|
||||
const fetchFn = makeFetch([{ url: /\/v1\/secret\/data\/mcpctl\//, status: 200 }]);
|
||||
const driver = new OpenBaoDriver(
|
||||
{ url: 'http://bao.example:8200', namespace: 'myteam', tokenSecretRef: { name: 'bao', key: 'token' } },
|
||||
{ fetch: fetchFn as unknown as typeof fetch, secretRefResolver: resolver },
|
||||
);
|
||||
await driver.write({ name: 'x', data: { k: 'v' } });
|
||||
const [, init] = fetchFn.mock.calls[0] as [unknown, RequestInit];
|
||||
const headers = init.headers as Record<string, string>;
|
||||
expect(headers['X-Vault-Namespace']).toBe('myteam');
|
||||
});
|
||||
});
|
||||
@@ -3,43 +3,68 @@ import Fastify from 'fastify';
|
||||
import type { FastifyInstance } from 'fastify';
|
||||
import { registerSecretRoutes } from '../src/routes/secrets.js';
|
||||
import { SecretService } from '../src/services/secret.service.js';
|
||||
import { SecretBackendService } from '../src/services/secret-backend.service.js';
|
||||
import { errorHandler } from '../src/middleware/error-handler.js';
|
||||
import type { ISecretRepository } from '../src/repositories/interfaces.js';
|
||||
import type { ISecretBackendRepository } from '../src/repositories/secret-backend.repository.js';
|
||||
import type { SecretBackend } from '@prisma/client';
|
||||
|
||||
let app: FastifyInstance;
|
||||
|
||||
function mockRepo(): ISecretRepository {
|
||||
let lastCreated: Record<string, unknown> | null = null;
|
||||
const PLAINTEXT_BACKEND: SecretBackend = {
|
||||
id: 'backend-plaintext',
|
||||
name: 'default',
|
||||
type: 'plaintext',
|
||||
config: {},
|
||||
isDefault: true,
|
||||
description: '',
|
||||
version: 1,
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
};
|
||||
|
||||
function makeSecret(overrides: Partial<{ id: string; name: string; data: Record<string, string>; externalRef: string; backendId: string }> = {}) {
|
||||
return {
|
||||
findAll: vi.fn(async () => [
|
||||
{ id: '1', name: 'ha-creds', data: { TOKEN: 'abc' }, version: 1, createdAt: new Date(), updatedAt: new Date() },
|
||||
]),
|
||||
id: overrides.id ?? 'sec-1',
|
||||
name: overrides.name ?? 'ha-creds',
|
||||
backendId: overrides.backendId ?? PLAINTEXT_BACKEND.id,
|
||||
data: overrides.data ?? { TOKEN: 'abc' },
|
||||
externalRef: overrides.externalRef ?? '',
|
||||
version: 1,
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
};
|
||||
}
|
||||
|
||||
function mockRepo(): ISecretRepository {
|
||||
let lastCreated: ReturnType<typeof makeSecret> | null = null;
|
||||
return {
|
||||
findAll: vi.fn(async () => [makeSecret()]),
|
||||
findById: vi.fn(async (id: string) => {
|
||||
if (lastCreated && (lastCreated as { id: string }).id === id) return lastCreated as never;
|
||||
if (lastCreated && lastCreated.id === id) return lastCreated;
|
||||
return null;
|
||||
}),
|
||||
findByName: vi.fn(async () => null),
|
||||
findByBackend: vi.fn(async () => []),
|
||||
create: vi.fn(async (data) => {
|
||||
const secret = {
|
||||
const secret = makeSecret({
|
||||
id: 'new-id',
|
||||
name: data.name,
|
||||
data: data.data ?? {},
|
||||
version: 1,
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
};
|
||||
externalRef: data.externalRef ?? '',
|
||||
backendId: data.backendId,
|
||||
});
|
||||
lastCreated = secret;
|
||||
return secret;
|
||||
}),
|
||||
update: vi.fn(async (id, data) => {
|
||||
const secret = {
|
||||
const secret = makeSecret({
|
||||
id,
|
||||
name: 'ha-creds',
|
||||
name: lastCreated?.name ?? 'ha-creds',
|
||||
data: data.data,
|
||||
version: 2,
|
||||
createdAt: new Date(),
|
||||
updatedAt: new Date(),
|
||||
};
|
||||
externalRef: data.externalRef,
|
||||
backendId: data.backendId ?? PLAINTEXT_BACKEND.id,
|
||||
});
|
||||
lastCreated = secret;
|
||||
return secret;
|
||||
}),
|
||||
@@ -47,14 +72,32 @@ function mockRepo(): ISecretRepository {
|
||||
};
|
||||
}
|
||||
|
||||
function mockBackendRepo(): ISecretBackendRepository {
|
||||
return {
|
||||
findAll: vi.fn(async () => [PLAINTEXT_BACKEND]),
|
||||
findById: vi.fn(async (id) => (id === PLAINTEXT_BACKEND.id ? PLAINTEXT_BACKEND : null)),
|
||||
findByName: vi.fn(async (name) => (name === PLAINTEXT_BACKEND.name ? PLAINTEXT_BACKEND : null)),
|
||||
findDefault: vi.fn(async () => PLAINTEXT_BACKEND),
|
||||
create: vi.fn(async () => PLAINTEXT_BACKEND),
|
||||
update: vi.fn(async () => PLAINTEXT_BACKEND),
|
||||
setAsDefault: vi.fn(async () => PLAINTEXT_BACKEND),
|
||||
delete: vi.fn(async () => {}),
|
||||
countReferencingSecrets: vi.fn(async () => 0),
|
||||
};
|
||||
}
|
||||
|
||||
afterEach(async () => {
|
||||
if (app) await app.close();
|
||||
});
|
||||
|
||||
function createApp(repo: ISecretRepository) {
|
||||
async function createApp(repo: ISecretRepository) {
|
||||
app = Fastify({ logger: false });
|
||||
app.setErrorHandler(errorHandler);
|
||||
const service = new SecretService(repo);
|
||||
const backends = new SecretBackendService(mockBackendRepo(), {
|
||||
plaintext: { listAllPlaintext: async () => [] },
|
||||
secretRefResolver: { resolve: async () => '' },
|
||||
});
|
||||
const service = new SecretService(repo, backends);
|
||||
registerSecretRoutes(app, service);
|
||||
return app.ready();
|
||||
}
|
||||
@@ -129,7 +172,7 @@ describe('Secret Routes', () => {
|
||||
describe('PUT /api/v1/secrets/:id', () => {
|
||||
it('updates a secret', async () => {
|
||||
const repo = mockRepo();
|
||||
vi.mocked(repo.findById).mockResolvedValue({ id: '1', name: 'ha-creds' } as never);
|
||||
vi.mocked(repo.findById).mockResolvedValue(makeSecret({ id: '1' }) as never);
|
||||
await createApp(repo);
|
||||
const res = await app.inject({
|
||||
method: 'PUT',
|
||||
@@ -154,7 +197,7 @@ describe('Secret Routes', () => {
|
||||
describe('DELETE /api/v1/secrets/:id', () => {
|
||||
it('deletes a secret and returns 204', async () => {
|
||||
const repo = mockRepo();
|
||||
vi.mocked(repo.findById).mockResolvedValue({ id: '1', name: 'ha-creds' } as never);
|
||||
vi.mocked(repo.findById).mockResolvedValue(makeSecret({ id: '1' }) as never);
|
||||
await createApp(repo);
|
||||
const res = await app.inject({ method: 'DELETE', url: '/api/v1/secrets/1' });
|
||||
expect(res.statusCode).toBe(204);
|
||||
|
||||
@@ -10,6 +10,7 @@
|
||||
"clean": "rimraf dist",
|
||||
"dev": "tsx watch src/index.ts",
|
||||
"start": "node dist/index.js",
|
||||
"serve": "node dist/serve.js",
|
||||
"test": "vitest",
|
||||
"test:run": "vitest run",
|
||||
"test:smoke": "vitest run --config vitest.smoke.config.ts"
|
||||
|
||||
@@ -10,11 +10,17 @@ import type { McpdClient } from '../http/mcpd-client.js';
|
||||
const BATCH_SIZE = 50;
|
||||
const FLUSH_INTERVAL_MS = 5_000;
|
||||
|
||||
interface SessionPrincipal {
|
||||
userName?: string;
|
||||
tokenName?: string;
|
||||
tokenSha?: string;
|
||||
}
|
||||
|
||||
export class AuditCollector {
|
||||
private queue: AuditEvent[] = [];
|
||||
private flushTimer: ReturnType<typeof setInterval> | null = null;
|
||||
private flushing = false;
|
||||
private sessionUserNames = new Map<string, string>();
|
||||
private sessionPrincipals = new Map<string, SessionPrincipal>();
|
||||
|
||||
constructor(
|
||||
private readonly mcpdClient: McpdClient,
|
||||
@@ -25,15 +31,26 @@ export class AuditCollector {
|
||||
|
||||
/** Register a userName for a session. All future events for this session auto-fill it. */
|
||||
setSessionUserName(sessionId: string, userName: string): void {
|
||||
this.sessionUserNames.set(sessionId, userName);
|
||||
const existing = this.sessionPrincipals.get(sessionId) ?? {};
|
||||
this.sessionPrincipals.set(sessionId, { ...existing, userName });
|
||||
}
|
||||
|
||||
/** Queue an audit event. Auto-fills projectName and userName (from session map). */
|
||||
/** Register McpToken identity for a session (HTTP-mode authenticated requests). */
|
||||
setSessionMcpToken(sessionId: string, token: { tokenName: string; tokenSha: string }): void {
|
||||
const existing = this.sessionPrincipals.get(sessionId) ?? {};
|
||||
this.sessionPrincipals.set(sessionId, { ...existing, tokenName: token.tokenName, tokenSha: token.tokenSha });
|
||||
}
|
||||
|
||||
/** Queue an audit event. Auto-fills projectName, userName, tokenName, and tokenSha. */
|
||||
emit(event: Omit<AuditEvent, 'projectName'>): void {
|
||||
const enriched: AuditEvent = { ...event, projectName: this.projectName };
|
||||
if (!enriched.userName && enriched.sessionId) {
|
||||
const name = this.sessionUserNames.get(enriched.sessionId);
|
||||
if (name) enriched.userName = name;
|
||||
if (enriched.sessionId) {
|
||||
const principal = this.sessionPrincipals.get(enriched.sessionId);
|
||||
if (principal) {
|
||||
if (!enriched.userName && principal.userName) enriched.userName = principal.userName;
|
||||
if (!enriched.tokenName && principal.tokenName) enriched.tokenName = principal.tokenName;
|
||||
if (!enriched.tokenSha && principal.tokenSha) enriched.tokenSha = principal.tokenSha;
|
||||
}
|
||||
}
|
||||
this.queue.push(enriched);
|
||||
if (this.queue.length >= BATCH_SIZE) {
|
||||
|
||||
@@ -32,5 +32,9 @@ export interface AuditEvent {
|
||||
correlationId?: string;
|
||||
parentEventId?: string;
|
||||
userName?: string;
|
||||
/** Set when the session authenticated via an McpToken (HTTP-mode mcplocal). */
|
||||
tokenName?: string;
|
||||
/** SHA-256 hash of the McpToken that made the request. */
|
||||
tokenSha?: string;
|
||||
payload: Record<string, unknown>;
|
||||
}
|
||||
|
||||
@@ -46,7 +46,13 @@ export async function refreshProjectUpstreams(
|
||||
servers = await mcpdClient.get<McpdServer[]>(path);
|
||||
}
|
||||
|
||||
return syncUpstreams(router, mcpdClient, servers);
|
||||
// Downstream upstream-proxy calls go through `mcpdClient` too. In HTTP-mode
|
||||
// mcplocal the pod has no credentials of its own, so the default token on
|
||||
// `mcpdClient` is an empty string — every /api/v1/mcp/proxy call would 401.
|
||||
// Bind a per-request client with the caller's bearer so each McpdUpstream
|
||||
// forwards the same identity that passed project discovery.
|
||||
const upstreamClient = authToken ? mcpdClient.withToken(authToken) : mcpdClient;
|
||||
return syncUpstreams(router, upstreamClient, servers);
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -60,6 +60,16 @@ export class McpdClient {
|
||||
return new McpdClient(this.baseUrl, this.token, { ...this.extraHeaders }, timeoutMs);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new client with a different Bearer token. The HTTP-mode mcplocal
|
||||
* pod has no credentials of its own — each incoming client request carries
|
||||
* its McpToken, and this method is how we thread that token through to the
|
||||
* McpdUpstream instances created during project discovery.
|
||||
*/
|
||||
withToken(token: string): McpdClient {
|
||||
return new McpdClient(this.baseUrl, token, { ...this.extraHeaders }, this.timeoutMs);
|
||||
}
|
||||
|
||||
async get<T>(path: string): Promise<T> {
|
||||
return this.request<T>('GET', path);
|
||||
}
|
||||
|
||||
@@ -62,21 +62,31 @@ export function registerProjectMcpEndpoint(app: FastifyInstance, mcpdClient: Mcp
|
||||
return existing.router;
|
||||
}
|
||||
|
||||
// HTTP-mode mcplocal has no pod-level credentials — the default
|
||||
// `mcpdClient.token` is an empty string. Every downstream call from this
|
||||
// request (upstream discovery, LLM config fetch, prompt index for
|
||||
// begin_session) has to use the CALLER's McpToken as the bearer, or mcpd
|
||||
// rejects with 401. Build one per-request client here and thread it
|
||||
// everywhere instead of sprinkling `.withToken(authToken)` at each call site.
|
||||
const requestClient = authToken ? mcpdClient.withToken(authToken) : mcpdClient;
|
||||
|
||||
// Create new router or refresh existing one
|
||||
const router = existing?.router ?? new McpRouter();
|
||||
await refreshProjectUpstreams(router, mcpdClient, projectName, authToken);
|
||||
|
||||
// Resolve project LLM model: local override → mcpd recommendation → global default
|
||||
const localOverride = loadProjectLlmOverride(projectName);
|
||||
const mcpdConfig = await fetchProjectLlmConfig(mcpdClient, projectName);
|
||||
const mcpdConfig = await fetchProjectLlmConfig(requestClient, projectName);
|
||||
const resolvedModel = localOverride?.model ?? mcpdConfig.llmModel ?? undefined;
|
||||
|
||||
// If project llmProvider is "none", disable LLM for this project
|
||||
const llmDisabled = mcpdConfig.llmProvider === 'none' || localOverride?.provider === 'none';
|
||||
const effectiveRegistry = llmDisabled ? null : (providerRegistry ?? null);
|
||||
|
||||
// Configure prompt resources with SA-scoped client for RBAC
|
||||
const saClient = mcpdClient.withHeaders({ 'X-Service-Account': `project:${projectName}` });
|
||||
// Configure prompt resources with SA-scoped client for RBAC.
|
||||
// Keep the X-Service-Account header for mcpd-side audit tagging, but carry
|
||||
// the caller's bearer so auth passes (the principal resolves as McpToken:<sha>).
|
||||
const saClient = requestClient.withHeaders({ 'X-Service-Account': `project:${projectName}` });
|
||||
router.setPromptConfig(saClient, projectName);
|
||||
|
||||
// System prompt fetcher for LLM consumers (uses router's cached fetcher)
|
||||
@@ -97,7 +107,8 @@ export function registerProjectMcpEndpoint(app: FastifyInstance, mcpdClient: Mcp
|
||||
?? effectiveRegistry?.getActiveName()
|
||||
?? 'none';
|
||||
const llmModel = resolvedModel ?? 'default';
|
||||
const cache = new FileCache(`${llmProvider}--${llmModel}--${proxyModelName}`);
|
||||
const cacheConfig = process.env.MCPLOCAL_CACHE_DIR ? { dir: process.env.MCPLOCAL_CACHE_DIR } : undefined;
|
||||
const cache = new FileCache(`${llmProvider}--${llmModel}--${proxyModelName}`, cacheConfig);
|
||||
router.setProxyModel(proxyModelName, llmAdapter, cache);
|
||||
|
||||
// Per-server proxymodel overrides (if mcpd provides them)
|
||||
@@ -200,6 +211,17 @@ export function registerProjectMcpEndpoint(app: FastifyInstance, mcpdClient: Mcp
|
||||
void ensureUserName().then((name) => {
|
||||
if (name) collector.setSessionUserName(id, name);
|
||||
});
|
||||
|
||||
// HTTP-mode mcplocal: if the token-auth preHandler attached an McpToken
|
||||
// principal to the request, tag the session so audit events carry the
|
||||
// tokenName/tokenSha alongside (or instead of) userName.
|
||||
const principal = request.mcpToken;
|
||||
if (principal) {
|
||||
collector.setSessionMcpToken(id, {
|
||||
tokenName: principal.tokenName,
|
||||
tokenSha: principal.tokenSha,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Audit: session_bind
|
||||
@@ -388,7 +410,7 @@ export function registerProjectMcpEndpoint(app: FastifyInstance, mcpdClient: Mcp
|
||||
const llmAdapter = providerRegistry
|
||||
? new LLMProviderAdapter(providerRegistry)
|
||||
: { complete: async () => '', available: () => false };
|
||||
const cache = new FileCache('dynamic');
|
||||
const cache = new FileCache('dynamic', process.env.MCPLOCAL_CACHE_DIR ? { dir: process.env.MCPLOCAL_CACHE_DIR } : undefined);
|
||||
|
||||
if (serverName && serverProxyModel) {
|
||||
entry.router.setServerProxyModel(serverName, serverProxyModel, llmAdapter, cache);
|
||||
|
||||
114
src/mcplocal/src/http/token-auth.ts
Normal file
114
src/mcplocal/src/http/token-auth.ts
Normal file
@@ -0,0 +1,114 @@
|
||||
/**
|
||||
* Fastify preHandler that authenticates `/projects/*` and `/mcp` requests
|
||||
* against mcpd's McpToken introspection endpoint.
|
||||
*
|
||||
* Flow:
|
||||
* 1. Reject non-Bearer and non-`mcpctl_pat_` auth up front.
|
||||
* 2. Call `GET <mcpd>/api/v1/mcptokens/introspect` with the raw bearer.
|
||||
* 3. Cache the result (positive + negative TTLs) to avoid a round-trip per MCP call.
|
||||
* 4. Enforce `request.params.projectName === response.projectName`.
|
||||
* 5. Stash the principal on `request.mcpToken` for the audit collector.
|
||||
*/
|
||||
import type { FastifyRequest, FastifyReply } from 'fastify';
|
||||
import { isMcpToken, hashToken } from '@mcpctl/shared';
|
||||
|
||||
export interface TokenAuthOptions {
|
||||
mcpdUrl: string;
|
||||
/** TTL for a successful introspection, ms. Default 30_000. */
|
||||
positiveTtlMs?: number;
|
||||
/** TTL for a failed introspection, ms. Default 5_000. */
|
||||
negativeTtlMs?: number;
|
||||
/** Injectable HTTP fetcher for tests. Defaults to `fetch`. */
|
||||
fetch?: (url: string, init?: RequestInit) => Promise<Response>;
|
||||
}
|
||||
|
||||
export interface McpTokenPrincipal {
|
||||
tokenName: string;
|
||||
tokenSha: string;
|
||||
projectName: string;
|
||||
}
|
||||
|
||||
declare module 'fastify' {
|
||||
interface FastifyRequest {
|
||||
/** Populated by the token-auth preHandler when the bearer was a McpToken. */
|
||||
mcpToken?: McpTokenPrincipal;
|
||||
}
|
||||
}
|
||||
|
||||
interface IntrospectResponse {
|
||||
ok: boolean;
|
||||
tokenName?: string;
|
||||
tokenSha?: string;
|
||||
projectName?: string;
|
||||
revoked?: boolean;
|
||||
expired?: boolean;
|
||||
error?: string;
|
||||
}
|
||||
|
||||
interface CacheEntry {
|
||||
result: IntrospectResponse;
|
||||
expiresAt: number;
|
||||
}
|
||||
|
||||
export function createTokenAuthMiddleware(opts: TokenAuthOptions) {
|
||||
const positiveTtl = opts.positiveTtlMs ?? 30_000;
|
||||
const negativeTtl = opts.negativeTtlMs ?? 5_000;
|
||||
const fetchImpl = opts.fetch ?? (globalThis.fetch as typeof fetch);
|
||||
const cache = new Map<string, CacheEntry>();
|
||||
|
||||
async function introspect(raw: string): Promise<IntrospectResponse> {
|
||||
const key = hashToken(raw);
|
||||
const now = Date.now();
|
||||
const hit = cache.get(key);
|
||||
if (hit && hit.expiresAt > now) return hit.result;
|
||||
|
||||
try {
|
||||
const res = await fetchImpl(`${opts.mcpdUrl.replace(/\/$/, '')}/api/v1/mcptokens/introspect`, {
|
||||
method: 'GET',
|
||||
headers: { Authorization: `Bearer ${raw}` },
|
||||
});
|
||||
const body = (await res.json().catch(() => ({ ok: false, error: 'unreadable body' }))) as IntrospectResponse;
|
||||
const result: IntrospectResponse = res.ok ? body : { ...body, ok: false };
|
||||
cache.set(key, { result, expiresAt: now + (result.ok ? positiveTtl : negativeTtl) });
|
||||
return result;
|
||||
} catch (err) {
|
||||
const result: IntrospectResponse = { ok: false, error: err instanceof Error ? err.message : String(err) };
|
||||
cache.set(key, { result, expiresAt: now + negativeTtl });
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
return async function tokenAuth(request: FastifyRequest, reply: FastifyReply): Promise<void> {
|
||||
const header = request.headers.authorization;
|
||||
if (header === undefined || !header.startsWith('Bearer ')) {
|
||||
reply.code(401).send({ error: 'Missing Authorization bearer' });
|
||||
return;
|
||||
}
|
||||
const raw = header.slice(7);
|
||||
if (!isMcpToken(raw)) {
|
||||
reply.code(401).send({ error: 'Only mcpctl_pat_ bearers are accepted on this endpoint' });
|
||||
return;
|
||||
}
|
||||
|
||||
const introspection = await introspect(raw);
|
||||
if (!introspection.ok) {
|
||||
reply.code(401).send({
|
||||
error: introspection.revoked ? 'Token revoked' : introspection.expired ? 'Token expired' : 'Invalid token',
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Project-scope check: token.projectName must match the path param.
|
||||
const params = request.params as { projectName?: string } | undefined;
|
||||
if (params?.projectName !== undefined && params.projectName !== introspection.projectName) {
|
||||
reply.code(403).send({ error: `Token is not valid for project '${params.projectName}'` });
|
||||
return;
|
||||
}
|
||||
|
||||
request.mcpToken = {
|
||||
tokenName: introspection.tokenName!,
|
||||
tokenSha: introspection.tokenSha!,
|
||||
projectName: introspection.projectName!,
|
||||
};
|
||||
};
|
||||
}
|
||||
111
src/mcplocal/src/serve.ts
Normal file
111
src/mcplocal/src/serve.ts
Normal file
@@ -0,0 +1,111 @@
|
||||
#!/usr/bin/env node
|
||||
/**
|
||||
* HTTP-only entry for the containerized mcplocal (deployed behind Ingress as `mcp.ad.itaz.eu`).
|
||||
*
|
||||
* Differences from main.ts (the STDIO/systemd entry):
|
||||
* - No StdioProxyServer (there's no stdin/stdout MCP client in a pod).
|
||||
* - No `--upstream` flag (upstreams come from mcpd project discovery).
|
||||
* - Host + port from env (MCPLOCAL_HTTP_HOST / MCPLOCAL_HTTP_PORT).
|
||||
* - Requires MCPLOCAL_MCPD_URL to point at mcpd inside the cluster.
|
||||
* - Registers a token-auth preHandler on `/projects/*` and `/mcp`.
|
||||
* - FileCache directory honours MCPLOCAL_CACHE_DIR (wired via project-mcp-endpoint).
|
||||
*
|
||||
* Identity model: **the pod has no persistent identity to mcpd.** Every
|
||||
* inbound request's `Authorization: Bearer mcpctl_pat_…` is forwarded
|
||||
* verbatim for all downstream mcpd calls (introspect + project
|
||||
* discovery). mcpd's auth middleware dispatches on the `mcpctl_pat_`
|
||||
* prefix and resolves the McpToken principal. As a result there is
|
||||
* deliberately no MCPLOCAL_MCPD_TOKEN env var — adding one would only
|
||||
* create a rotation problem for a state we don't need.
|
||||
*/
|
||||
import { McpRouter } from './router.js';
|
||||
import { createHttpServer } from './http/server.js';
|
||||
import { loadHttpConfig, loadLlmProviders } from './http/config.js';
|
||||
import { createProvidersFromConfig } from './llm-config.js';
|
||||
import { createSecretStore } from '@mcpctl/shared';
|
||||
import { reloadStages, startWatchers, stopWatchers } from './proxymodel/watcher.js';
|
||||
import { createTokenAuthMiddleware } from './http/token-auth.js';
|
||||
|
||||
function requireEnv(name: string): string {
|
||||
const value = process.env[name];
|
||||
if (value === undefined || value === '') {
|
||||
throw new Error(`Required env var ${name} is not set`);
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
export async function serve(): Promise<void> {
|
||||
const mcpdUrl = requireEnv('MCPLOCAL_MCPD_URL');
|
||||
const httpHost = process.env.MCPLOCAL_HTTP_HOST ?? '0.0.0.0';
|
||||
const httpPort = Number(process.env.MCPLOCAL_HTTP_PORT ?? '3200');
|
||||
if (!Number.isFinite(httpPort) || httpPort <= 0) {
|
||||
throw new Error(`Invalid MCPLOCAL_HTTP_PORT: ${process.env.MCPLOCAL_HTTP_PORT}`);
|
||||
}
|
||||
// MCPLOCAL_CACHE_DIR is optional; FileCache reads it directly.
|
||||
const cacheDir = process.env.MCPLOCAL_CACHE_DIR;
|
||||
|
||||
// loadHttpConfig reads user-level config.json; we override with env.
|
||||
const baseConfig = loadHttpConfig();
|
||||
const httpConfig = {
|
||||
...baseConfig,
|
||||
httpHost,
|
||||
httpPort,
|
||||
mcpdUrl,
|
||||
};
|
||||
|
||||
// LLM providers (configured via mounted ConfigMap at ~/.mcpctl/config.json or env).
|
||||
const llmEntries = loadLlmProviders();
|
||||
const secretStore = await createSecretStore();
|
||||
const providerRegistry = await createProvidersFromConfig(llmEntries, secretStore);
|
||||
|
||||
process.stderr.write(
|
||||
`mcplocal-serve: mcpd=${mcpdUrl} host=${httpHost} port=${httpPort} cache=${cacheDir ?? '~/.mcpctl/cache'}\n`,
|
||||
);
|
||||
|
||||
const router = new McpRouter();
|
||||
|
||||
const httpServer = await createHttpServer(httpConfig, { router, providerRegistry });
|
||||
|
||||
// Auth preHandler: only protect the MCP surfaces. /health, /healthz, /proxymodels etc stay open.
|
||||
// Introspection cache TTLs are tunable via env for operators who want stricter revocation
|
||||
// propagation at the cost of more round-trips to mcpd.
|
||||
const positiveTtlMs = Number(process.env.MCPLOCAL_TOKEN_POSITIVE_TTL_MS ?? '30000');
|
||||
const negativeTtlMs = Number(process.env.MCPLOCAL_TOKEN_NEGATIVE_TTL_MS ?? '5000');
|
||||
const tokenAuth = createTokenAuthMiddleware({ mcpdUrl, positiveTtlMs, negativeTtlMs });
|
||||
httpServer.addHook('preHandler', async (request, reply) => {
|
||||
const url = request.url;
|
||||
if (!url.startsWith('/projects/') && !url.startsWith('/mcp')) return;
|
||||
await tokenAuth(request, reply);
|
||||
});
|
||||
|
||||
await httpServer.listen({ port: httpPort, host: httpHost });
|
||||
process.stderr.write(`mcplocal-serve listening on ${httpHost}:${httpPort}\n`);
|
||||
|
||||
// Hot-reload proxymodel stages from ~/.mcpctl/stages (same as main.ts).
|
||||
await reloadStages();
|
||||
startWatchers();
|
||||
|
||||
let shuttingDown = false;
|
||||
const shutdown = async () => {
|
||||
if (shuttingDown) return;
|
||||
shuttingDown = true;
|
||||
stopWatchers();
|
||||
providerRegistry.disposeAll();
|
||||
await httpServer.close();
|
||||
await router.closeAll();
|
||||
process.exit(0);
|
||||
};
|
||||
process.on('SIGTERM', () => void shutdown());
|
||||
process.on('SIGINT', () => void shutdown());
|
||||
}
|
||||
|
||||
const isMain =
|
||||
process.argv[1]?.endsWith('serve.js') ||
|
||||
process.argv[1]?.endsWith('serve.ts');
|
||||
|
||||
if (isMain) {
|
||||
serve().catch((err) => {
|
||||
process.stderr.write(`Fatal: ${err}\n`);
|
||||
process.exit(1);
|
||||
});
|
||||
}
|
||||
162
src/mcplocal/tests/http/token-auth.test.ts
Normal file
162
src/mcplocal/tests/http/token-auth.test.ts
Normal file
@@ -0,0 +1,162 @@
|
||||
/**
|
||||
* Unit tests for the HTTP-mode token-auth preHandler.
|
||||
*
|
||||
* Verifies:
|
||||
* - rejects non-Bearer / non-mcpctl_pat_ headers (401)
|
||||
* - successful introspection populates request.mcpToken
|
||||
* - positive results are cached up to the positive TTL
|
||||
* - **revoked tokens surface as 401 within the negative-TTL window** ≤ 5s
|
||||
* - wrong-project path → 403
|
||||
*/
|
||||
import { describe, it, expect, vi } from 'vitest';
|
||||
import Fastify from 'fastify';
|
||||
import { createTokenAuthMiddleware } from '../../src/http/token-auth.js';
|
||||
|
||||
interface IntrospectResponse {
|
||||
ok: boolean;
|
||||
tokenName?: string;
|
||||
tokenSha?: string;
|
||||
projectName?: string;
|
||||
revoked?: boolean;
|
||||
expired?: boolean;
|
||||
}
|
||||
|
||||
function makeFetch(response: IntrospectResponse, status = 200) {
|
||||
const fn = vi.fn(async () => ({
|
||||
ok: status >= 200 && status < 300,
|
||||
json: async () => response,
|
||||
}) as unknown as Response);
|
||||
return fn;
|
||||
}
|
||||
|
||||
async function setupApp(deps: Parameters<typeof createTokenAuthMiddleware>[0]) {
|
||||
const app = Fastify({ logger: false });
|
||||
const middleware = createTokenAuthMiddleware(deps);
|
||||
app.addHook('preHandler', middleware);
|
||||
app.get('/projects/:projectName/mcp', async (request) => ({
|
||||
ok: true,
|
||||
mcpToken: request.mcpToken,
|
||||
}));
|
||||
await app.ready();
|
||||
return app;
|
||||
}
|
||||
|
||||
describe('token-auth preHandler', () => {
|
||||
it('rejects requests with no Authorization header (401)', async () => {
|
||||
const app = await setupApp({ mcpdUrl: 'http://mcpd', fetch: makeFetch({ ok: true }) });
|
||||
const res = await app.inject({ method: 'GET', url: '/projects/foo/mcp' });
|
||||
expect(res.statusCode).toBe(401);
|
||||
await app.close();
|
||||
});
|
||||
|
||||
it('rejects bearers that are not mcpctl_pat_ tokens (401)', async () => {
|
||||
const fetchFn = makeFetch({ ok: true });
|
||||
const app = await setupApp({ mcpdUrl: 'http://mcpd', fetch: fetchFn });
|
||||
const res = await app.inject({
|
||||
method: 'GET',
|
||||
url: '/projects/foo/mcp',
|
||||
headers: { authorization: 'Bearer some-session-token' },
|
||||
});
|
||||
expect(res.statusCode).toBe(401);
|
||||
expect(fetchFn).not.toHaveBeenCalled();
|
||||
await app.close();
|
||||
});
|
||||
|
||||
it('passes valid tokens and populates request.mcpToken', async () => {
|
||||
const fetchFn = makeFetch({ ok: true, tokenName: 'demo', tokenSha: 'abc', projectName: 'foo' });
|
||||
const app = await setupApp({ mcpdUrl: 'http://mcpd', fetch: fetchFn });
|
||||
const res = await app.inject({
|
||||
method: 'GET',
|
||||
url: '/projects/foo/mcp',
|
||||
headers: { authorization: 'Bearer mcpctl_pat_valid' },
|
||||
});
|
||||
expect(res.statusCode).toBe(200);
|
||||
const body = res.json<{ mcpToken: { tokenName: string; projectName: string } }>();
|
||||
expect(body.mcpToken.tokenName).toBe('demo');
|
||||
expect(body.mcpToken.projectName).toBe('foo');
|
||||
await app.close();
|
||||
});
|
||||
|
||||
it('rejects with 403 when the token is bound to a different project', async () => {
|
||||
const fetchFn = makeFetch({ ok: true, tokenName: 'demo', tokenSha: 'abc', projectName: 'foo' });
|
||||
const app = await setupApp({ mcpdUrl: 'http://mcpd', fetch: fetchFn });
|
||||
const res = await app.inject({
|
||||
method: 'GET',
|
||||
url: '/projects/other/mcp',
|
||||
headers: { authorization: 'Bearer mcpctl_pat_valid' },
|
||||
});
|
||||
expect(res.statusCode).toBe(403);
|
||||
await app.close();
|
||||
});
|
||||
|
||||
it('caches positive introspections (does not re-hit mcpd within TTL)', async () => {
|
||||
const fetchFn = makeFetch({ ok: true, tokenName: 'demo', tokenSha: 'abc', projectName: 'foo' });
|
||||
const app = await setupApp({ mcpdUrl: 'http://mcpd', fetch: fetchFn, positiveTtlMs: 30_000 });
|
||||
const h = { authorization: 'Bearer mcpctl_pat_valid' };
|
||||
await app.inject({ method: 'GET', url: '/projects/foo/mcp', headers: h });
|
||||
await app.inject({ method: 'GET', url: '/projects/foo/mcp', headers: h });
|
||||
await app.inject({ method: 'GET', url: '/projects/foo/mcp', headers: h });
|
||||
expect(fetchFn).toHaveBeenCalledTimes(1);
|
||||
await app.close();
|
||||
});
|
||||
|
||||
it('surfaces revocation as 401 within the 5s negative cache (lag ≤ 5s)', async () => {
|
||||
// Simulate a revocation: first call returns ok:true, then flip to ok:false+revoked.
|
||||
let revoked = false;
|
||||
const fetchFn = vi.fn(async () => ({
|
||||
ok: !revoked,
|
||||
json: async () => revoked
|
||||
? { ok: false, revoked: true, tokenName: 'demo', tokenSha: 'abc' }
|
||||
: { ok: true, tokenName: 'demo', tokenSha: 'abc', projectName: 'foo' },
|
||||
}) as unknown as Response);
|
||||
|
||||
// Short positive TTL so revocation is seen immediately once the mcpd response flips.
|
||||
const app = await setupApp({
|
||||
mcpdUrl: 'http://mcpd',
|
||||
fetch: fetchFn,
|
||||
positiveTtlMs: 10,
|
||||
negativeTtlMs: 5_000,
|
||||
});
|
||||
const h = { authorization: 'Bearer mcpctl_pat_valid' };
|
||||
|
||||
const first = await app.inject({ method: 'GET', url: '/projects/foo/mcp', headers: h });
|
||||
expect(first.statusCode).toBe(200);
|
||||
|
||||
// Revoke out-of-band.
|
||||
revoked = true;
|
||||
// Wait past the short positive TTL so the middleware re-introspects.
|
||||
await new Promise((r) => setTimeout(r, 15));
|
||||
|
||||
const second = await app.inject({ method: 'GET', url: '/projects/foo/mcp', headers: h });
|
||||
expect(second.statusCode).toBe(401);
|
||||
expect(second.json<{ error: string }>().error).toContain('revoked');
|
||||
await app.close();
|
||||
});
|
||||
|
||||
it('returns 401 when mcpd introspect returns ok:false (unknown / invalid token)', async () => {
|
||||
const fetchFn = vi.fn(async () => ({
|
||||
ok: false,
|
||||
json: async () => ({ ok: false, error: 'Invalid token' }),
|
||||
}) as unknown as Response);
|
||||
const app = await setupApp({ mcpdUrl: 'http://mcpd', fetch: fetchFn });
|
||||
const res = await app.inject({
|
||||
method: 'GET',
|
||||
url: '/projects/foo/mcp',
|
||||
headers: { authorization: 'Bearer mcpctl_pat_unknown' },
|
||||
});
|
||||
expect(res.statusCode).toBe(401);
|
||||
await app.close();
|
||||
});
|
||||
|
||||
it('returns 401 (not a crash) when mcpd is unreachable', async () => {
|
||||
const fetchFn = vi.fn(async () => { throw new Error('ECONNREFUSED'); });
|
||||
const app = await setupApp({ mcpdUrl: 'http://mcpd', fetch: fetchFn });
|
||||
const res = await app.inject({
|
||||
method: 'GET',
|
||||
url: '/projects/foo/mcp',
|
||||
headers: { authorization: 'Bearer mcpctl_pat_valid' },
|
||||
});
|
||||
expect(res.statusCode).toBe(401);
|
||||
await app.close();
|
||||
});
|
||||
});
|
||||
@@ -13,6 +13,7 @@ function mockMcpdClient(servers: Array<{ id: string; name: string; transport: st
|
||||
forward: vi.fn(async () => ({ status: 200, body: servers })),
|
||||
withTimeout: vi.fn(() => client),
|
||||
withHeaders: vi.fn(() => client),
|
||||
withToken: vi.fn(() => client),
|
||||
};
|
||||
return client;
|
||||
}
|
||||
|
||||
@@ -30,9 +30,13 @@ function mockMcpdClient() {
|
||||
delete: vi.fn(),
|
||||
forward: vi.fn(async () => ({ status: 200, body: [] })),
|
||||
withHeaders: vi.fn(),
|
||||
withToken: vi.fn(),
|
||||
withTimeout: vi.fn(),
|
||||
};
|
||||
// withHeaders returns a new client-like object (returns self for simplicity)
|
||||
// Chainable withX returns the same client for simplicity
|
||||
(client.withHeaders as ReturnType<typeof vi.fn>).mockReturnValue(client);
|
||||
(client.withToken as ReturnType<typeof vi.fn>).mockReturnValue(client);
|
||||
(client.withTimeout as ReturnType<typeof vi.fn>).mockReturnValue(client);
|
||||
return client;
|
||||
}
|
||||
|
||||
|
||||
158
src/mcplocal/tests/smoke/mcptoken.smoke.test.ts
Normal file
158
src/mcplocal/tests/smoke/mcptoken.smoke.test.ts
Normal file
@@ -0,0 +1,158 @@
|
||||
/**
|
||||
* Smoke tests: McpToken + HTTP-mode mcplocal end-to-end.
|
||||
*
|
||||
* Exercises the full public CLI contract:
|
||||
* 1. `mcpctl create project` + `mcpctl create mcptoken`
|
||||
* 2. `mcpctl test mcp <url> --token $TOK --expect-tools …` → exit 0
|
||||
* 3. Same token against a different project → exit 1 (403)
|
||||
* 4. Revoke the token, retry → exit 1 (401) within the negative-cache window
|
||||
* 5. --expect-tools <nonexistent> → exit 2 (contract failure)
|
||||
*
|
||||
* Target endpoint: $MCPGW_URL (default https://mcp.ad.itaz.eu). The containerized
|
||||
* mcplocal must be deployed and reachable. If the /healthz preflight fails we
|
||||
* skip the whole suite with a clear message.
|
||||
*
|
||||
* Run with: pnpm test:smoke
|
||||
*/
|
||||
import { describe, it, expect, beforeAll } from 'vitest';
|
||||
import http from 'node:http';
|
||||
import https from 'node:https';
|
||||
import { execSync } from 'node:child_process';
|
||||
|
||||
const MCPGW_URL = process.env.MCPGW_URL ?? 'https://mcp.ad.itaz.eu';
|
||||
const PROJECT_NAME = `smoke-mcptoken-${Date.now().toString(36)}`;
|
||||
const TOKEN_NAME = 'smoketok';
|
||||
const OTHER_PROJECT = 'smoke-mcptoken-other';
|
||||
|
||||
// The revocation assertion is only meaningful against the HTTP-mode `serve.ts`
|
||||
// entry, which has the token-introspection cache (5s negative TTL). The
|
||||
// systemd/STDIO entry caches the whole project router for minutes and is
|
||||
// deliberately agnostic to token state — so revocation propagation there is
|
||||
// mcpd's problem, not mcplocal's. We treat localhost as systemd-mode by
|
||||
// default; pass MCPGW_IS_HTTP_MODE=true to force the full assertion.
|
||||
const IS_HTTP_MODE = process.env.MCPGW_IS_HTTP_MODE === 'true'
|
||||
|| (!/^(http|https):\/\/(localhost|127\.|0\.0\.0\.0)/i.test(MCPGW_URL));
|
||||
|
||||
interface CliResult { code: number; stdout: string; stderr: string }
|
||||
|
||||
function run(args: string): CliResult {
|
||||
try {
|
||||
const stdout = execSync(`mcpctl ${args}`, {
|
||||
encoding: 'utf-8',
|
||||
timeout: 30_000,
|
||||
stdio: ['ignore', 'pipe', 'pipe'],
|
||||
});
|
||||
return { code: 0, stdout: stdout.trim(), stderr: '' };
|
||||
} catch (err) {
|
||||
const e = err as { status?: number; stdout?: Buffer | string; stderr?: Buffer | string };
|
||||
return {
|
||||
code: e.status ?? 1,
|
||||
stdout: e.stdout ? (typeof e.stdout === 'string' ? e.stdout : e.stdout.toString('utf-8')) : '',
|
||||
stderr: e.stderr ? (typeof e.stderr === 'string' ? e.stderr : e.stderr.toString('utf-8')) : '',
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
function healthz(url: string, timeoutMs = 5000): Promise<boolean> {
|
||||
return new Promise((resolve) => {
|
||||
const parsed = new URL(`${url.replace(/\/$/, '')}/healthz`);
|
||||
const driver = parsed.protocol === 'https:' ? https : http;
|
||||
const req = driver.get(
|
||||
{
|
||||
hostname: parsed.hostname,
|
||||
port: parsed.port || (parsed.protocol === 'https:' ? 443 : 80),
|
||||
path: parsed.pathname,
|
||||
timeout: timeoutMs,
|
||||
},
|
||||
(res) => {
|
||||
resolve((res.statusCode ?? 500) < 500);
|
||||
res.resume();
|
||||
},
|
||||
);
|
||||
req.on('error', () => resolve(false));
|
||||
req.on('timeout', () => { req.destroy(); resolve(false); });
|
||||
});
|
||||
}
|
||||
|
||||
// Suite-level state threaded between the sequential `it` blocks below.
let gatewayUp = false;                  // set by the beforeAll /healthz preflight
let rawToken = '';                      // captured from `create mcptoken` stdout
let knownToolName: string | undefined;  // first tool reported by `test mcp`, if any
|
||||
|
||||
// NOTE: the `it` blocks are order-dependent — they share PROJECT_NAME/rawToken
// state and must run sequentially (vitest default within one describe).
describe('mcptoken smoke', () => {
  // Preflight: if the gateway is unreachable, every test below becomes a
  // soft skip (each starts with `if (!gatewayUp) return;`).
  beforeAll(async () => {
    gatewayUp = await healthz(MCPGW_URL);
    if (!gatewayUp) {
      // eslint-disable-next-line no-console
      console.warn(`\n ○ mcptoken smoke: skipped — ${MCPGW_URL}/healthz unreachable. Set MCPGW_URL to override.\n`);
    }
  }, 20_000);

  it('creates the project and a project-scoped mcptoken', () => {
    if (!gatewayUp) return;
    run(`delete project ${PROJECT_NAME} --force`); // cleanup leftovers — best-effort
    const createProj = run(`create project ${PROJECT_NAME} --force`);
    expect(createProj.code).toBe(0);

    const createTok = run(`create mcptoken ${TOKEN_NAME} --project ${PROJECT_NAME} --rbac clone`);
    expect(createTok.code).toBe(0);
    // The raw PAT is printed exactly once at create time; capture it for the
    // remaining tests. The regex only pins the prefix, not the body length.
    const match = createTok.stdout.match(/mcpctl_pat_[A-Za-z0-9]+/);
    expect(match, 'raw token was printed to stdout').not.toBeNull();
    rawToken = match![0];
  });

  it('passes `mcpctl test mcp` against the token\'s project endpoint', () => {
    if (!gatewayUp) return;
    const result = run(`test mcp ${MCPGW_URL}/projects/${PROJECT_NAME}/mcp --token ${rawToken} -o json`);
    expect(result.code, result.stderr || result.stdout).toBe(0);
    // stdout may carry banner noise before the JSON report — slice from the first '{'.
    const report = JSON.parse(result.stdout.slice(result.stdout.indexOf('{'))) as {
      exitCode: number;
      tools: string[] | null;
      initialize: string;
    };
    expect(report.exitCode).toBe(0);
    expect(report.initialize).toBe('ok');
    expect(Array.isArray(report.tools)).toBe(true);
    knownToolName = report.tools?.[0];
  });

  it('fails `mcpctl test mcp` against a different project with 403', () => {
    if (!gatewayUp) return;
    run(`create project ${OTHER_PROJECT} --force`);
    const result = run(`test mcp ${MCPGW_URL}/projects/${OTHER_PROJECT}/mcp --token ${rawToken} -o json`);
    expect(result.code).toBe(1);
    const report = JSON.parse(result.stdout.slice(result.stdout.indexOf('{'))) as { error?: string };
    // Loose match: the exact wording of the rejection differs between gateway versions.
    expect(report.error ?? '').toMatch(/403|not valid for|project|Invalid/i);
  });

  it('exits 2 (contract failure) when --expect-tools names a nonexistent tool', () => {
    if (!gatewayUp) return;
    const result = run(`test mcp ${MCPGW_URL}/projects/${PROJECT_NAME}/mcp --token ${rawToken} --expect-tools __nonexistent_tool_xyz__`);
    // Exit 2 distinguishes "reachable but contract unmet" from transport/auth failure (1).
    expect(result.code).toBe(2);
  });

  it('returns 401 after the token is revoked (within the negative-cache window)', async () => {
    if (!gatewayUp) return;
    if (!IS_HTTP_MODE) {
      // eslint-disable-next-line no-console
      console.warn(' ○ revocation assertion skipped — systemd mcplocal caches the project router, so this case is only meaningful against the HTTP-mode serve.ts entry. Set MCPGW_IS_HTTP_MODE=true to force it.');
      // Still delete the token so cleanup runs the same way.
      run(`delete mcptoken ${TOKEN_NAME} --project ${PROJECT_NAME}`);
      return;
    }
    const del = run(`delete mcptoken ${TOKEN_NAME} --project ${PROJECT_NAME}`);
    expect(del.code).toBe(0);
    // Introspection negative TTL defaults to 5s — wait 7s to be safe.
    await new Promise((r) => setTimeout(r, 7_000));
    const result = run(`test mcp ${MCPGW_URL}/projects/${PROJECT_NAME}/mcp --token ${rawToken} -o json`);
    expect(result.code).toBe(1);
    const report = JSON.parse(result.stdout.slice(result.stdout.indexOf('{'))) as { error?: string };
    expect(report.error ?? '').toMatch(/401|revoked|Invalid token/i);
  }, 20_000);

  it('cleans up test fixtures', () => {
    if (!gatewayUp) return;
    run(`delete project ${PROJECT_NAME} --force`);
    run(`delete project ${OTHER_PROJECT} --force`);
    // Trivially-true assertion keeps this `it` from being reported as empty.
    expect(knownToolName === undefined || typeof knownToolName === 'string').toBe(true);
  });
});
|
||||
@@ -3,3 +3,5 @@ export * from './validation/index.js';
|
||||
export * from './constants/index.js';
|
||||
export * from './utils/index.js';
|
||||
export * from './secrets/index.js';
|
||||
export * from './tokens/index.js';
|
||||
export * from './mcp-http/index.js';
|
||||
|
||||
246
src/shared/src/mcp-http/index.ts
Normal file
246
src/shared/src/mcp-http/index.ts
Normal file
@@ -0,0 +1,246 @@
|
||||
/**
|
||||
* Reusable Streamable-HTTP MCP client.
|
||||
*
|
||||
* Handles:
|
||||
* - Bearer auth (session tokens or McpToken PATs)
|
||||
* - mcp-session-id round-trip
|
||||
* - Both JSON and text/event-stream response bodies
|
||||
* - JSON-RPC id correlation when a response is multiplexed with notifications
|
||||
*
|
||||
* Used by the smoke suite (`SmokeMcpSession` is a thin wrapper around this)
|
||||
* and by `mcpctl test mcp <url>`.
|
||||
*/
|
||||
import http from 'node:http';
|
||||
import https from 'node:https';
|
||||
|
||||
/** Options accepted by `McpHttpSession`. */
export interface McpHttpSessionOptions {
  /** Bearer to send on every request. Accepts raw tokens (no "Bearer " prefix). */
  bearer?: string;
  /** Additional headers merged into every request. */
  headers?: Record<string, string>;
  /** Timeout per HTTP request in milliseconds. Defaults to 30_000. */
  timeoutMs?: number;
}
|
||||
|
||||
/** One entry from a `tools/list` response. */
export interface ToolInfo {
  name: string;
  description?: string;
  /** Tool argument schema; kept opaque here — callers interpret it. */
  inputSchema?: unknown;
}

/** Result payload of a `tools/call` request. */
export interface ToolCallResult {
  /** Content parts as returned by the server; textual parts carry `text`. */
  content: Array<{ type: string; text?: string }>;
  /** Set when the tool itself reported an error (transport/protocol layers were fine). */
  isError?: boolean;
}
|
||||
|
||||
/** Arguments for `rawHttpRequest`. */
interface HttpRequestArgs {
  url: string;
  method: string;
  headers?: Record<string, string>;
  /** Raw request body, written verbatim when present. */
  body?: string;
  /** Per-request timeout in ms; `rawHttpRequest` defaults it to 30_000. */
  timeoutMs?: number;
}

/** Fully-buffered HTTP response. */
interface HttpRequestResult {
  status: number;
  headers: http.IncomingHttpHeaders;
  /** Response body decoded as UTF-8. */
  body: string;
}
|
||||
|
||||
function rawHttpRequest(opts: HttpRequestArgs): Promise<HttpRequestResult> {
|
||||
return new Promise((resolve, reject) => {
|
||||
const parsed = new URL(opts.url);
|
||||
const driver = parsed.protocol === 'https:' ? https : http;
|
||||
const req = driver.request(
|
||||
{
|
||||
hostname: parsed.hostname,
|
||||
port: parsed.port || (parsed.protocol === 'https:' ? 443 : 80),
|
||||
path: parsed.pathname + parsed.search,
|
||||
method: opts.method,
|
||||
headers: opts.headers,
|
||||
timeout: opts.timeoutMs ?? 30_000,
|
||||
},
|
||||
(res) => {
|
||||
const chunks: Buffer[] = [];
|
||||
res.on('data', (chunk: Buffer) => chunks.push(chunk));
|
||||
res.on('end', () => {
|
||||
resolve({
|
||||
status: res.statusCode ?? 0,
|
||||
headers: res.headers,
|
||||
body: Buffer.concat(chunks).toString('utf-8'),
|
||||
});
|
||||
});
|
||||
},
|
||||
);
|
||||
req.on('error', reject);
|
||||
req.on('timeout', () => {
|
||||
req.destroy();
|
||||
reject(new Error('Request timed out'));
|
||||
});
|
||||
if (opts.body) req.write(opts.body);
|
||||
req.end();
|
||||
});
|
||||
}
|
||||
|
||||
function parseSse(body: string): unknown[] {
|
||||
const messages: unknown[] = [];
|
||||
for (const line of body.split('\n')) {
|
||||
if (line.startsWith('data: ')) {
|
||||
try {
|
||||
messages.push(JSON.parse(line.slice(6)));
|
||||
} catch {
|
||||
// skip malformed SSE data line
|
||||
}
|
||||
}
|
||||
}
|
||||
return messages;
|
||||
}
|
||||
|
||||
/** Thrown when the server returned a response JSON-RPC error payload. */
|
||||
export class McpProtocolError extends Error {
|
||||
constructor(public readonly code: number, message: string) {
|
||||
super(`MCP error ${code}: ${message}`);
|
||||
this.name = 'McpProtocolError';
|
||||
}
|
||||
}
|
||||
|
||||
/** Thrown when the HTTP layer rejected the request (auth, transport, 5xx). */
|
||||
export class McpTransportError extends Error {
|
||||
constructor(public readonly status: number, public readonly body: string, message?: string) {
|
||||
super(message ?? `HTTP ${status}: ${body.slice(0, 200)}`);
|
||||
this.name = 'McpTransportError';
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Stateful Streamable-HTTP MCP client session.
 *
 * Tracks the server-issued `mcp-session-id` across requests and correlates
 * JSON-RPC responses by id when the server multiplexes them with
 * notifications over an event-stream body.
 */
export class McpHttpSession {
  // Captured from the first response's `mcp-session-id` header; echoed back
  // on every later request (see buildHeaders) and cleared by close().
  private sessionId: string | undefined;
  // Monotonic JSON-RPC request id used for response correlation in send().
  private nextId = 1;

  constructor(
    /** Full URL of the MCP endpoint (e.g. `https://mcp.example.com/projects/foo/mcp`). */
    public readonly url: string,
    private readonly options: McpHttpSessionOptions = {},
  ) {}

  /** Assemble per-request headers: content negotiation, caller extras, session id, bearer. */
  private buildHeaders(extra: Record<string, string> = {}): Record<string, string> {
    const headers: Record<string, string> = {
      'Content-Type': 'application/json',
      'Accept': 'application/json, text/event-stream',
      ...(this.options.headers ?? {}),
      ...extra,
    };
    if (this.sessionId) headers['mcp-session-id'] = this.sessionId;
    if (this.options.bearer) headers['Authorization'] = `Bearer ${this.options.bearer}`;
    return headers;
  }

  /**
   * Send a JSON-RPC request and wait for the response with a matching id.
   * Handles both single JSON and multiplexed SSE bodies.
   *
   * @throws McpTransportError when the HTTP status is >= 400.
   * @throws McpProtocolError when the JSON-RPC payload carries an `error`.
   */
  async send(method: string, params: Record<string, unknown> = {}): Promise<unknown> {
    const id = this.nextId++;
    const request = { jsonrpc: '2.0', id, method, params };

    const args: HttpRequestArgs = {
      url: this.url,
      method: 'POST',
      headers: this.buildHeaders(),
      body: JSON.stringify(request),
    };
    if (this.options.timeoutMs !== undefined) args.timeoutMs = this.options.timeoutMs;
    const result = await rawHttpRequest(args);

    // Adopt the session id the first time the server offers one.
    if (!this.sessionId) {
      const sid = result.headers['mcp-session-id'];
      if (typeof sid === 'string') this.sessionId = sid;
    }

    if (result.status >= 400) {
      // Prefer a server-provided message — either `{error: "…"}` or a
      // JSON-RPC `{error: {message}}` — else fall back to a truncated body.
      let message = `HTTP ${result.status}`;
      try {
        const body = JSON.parse(result.body) as { error?: string | { message?: string } };
        const errField = body.error;
        if (typeof errField === 'string') message = errField;
        else if (errField && typeof errField === 'object' && typeof errField.message === 'string') message = errField.message;
      } catch {
        message = `HTTP ${result.status}: ${result.body.slice(0, 200)}`;
      }
      throw new McpTransportError(result.status, result.body, message);
    }

    // An event-stream body may interleave notifications with the response;
    // a plain JSON body is a single message.
    const messages = result.headers['content-type']?.includes('text/event-stream')
      ? parseSse(result.body)
      : [JSON.parse(result.body)];

    const matched = messages.find((m) => {
      const msg = m as { id?: unknown };
      return msg.id === id;
    }) as { result?: unknown; error?: { code: number; message: string } } | undefined;

    // NOTE(review): when no id matches we fall back to the first message,
    // which could be a notification — presumably tolerable for the servers
    // we target, but worth confirming.
    const parsed = matched ?? messages[0] as { result?: unknown; error?: { code: number; message: string } } | undefined;
    if (!parsed) throw new Error(`No response for ${method}`);
    if (parsed.error) throw new McpProtocolError(parsed.error.code, parsed.error.message);
    return parsed.result;
  }

  /** Fire-and-forget JSON-RPC notification (no id, no response expected; errors swallowed). */
  async sendNotification(method: string, params: Record<string, unknown> = {}): Promise<void> {
    const notification = { jsonrpc: '2.0', method, params };
    const args: HttpRequestArgs = {
      url: this.url,
      method: 'POST',
      headers: this.buildHeaders(),
      body: JSON.stringify(notification),
    };
    if (this.options.timeoutMs !== undefined) args.timeoutMs = this.options.timeoutMs;
    await rawHttpRequest(args).catch(() => { /* best-effort */ });
  }

  /** MCP `initialize` handshake. */
  async initialize(): Promise<{ protocolVersion?: string; serverInfo?: { name?: string; version?: string }; capabilities?: unknown }> {
    return await this.send('initialize', {
      protocolVersion: '2024-11-05',
      capabilities: {},
      clientInfo: { name: 'mcpctl-mcp-http-client', version: '1.0.0' },
    }) as { protocolVersion?: string; serverInfo?: { name?: string; version?: string }; capabilities?: unknown };
  }

  /** List tools exposed by the endpoint. */
  async listTools(): Promise<ToolInfo[]> {
    const result = await this.send('tools/list') as { tools?: ToolInfo[] };
    return result.tools ?? [];
  }

  /** Call a tool and return its `content` payload. */
  async callTool(name: string, args: Record<string, unknown> = {}): Promise<ToolCallResult> {
    return await this.send('tools/call', { name, arguments: args }) as ToolCallResult;
  }

  /** Clean-close the session with a DELETE. Safe to call when no sessionId has been negotiated. */
  async close(): Promise<void> {
    if (this.sessionId === undefined) return;
    await rawHttpRequest({
      url: this.url,
      method: 'DELETE',
      headers: this.buildHeaders(),
      timeoutMs: 5_000,
    }).catch(() => { /* best-effort */ });
    this.sessionId = undefined;
  }
}
|
||||
|
||||
/** Best-effort healthcheck against `<base>/healthz`. */
|
||||
export async function mcpHealthCheck(baseUrl: string, timeoutMs = 5_000): Promise<boolean> {
|
||||
try {
|
||||
const res = await rawHttpRequest({ url: `${baseUrl.replace(/\/$/, '')}/healthz`, method: 'GET', timeoutMs });
|
||||
return res.status >= 200 && res.status < 500;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/** Derive `<scheme>://<host>[:port]` from a full MCP endpoint URL (for healthcheck). */
|
||||
export function deriveBaseUrl(mcpUrl: string): string {
|
||||
const u = new URL(mcpUrl);
|
||||
return `${u.protocol}//${u.host}`;
|
||||
}
|
||||
41
src/shared/src/tokens/index.ts
Normal file
41
src/shared/src/tokens/index.ts
Normal file
@@ -0,0 +1,41 @@
|
||||
import { createHash, randomBytes, timingSafeEqual } from 'node:crypto';
|
||||
|
||||
/** Prefix carried by every raw PAT; lets `isMcpToken` distinguish PATs from session tokens. */
export const TOKEN_PREFIX = 'mcpctl_pat_';

// base62 alphabet (URL/header safe). NOTE(review): it does contain visually
// ambiguous characters (0/O, 1/l/I) — acceptable because tokens are
// copy-pasted rather than transcribed, but the alphabet is NOT ambiguity-free.
const BASE62 = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789';

export interface GeneratedToken {
  /** The raw token — `mcpctl_pat_` followed by a base62 body (see `generateToken` for the length). Shown once at create time; never stored. */
  raw: string;
  /** SHA-256 hex digest of the raw value. Persist this, not the raw value. */
  hash: string;
  /** First 16 chars of the raw token, safe to display (e.g. in `mcpctl get mcptoken`). */
  prefix: string;
}
|
||||
|
||||
export function generateToken(): GeneratedToken {
|
||||
const bytes = randomBytes(24);
|
||||
let body = '';
|
||||
for (const b of bytes) body += BASE62[b % 62];
|
||||
const raw = TOKEN_PREFIX + body;
|
||||
return { raw, hash: hashToken(raw), prefix: raw.slice(0, 16) };
|
||||
}
|
||||
|
||||
export function hashToken(raw: string): string {
|
||||
return createHash('sha256').update(raw).digest('hex');
|
||||
}
|
||||
|
||||
export function isMcpToken(bearer: string): boolean {
|
||||
return bearer.startsWith(TOKEN_PREFIX);
|
||||
}
|
||||
|
||||
/** Constant-time compare two equal-length hex strings. Returns false on length mismatch. */
|
||||
export function timingSafeEqualHex(a: string, b: string): boolean {
|
||||
if (a.length !== b.length) return false;
|
||||
try {
|
||||
return timingSafeEqual(Buffer.from(a, 'hex'), Buffer.from(b, 'hex'));
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user