Compare commits
5 Commits
feat/tests
...
feat/datab
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
da90f01dc1 | ||
|
|
ae7d79da6f | ||
|
|
d2a682a460 | ||
|
|
1b66e235fc | ||
|
|
981585a943 |
@@ -1,15 +0,0 @@
|
|||||||
node_modules
|
|
||||||
*/node_modules
|
|
||||||
**/node_modules
|
|
||||||
dist
|
|
||||||
**/dist
|
|
||||||
.git
|
|
||||||
.taskmaster
|
|
||||||
.claude
|
|
||||||
*.md
|
|
||||||
!pnpm-workspace.yaml
|
|
||||||
.env
|
|
||||||
.env.*
|
|
||||||
deploy/docker-compose.yml
|
|
||||||
src/cli
|
|
||||||
src/mcplocal
|
|
||||||
@@ -1,142 +0,0 @@
|
|||||||
name: CI
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches: [main]
|
|
||||||
pull_request:
|
|
||||||
branches: [main]
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
lint:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- uses: pnpm/action-setup@v4
|
|
||||||
with:
|
|
||||||
version: 9
|
|
||||||
|
|
||||||
- uses: actions/setup-node@v4
|
|
||||||
with:
|
|
||||||
node-version: 20
|
|
||||||
cache: pnpm
|
|
||||||
|
|
||||||
- run: pnpm install --frozen-lockfile
|
|
||||||
|
|
||||||
- name: Lint
|
|
||||||
run: pnpm lint
|
|
||||||
|
|
||||||
typecheck:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- uses: pnpm/action-setup@v4
|
|
||||||
with:
|
|
||||||
version: 9
|
|
||||||
|
|
||||||
- uses: actions/setup-node@v4
|
|
||||||
with:
|
|
||||||
node-version: 20
|
|
||||||
cache: pnpm
|
|
||||||
|
|
||||||
- run: pnpm install --frozen-lockfile
|
|
||||||
|
|
||||||
- name: Generate Prisma client
|
|
||||||
run: pnpm --filter @mcpctl/db exec prisma generate
|
|
||||||
|
|
||||||
- name: Typecheck
|
|
||||||
run: pnpm typecheck
|
|
||||||
|
|
||||||
test:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- uses: pnpm/action-setup@v4
|
|
||||||
with:
|
|
||||||
version: 9
|
|
||||||
|
|
||||||
- uses: actions/setup-node@v4
|
|
||||||
with:
|
|
||||||
node-version: 20
|
|
||||||
cache: pnpm
|
|
||||||
|
|
||||||
- run: pnpm install --frozen-lockfile
|
|
||||||
|
|
||||||
- name: Generate Prisma client
|
|
||||||
run: pnpm --filter @mcpctl/db exec prisma generate
|
|
||||||
|
|
||||||
- name: Run tests
|
|
||||||
run: pnpm test:run
|
|
||||||
|
|
||||||
build:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
needs: [lint, typecheck, test]
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- uses: pnpm/action-setup@v4
|
|
||||||
with:
|
|
||||||
version: 9
|
|
||||||
|
|
||||||
- uses: actions/setup-node@v4
|
|
||||||
with:
|
|
||||||
node-version: 20
|
|
||||||
cache: pnpm
|
|
||||||
|
|
||||||
- run: pnpm install --frozen-lockfile
|
|
||||||
|
|
||||||
- name: Generate Prisma client
|
|
||||||
run: pnpm --filter @mcpctl/db exec prisma generate
|
|
||||||
|
|
||||||
- name: Build all packages
|
|
||||||
run: pnpm build
|
|
||||||
|
|
||||||
package:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
needs: [build]
|
|
||||||
if: github.ref == 'refs/heads/main' && github.event_name == 'push'
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- uses: pnpm/action-setup@v4
|
|
||||||
with:
|
|
||||||
version: 9
|
|
||||||
|
|
||||||
- uses: actions/setup-node@v4
|
|
||||||
with:
|
|
||||||
node-version: 20
|
|
||||||
cache: pnpm
|
|
||||||
|
|
||||||
- run: pnpm install --frozen-lockfile
|
|
||||||
|
|
||||||
- name: Generate Prisma client
|
|
||||||
run: pnpm --filter @mcpctl/db exec prisma generate
|
|
||||||
|
|
||||||
- name: Build TypeScript
|
|
||||||
run: pnpm build
|
|
||||||
|
|
||||||
- name: Install bun
|
|
||||||
uses: oven-sh/setup-bun@v2
|
|
||||||
|
|
||||||
- name: Install nfpm
|
|
||||||
run: |
|
|
||||||
curl -sL -o /tmp/nfpm.tar.gz "https://github.com/goreleaser/nfpm/releases/download/v2.45.0/nfpm_2.45.0_Linux_x86_64.tar.gz"
|
|
||||||
tar xzf /tmp/nfpm.tar.gz -C /usr/local/bin nfpm
|
|
||||||
|
|
||||||
- name: Bundle standalone binary
|
|
||||||
run: bun build src/cli/src/index.ts --compile --outfile dist/mcpctl
|
|
||||||
|
|
||||||
- name: Build RPM
|
|
||||||
run: nfpm pkg --packager rpm --target dist/
|
|
||||||
|
|
||||||
- name: Publish to Gitea packages
|
|
||||||
env:
|
|
||||||
GITEA_TOKEN: ${{ secrets.GITEA_TOKEN }}
|
|
||||||
run: |
|
|
||||||
RPM_FILE=$(ls dist/mcpctl-*.rpm | head -1)
|
|
||||||
curl --fail -X PUT \
|
|
||||||
-H "Authorization: token ${GITEA_TOKEN}" \
|
|
||||||
--upload-file "$RPM_FILE" \
|
|
||||||
"${{ github.server_url }}/api/packages/${{ github.repository_owner }}/rpm/upload"
|
|
||||||
3
.gitignore
vendored
3
.gitignore
vendored
@@ -9,8 +9,6 @@ dist/
|
|||||||
.env
|
.env
|
||||||
.env.local
|
.env.local
|
||||||
.env.*.local
|
.env.*.local
|
||||||
stack/.env
|
|
||||||
.portainer_password
|
|
||||||
|
|
||||||
# Logs
|
# Logs
|
||||||
logs/
|
logs/
|
||||||
@@ -37,4 +35,3 @@ pgdata/
|
|||||||
|
|
||||||
# Prisma
|
# Prisma
|
||||||
src/db/prisma/migrations/*.sql.backup
|
src/db/prisma/migrations/*.sql.backup
|
||||||
logs.sh
|
|
||||||
|
|||||||
@@ -1,272 +0,0 @@
|
|||||||
# mcpctl v2 - Corrected 3-Tier Architecture PRD
|
|
||||||
|
|
||||||
## Overview
|
|
||||||
|
|
||||||
mcpctl is a kubectl-inspired system for managing MCP (Model Context Protocol) servers. It consists of 4 components arranged in a 3-tier architecture:
|
|
||||||
|
|
||||||
```
|
|
||||||
Claude Code
|
|
||||||
|
|
|
||||||
v (stdio - MCP protocol)
|
|
||||||
mcplocal (Local Daemon - runs on developer machine)
|
|
||||||
|
|
|
||||||
v (HTTP REST)
|
|
||||||
mcpd (External Daemon - runs on server/NAS)
|
|
||||||
|
|
|
||||||
v (Docker API / K8s API)
|
|
||||||
mcp_servers (MCP server containers)
|
|
||||||
```
|
|
||||||
|
|
||||||
## Components
|
|
||||||
|
|
||||||
### 1. mcpctl (CLI Tool)
|
|
||||||
- **Package**: `src/cli/` (`@mcpctl/cli`)
|
|
||||||
- **What it is**: kubectl-like CLI for managing the entire system
|
|
||||||
- **Talks to**: mcplocal (local daemon) via HTTP REST
|
|
||||||
- **Key point**: mcpctl does NOT talk to mcpd directly. It always goes through mcplocal.
|
|
||||||
- **Distributed as**: RPM package via Gitea registry (bun compile + nfpm)
|
|
||||||
- **Commands**: get, describe, apply, setup, instance, claude, project, backup, restore, config, status
|
|
||||||
|
|
||||||
### 2. mcplocal (Local Daemon)
|
|
||||||
- **Package**: `src/local-proxy/` (rename to `src/mcplocal/`)
|
|
||||||
- **What it is**: Local daemon running on the developer's machine
|
|
||||||
- **Talks to**: mcpd (external daemon) via HTTP REST
|
|
||||||
- **Exposes to Claude**: MCP protocol via stdio (tools, resources, prompts)
|
|
||||||
- **Exposes to mcpctl**: HTTP REST API for management commands
|
|
||||||
|
|
||||||
**Core responsibility: LLM Pre-processing**
|
|
||||||
|
|
||||||
This is the intelligence layer. When Claude asks for data from MCP servers, mcplocal:
|
|
||||||
|
|
||||||
1. Receives Claude's request (e.g., "get Slack messages about security")
|
|
||||||
2. Uses a local/cheap LLM (Gemini CLI binary, Ollama, vLLM, DeepSeek API) to interpret what Claude actually wants
|
|
||||||
3. Sends narrow, filtered requests to mcpd which forwards to the actual MCP servers
|
|
||||||
4. Receives raw results from MCP servers (via mcpd)
|
|
||||||
5. Uses the local LLM again to filter/summarize results - extracting only what's relevant
|
|
||||||
6. Returns the smallest, most comprehensive response to Claude
|
|
||||||
|
|
||||||
**Why**: Claude Code tokens are expensive. Instead of dumping 500 Slack messages into Claude's context window, mcplocal uses a cheap LLM to pre-filter to the 12 relevant ones.
|
|
||||||
|
|
||||||
**LLM Provider Strategy** (already partially exists):
|
|
||||||
- Gemini CLI binary (local, free)
|
|
||||||
- Ollama (local, free)
|
|
||||||
- vLLM (local, free)
|
|
||||||
- DeepSeek API (cheap)
|
|
||||||
- OpenAI API (fallback)
|
|
||||||
- Anthropic API (fallback)
|
|
||||||
|
|
||||||
**Additional mcplocal responsibilities**:
|
|
||||||
- MCP protocol routing (namespace tools: `slack/send_message`, `jira/create_issue`)
|
|
||||||
- Connection health monitoring for upstream MCP servers
|
|
||||||
- Caching frequently requested data
|
|
||||||
- Proxying mcpctl management commands to mcpd
|
|
||||||
|
|
||||||
### 3. mcpd (External Daemon)
|
|
||||||
- **Package**: `src/mcpd/` (`@mcpctl/mcpd`)
|
|
||||||
- **What it is**: Server-side daemon that runs on centralized infrastructure (Synology NAS, cloud server, etc.)
|
|
||||||
- **Deployed via**: Docker Compose (Dockerfile + docker-compose.yml)
|
|
||||||
- **Database**: PostgreSQL for state, audit logs, access control
|
|
||||||
|
|
||||||
**Core responsibilities**:
|
|
||||||
- **Deploy and run MCP server containers** (Docker now, Kubernetes later)
|
|
||||||
- **Instance lifecycle management**: start, stop, restart, logs, inspect
|
|
||||||
- **MCP server registry**: Store server definitions, configuration templates, profiles
|
|
||||||
- **Project management**: Group MCP profiles into projects for Claude sessions
|
|
||||||
- **Auditing**: Log every operation - who ran what, when, with what result
|
|
||||||
- **Access management**: Users, sessions, permissions - who can access which MCP servers
|
|
||||||
- **Credential storage**: MCP servers often need API tokens (Slack, Jira, GitHub) - stored securely on server side, never exposed to local machine
|
|
||||||
- **Backup/restore**: Export and import configuration
|
|
||||||
|
|
||||||
**Key point**: mcpd holds the credentials. When mcplocal asks mcpd to query Slack, mcpd runs the Slack MCP server container with the proper SLACK_TOKEN injected - mcplocal never sees the token.
|
|
||||||
|
|
||||||
### 4. mcp_servers (MCP Server Containers)
|
|
||||||
- **What they are**: The actual MCP server processes (Slack, Jira, GitHub, Terraform, filesystem, postgres, etc.)
|
|
||||||
- **Managed by**: mcpd via Docker/Podman API
|
|
||||||
- **Network**: Isolated network, only accessible by mcpd
|
|
||||||
- **Credentials**: Injected by mcpd as environment variables
|
|
||||||
- **Communication**: MCP protocol (stdio or SSE/HTTP) between mcpd and the containers
|
|
||||||
|
|
||||||
## Data Flow Examples
|
|
||||||
|
|
||||||
### Example 1: Claude asks for Slack messages
|
|
||||||
```
|
|
||||||
Claude: "Get messages about security incidents from the last week"
|
|
||||||
|
|
|
||||||
v (MCP tools/call: slack/search_messages)
|
|
||||||
mcplocal:
|
|
||||||
1. Intercepts the tool call
|
|
||||||
2. Calls local Gemini: "User wants security incident messages from last week.
|
|
||||||
Generate optimal Slack search query and date filters."
|
|
||||||
3. Gemini returns: query="security incident OR vulnerability OR CVE", after="2024-01-15"
|
|
||||||
4. Sends filtered request to mcpd
|
|
||||||
|
|
|
||||||
v (HTTP POST /api/v1/mcp/proxy)
|
|
||||||
mcpd:
|
|
||||||
1. Looks up Slack MCP instance (injects SLACK_TOKEN)
|
|
||||||
2. Forwards narrowed query to Slack MCP server container
|
|
||||||
3. Returns raw results (200 messages)
|
|
||||||
|
|
|
||||||
v (response)
|
|
||||||
mcplocal:
|
|
||||||
1. Receives 200 messages
|
|
||||||
2. Calls local Gemini: "Filter these 200 Slack messages. Keep only those
|
|
||||||
directly about security incidents. Return message IDs and 1-line summaries."
|
|
||||||
3. Gemini returns: 15 relevant messages with summaries
|
|
||||||
4. Returns filtered result to Claude
|
|
||||||
|
|
|
||||||
v (MCP response: 15 messages instead of 200)
|
|
||||||
Claude: processes only the relevant 15 messages
|
|
||||||
```
|
|
||||||
|
|
||||||
### Example 2: mcpctl management command
|
|
||||||
```
|
|
||||||
$ mcpctl get servers
|
|
||||||
|
|
|
||||||
v (HTTP GET)
|
|
||||||
mcplocal:
|
|
||||||
1. Recognizes this is a management command (not MCP data)
|
|
||||||
2. Proxies directly to mcpd (no LLM processing needed)
|
|
||||||
|
|
|
||||||
v (HTTP GET /api/v1/servers)
|
|
||||||
mcpd:
|
|
||||||
1. Queries PostgreSQL for server definitions
|
|
||||||
2. Returns list
|
|
||||||
|
|
|
||||||
v (proxied response)
|
|
||||||
mcplocal -> mcpctl -> formatted table output
|
|
||||||
```
|
|
||||||
|
|
||||||
### Example 3: mcpctl instance management
|
|
||||||
```
|
|
||||||
$ mcpctl instance start slack
|
|
||||||
|
|
|
||||||
v
|
|
||||||
mcplocal -> mcpd:
|
|
||||||
1. Creates Docker container for Slack MCP server
|
|
||||||
2. Injects SLACK_TOKEN from secure storage
|
|
||||||
3. Connects to isolated mcp-servers network
|
|
||||||
4. Logs audit entry: "user X started slack instance"
|
|
||||||
5. Returns instance status
|
|
||||||
```
|
|
||||||
|
|
||||||
## What Already Exists (completed work)
|
|
||||||
|
|
||||||
### Done and reusable as-is:
|
|
||||||
- Project structure: pnpm monorepo, TypeScript strict mode, Vitest, ESLint
|
|
||||||
- Database schema: Prisma + PostgreSQL (User, McpServer, McpProfile, Project, McpInstance, AuditLog)
|
|
||||||
- mcpd server framework: Fastify 5, routes, services, repositories, middleware
|
|
||||||
- mcpd MCP server CRUD: registration, profiles, projects
|
|
||||||
- mcpd Docker container management: dockerode, instance lifecycle
|
|
||||||
- mcpd audit logging, health monitoring, metrics, backup/restore
|
|
||||||
- mcpctl CLI framework: Commander.js, commands, config, API client, formatters
|
|
||||||
- mcpctl RPM distribution: bun compile, nfpm, Gitea publishing, shell completions
|
|
||||||
- MCP protocol routing in local-proxy: namespace tools, resources, prompts
|
|
||||||
- LLM provider abstractions: OpenAI, Anthropic, Ollama adapters (defined but unused)
|
|
||||||
- Shared types and profile templates
|
|
||||||
|
|
||||||
### Needs rework:
|
|
||||||
- mcpctl currently talks to mcpd directly -> must talk to mcplocal instead
|
|
||||||
- local-proxy is just a dumb router -> needs LLM pre-processing intelligence
|
|
||||||
- local-proxy has no HTTP API for mcpctl -> needs REST endpoints for management proxying
|
|
||||||
- mcpd has no MCP proxy endpoint -> needs endpoint that mcplocal can call to execute MCP tool calls on managed instances
|
|
||||||
- No integration between LLM providers and MCP request/response pipeline
|
|
||||||
|
|
||||||
## New Tasks Needed
|
|
||||||
|
|
||||||
### Phase 1: Rename and restructure local-proxy -> mcplocal
|
|
||||||
- Rename `src/local-proxy/` to `src/mcplocal/`
|
|
||||||
- Update all package references and imports
|
|
||||||
- Add HTTP REST server (Fastify) alongside existing stdio server
|
|
||||||
- mcplocal needs TWO interfaces: stdio for Claude, HTTP for mcpctl
|
|
||||||
|
|
||||||
### Phase 2: mcplocal management proxy
|
|
||||||
- Add REST endpoints that mirror mcpd's API (get servers, instances, projects, etc.)
|
|
||||||
- mcpctl config changes: `daemonUrl` now points to mcplocal (e.g., localhost:3200) instead of mcpd
|
|
||||||
- mcplocal proxies management requests to mcpd (configurable `mcpdUrl` e.g., http://nas:3100)
|
|
||||||
- Pass-through with no LLM processing for management commands
|
|
||||||
|
|
||||||
### Phase 3: mcpd MCP proxy endpoint
|
|
||||||
- Add `/api/v1/mcp/proxy` endpoint to mcpd
|
|
||||||
- Accepts: `{ serverId, method, params }` - execute an MCP tool call on a managed instance
|
|
||||||
- mcpd looks up the instance, connects to the container, executes the MCP call, returns result
|
|
||||||
- This is how mcplocal talks to MCP servers without needing direct Docker access
|
|
||||||
|
|
||||||
### Phase 4: LLM pre-processing pipeline in mcplocal
|
|
||||||
- Create request interceptor in mcplocal's MCP router
|
|
||||||
- Before forwarding `tools/call` to mcpd, run the request through LLM for interpretation
|
|
||||||
- After receiving response from mcpd, run through LLM for filtering/summarization
|
|
||||||
- LLM provider selection based on config (prefer local/cheap models)
|
|
||||||
- Configurable: enable/disable pre-processing per server or per tool
|
|
||||||
- Bypass for simple operations (list, create, delete - no filtering needed)
|
|
||||||
|
|
||||||
### Phase 5: Smart context optimization
|
|
||||||
- Token counting: estimate how many tokens the raw response would consume
|
|
||||||
- Decision logic: if raw response < threshold, skip LLM filtering (not worth the latency)
|
|
||||||
- If raw response > threshold, filter with LLM
|
|
||||||
- Cache LLM filtering decisions for repeated similar queries
|
|
||||||
- Metrics: track tokens saved, latency added by filtering
|
|
||||||
|
|
||||||
### Phase 6: mcpctl -> mcplocal migration
|
|
||||||
- Update mcpctl's default daemonUrl to point to mcplocal (localhost:3200)
|
|
||||||
- Update all CLI commands to work through mcplocal proxy
|
|
||||||
- Add `mcpctl config set mcpd-url <url>` for configuring upstream mcpd
|
|
||||||
- Add `mcpctl config set mcplocal-url <url>` for configuring local daemon
|
|
||||||
- Health check: `mcpctl status` shows both mcplocal and mcpd connectivity
|
|
||||||
- Shell completions update if needed
|
|
||||||
|
|
||||||
### Phase 7: End-to-end integration testing
|
|
||||||
- Test full flow: mcpctl -> mcplocal -> mcpd -> mcp_server -> response -> LLM filter -> Claude
|
|
||||||
- Test management commands pass through correctly
|
|
||||||
- Test LLM pre-processing reduces context window size
|
|
||||||
- Test credential isolation (mcplocal never sees MCP server credentials)
|
|
||||||
- Test health monitoring across all tiers
|
|
||||||
|
|
||||||
## Authentication & Authorization
|
|
||||||
|
|
||||||
### Database ownership
|
|
||||||
- **mcpd owns the database** (PostgreSQL). It is the only component that talks to the DB.
|
|
||||||
- mcplocal has NO database. It is stateless (config file only).
|
|
||||||
- mcpctl has NO database. It stores user credentials locally in `~/.mcpctl/config.yaml`.
|
|
||||||
|
|
||||||
### Auth flow
|
|
||||||
```
|
|
||||||
mcpctl login
|
|
||||||
|
|
|
||||||
v (user enters mcpd URL + credentials)
|
|
||||||
mcpctl stores API token in ~/.mcpctl/config.yaml
|
|
||||||
|
|
|
||||||
v (passes token to mcplocal config)
|
|
||||||
mcplocal authenticates to mcpd using Bearer token on every request
|
|
||||||
|
|
|
||||||
v (Authorization: Bearer <token>)
|
|
||||||
mcpd validates token against Session table in PostgreSQL
|
|
||||||
|
|
|
||||||
v (authenticated request proceeds)
|
|
||||||
```
|
|
||||||
|
|
||||||
### mcpctl responsibilities
|
|
||||||
- `mcpctl login` command: prompts user for mcpd URL and credentials (username/password or API token)
|
|
||||||
- `mcpctl login` calls mcpd's auth endpoint to get a session token
|
|
||||||
- Stores the token in `~/.mcpctl/config.yaml` (or `~/.mcpctl/credentials` with restricted permissions)
|
|
||||||
- Passes the token to mcplocal (either via config or as startup argument)
|
|
||||||
- `mcpctl logout` command: invalidates the session token
|
|
||||||
|
|
||||||
### mcplocal responsibilities
|
|
||||||
- Reads auth token from its config (set by mcpctl)
|
|
||||||
- Attaches `Authorization: Bearer <token>` header to ALL requests to mcpd
|
|
||||||
- If mcpd returns 401, mcplocal returns appropriate error to mcpctl/Claude
|
|
||||||
- Does NOT store credentials itself - they come from mcpctl's config
|
|
||||||
|
|
||||||
### mcpd responsibilities
|
|
||||||
- Owns User and Session tables
|
|
||||||
- Provides auth endpoints: `POST /api/v1/auth/login`, `POST /api/v1/auth/logout`
|
|
||||||
- Validates Bearer tokens on every request via auth middleware (already exists)
|
|
||||||
- Returns 401 for invalid/expired tokens
|
|
||||||
- Audit logs include the authenticated user
|
|
||||||
|
|
||||||
## Non-functional Requirements
|
|
||||||
- mcplocal must start fast (developer's machine, runs per-session or as daemon)
|
|
||||||
- LLM pre-processing must not add more than 2-3 seconds latency
|
|
||||||
- If local LLM is unavailable, fall back to passing data through unfiltered
|
|
||||||
- All components must be independently deployable and testable
|
|
||||||
- mcpd must remain stateless (outside of DB) and horizontally scalable
|
|
||||||
@@ -307,7 +307,7 @@
|
|||||||
"dependencies": [
|
"dependencies": [
|
||||||
"4"
|
"4"
|
||||||
],
|
],
|
||||||
"status": "done",
|
"status": "pending",
|
||||||
"subtasks": [
|
"subtasks": [
|
||||||
{
|
{
|
||||||
"id": 1,
|
"id": 1,
|
||||||
@@ -367,8 +367,7 @@
|
|||||||
"testStrategy": "Run full integration test suite. Verify coverage >85% for project-related files.",
|
"testStrategy": "Run full integration test suite. Verify coverage >85% for project-related files.",
|
||||||
"parentId": "undefined"
|
"parentId": "undefined"
|
||||||
}
|
}
|
||||||
],
|
]
|
||||||
"updatedAt": "2026-02-21T04:30:43.622Z"
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "6",
|
"id": "6",
|
||||||
@@ -381,7 +380,7 @@
|
|||||||
"3",
|
"3",
|
||||||
"4"
|
"4"
|
||||||
],
|
],
|
||||||
"status": "done",
|
"status": "pending",
|
||||||
"subtasks": [
|
"subtasks": [
|
||||||
{
|
{
|
||||||
"id": 1,
|
"id": 1,
|
||||||
@@ -466,8 +465,7 @@
|
|||||||
"testStrategy": "Unit tests for getLogs. Integration test: run container, tail logs, verify output.",
|
"testStrategy": "Unit tests for getLogs. Integration test: run container, tail logs, verify output.",
|
||||||
"parentId": "undefined"
|
"parentId": "undefined"
|
||||||
}
|
}
|
||||||
],
|
]
|
||||||
"updatedAt": "2026-02-21T04:52:51.544Z"
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "7",
|
"id": "7",
|
||||||
@@ -516,9 +514,8 @@
|
|||||||
"dependencies": [
|
"dependencies": [
|
||||||
"7"
|
"7"
|
||||||
],
|
],
|
||||||
"status": "done",
|
"status": "pending",
|
||||||
"subtasks": [],
|
"subtasks": []
|
||||||
"updatedAt": "2026-02-21T04:55:53.675Z"
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "9",
|
"id": "9",
|
||||||
@@ -531,9 +528,8 @@
|
|||||||
"7",
|
"7",
|
||||||
"4"
|
"4"
|
||||||
],
|
],
|
||||||
"status": "done",
|
"status": "pending",
|
||||||
"subtasks": [],
|
"subtasks": []
|
||||||
"updatedAt": "2026-02-21T05:14:48.368Z"
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "10",
|
"id": "10",
|
||||||
@@ -546,9 +542,8 @@
|
|||||||
"7",
|
"7",
|
||||||
"5"
|
"5"
|
||||||
],
|
],
|
||||||
"status": "done",
|
"status": "pending",
|
||||||
"subtasks": [],
|
"subtasks": []
|
||||||
"updatedAt": "2026-02-21T05:17:02.390Z"
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "11",
|
"id": "11",
|
||||||
@@ -560,9 +555,8 @@
|
|||||||
"dependencies": [
|
"dependencies": [
|
||||||
"1"
|
"1"
|
||||||
],
|
],
|
||||||
"status": "done",
|
"status": "pending",
|
||||||
"subtasks": [],
|
"subtasks": []
|
||||||
"updatedAt": "2026-02-21T05:00:28.388Z"
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "12",
|
"id": "12",
|
||||||
@@ -574,74 +568,8 @@
|
|||||||
"dependencies": [
|
"dependencies": [
|
||||||
"11"
|
"11"
|
||||||
],
|
],
|
||||||
"status": "done",
|
"status": "pending",
|
||||||
"subtasks": [
|
"subtasks": []
|
||||||
{
|
|
||||||
"id": 1,
|
|
||||||
"title": "Create main.ts entry point with configuration loading",
|
|
||||||
"description": "Implement the main.ts entry point that reads proxy configuration from file or CLI arguments, initializes upstreams based on config, and boots the StdioProxyServer.",
|
|
||||||
"dependencies": [],
|
|
||||||
"details": "Create src/local-proxy/src/main.ts that: 1) Parses command-line arguments (--config flag for JSON config path, or individual --upstream flags), 2) Loads ProxyConfig from JSON file if specified, 3) Instantiates StdioUpstream or HttpUpstream for each UpstreamConfig based on transport type, 4) Calls start() on each StdioUpstream to spawn child processes, 5) Adds all upstreams to McpRouter via addUpstream(), 6) Creates StdioProxyServer with the router and calls start(), 7) Handles SIGTERM/SIGINT for graceful shutdown calling router.closeAll(). Use a simple arg parser or process.argv directly. Export a main() function and call it when run directly.",
|
|
||||||
"status": "done",
|
|
||||||
"testStrategy": "Test config file loading with valid/invalid JSON. Test CLI argument parsing. Integration test: spawn proxy with mock upstream config and verify it starts and responds to initialize request.",
|
|
||||||
"parentId": "undefined",
|
|
||||||
"updatedAt": "2026-02-21T05:05:48.624Z"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": 2,
|
|
||||||
"title": "Add resource forwarding support to McpRouter",
|
|
||||||
"description": "Extend McpRouter to handle resources/list and resources/read methods, forwarding them to upstream servers with proper namespacing similar to tools.",
|
|
||||||
"dependencies": [
|
|
||||||
1
|
|
||||||
],
|
|
||||||
"details": "Modify src/local-proxy/src/router.ts to: 1) Add a resourceToServer Map similar to toolToServer, 2) Create discoverResources() method that calls resources/list on each upstream and aggregates results with namespaced URIs (e.g., 'servername://resource'), 3) Add 'resources' to capabilities in initialize response, 4) Handle 'resources/list' in route() by calling discoverResources(), 5) Handle 'resources/read' by parsing the namespaced URI, extracting server name, stripping prefix, and forwarding to correct upstream, 6) Handle 'resources/subscribe' and 'resources/unsubscribe' if needed for completeness. Update types.ts if additional resource-related types are needed.",
|
|
||||||
"status": "done",
|
|
||||||
"testStrategy": "Unit test discoverResources() with mocked upstreams returning different resources. Test resources/read routing extracts correct server and forwards properly. Test error handling when resource URI has unknown server prefix.",
|
|
||||||
"parentId": "undefined",
|
|
||||||
"updatedAt": "2026-02-21T05:05:48.626Z"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": 3,
|
|
||||||
"title": "Add prompt forwarding support to McpRouter",
|
|
||||||
"description": "Extend McpRouter to handle prompts/list and prompts/get methods, forwarding them to upstream servers with proper namespacing.",
|
|
||||||
"dependencies": [
|
|
||||||
1
|
|
||||||
],
|
|
||||||
"details": "Modify src/local-proxy/src/router.ts to: 1) Add a promptToServer Map for tracking prompt origins, 2) Create discoverPrompts() method that calls prompts/list on each upstream and aggregates with namespaced names (e.g., 'servername/prompt-name'), 3) Add 'prompts' to capabilities in initialize response, 4) Handle 'prompts/list' in route() by calling discoverPrompts(), 5) Handle 'prompts/get' by parsing namespaced prompt name, extracting server, stripping prefix, and forwarding to correct upstream. Follow same pattern as tools for consistency.",
|
|
||||||
"status": "done",
|
|
||||||
"testStrategy": "Unit test discoverPrompts() with mocked upstreams. Test prompts/get routing correctly forwards to upstream. Test error handling for unknown prompt names.",
|
|
||||||
"parentId": "undefined",
|
|
||||||
"updatedAt": "2026-02-21T05:05:48.638Z"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": 4,
|
|
||||||
"title": "Implement notification forwarding from upstreams to client",
|
|
||||||
"description": "Add support for forwarding JSON-RPC notifications from upstream servers to the proxy client, enabling real-time updates like progress notifications.",
|
|
||||||
"dependencies": [
|
|
||||||
1
|
|
||||||
],
|
|
||||||
"details": "Modify upstream classes and server: 1) Add onNotification callback to UpstreamConnection interface in types.ts, 2) Update StdioUpstream to detect notifications (messages without 'id' field) in stdout handler and invoke onNotification callback with namespaced method if needed, 3) Update HttpUpstream if SSE support is needed (may require EventSource or SSE client for true streaming), 4) Add setNotificationHandler(callback) method to McpRouter that registers handler and wires it to all upstreams, 5) Update StdioProxyServer to call router.setNotificationHandler() with a function that writes notification JSON to stdout, 6) Consider namespacing notification params to indicate source server.",
|
|
||||||
"status": "done",
|
|
||||||
"testStrategy": "Test StdioUpstream correctly identifies and forwards notifications. Integration test: upstream sends progress notification, verify proxy forwards it to stdout. Test notifications are properly namespaced with source server name.",
|
|
||||||
"parentId": "undefined",
|
|
||||||
"updatedAt": "2026-02-21T05:05:48.641Z"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": 5,
|
|
||||||
"title": "Implement connection health monitoring with reconnection",
|
|
||||||
"description": "Add health monitoring for upstream connections with automatic status tracking, health check pings, and reconnection logic for failed STDIO upstreams.",
|
|
||||||
"dependencies": [
|
|
||||||
1,
|
|
||||||
4
|
|
||||||
],
|
|
||||||
"details": "Create src/local-proxy/src/health.ts with HealthMonitor class: 1) Track connection state for each upstream (healthy, degraded, disconnected), 2) Implement periodic health checks using ping/pong or a lightweight method like calling initialize, 3) Emit health status change events via EventEmitter pattern, 4) Add reconnection logic for StdioUpstream: detect process exit, attempt restart with exponential backoff (1s, 2s, 4s... max 30s), 5) Update McpRouter to accept HealthMonitor instance and use it to filter available upstreams, 6) Add health status to proxy logs/stderr for debugging, 7) Optionally expose health status via a special proxy method (e.g., 'proxy/health'). Update main.ts to instantiate and wire HealthMonitor.",
|
|
||||||
"status": "done",
|
|
||||||
"testStrategy": "Test health check detects unresponsive upstream. Test reconnection attempts with mocked process that fails then succeeds. Test exponential backoff timing. Test degraded upstream is excluded from tool discovery until healthy.",
|
|
||||||
"parentId": "undefined",
|
|
||||||
"updatedAt": "2026-02-21T05:05:48.643Z"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"updatedAt": "2026-02-21T05:05:48.643Z"
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "13",
|
"id": "13",
|
||||||
@@ -653,9 +581,8 @@
|
|||||||
"dependencies": [
|
"dependencies": [
|
||||||
"12"
|
"12"
|
||||||
],
|
],
|
||||||
"status": "done",
|
"status": "pending",
|
||||||
"subtasks": [],
|
"subtasks": []
|
||||||
"updatedAt": "2026-02-21T05:22:44.011Z"
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "14",
|
"id": "14",
|
||||||
@@ -667,9 +594,8 @@
|
|||||||
"dependencies": [
|
"dependencies": [
|
||||||
"3"
|
"3"
|
||||||
],
|
],
|
||||||
"status": "done",
|
"status": "pending",
|
||||||
"subtasks": [],
|
"subtasks": []
|
||||||
"updatedAt": "2026-02-21T05:09:18.694Z"
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "15",
|
"id": "15",
|
||||||
@@ -681,71 +607,8 @@
|
|||||||
"dependencies": [
|
"dependencies": [
|
||||||
"4"
|
"4"
|
||||||
],
|
],
|
||||||
"status": "done",
|
"status": "pending",
|
||||||
"subtasks": [
|
"subtasks": []
|
||||||
{
|
|
||||||
"id": 1,
|
|
||||||
"title": "Define Profile Template Types and Schemas",
|
|
||||||
"description": "Create TypeScript interfaces and Zod validation schemas for profile templates that extend the existing McpProfile type.",
|
|
||||||
"dependencies": [],
|
|
||||||
"details": "Create src/shared/src/profiles/types.ts with ProfileTemplate interface containing: id, serverType, name, displayName, description, category (filesystem/database/integration/etc), command, args, requiredEnvVars (with EnvTemplateEntry array), optionalEnvVars, defaultPermissions, setupInstructions, and documentationUrl. Also create profileTemplateSchema.ts with Zod schemas for validation. The templates should be immutable definitions that can be instantiated into actual profiles.",
|
|
||||||
"status": "pending",
|
|
||||||
"testStrategy": "Unit test Zod schemas with valid and invalid template data. Verify type compatibility with existing McpServerConfig and McpProfile types.",
|
|
||||||
"parentId": "undefined"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": 2,
|
|
||||||
"title": "Implement Common MCP Server Profile Templates",
|
|
||||||
"description": "Create profile template definitions for common MCP servers including filesystem, github, postgres, slack, and other popular integrations.",
|
|
||||||
"dependencies": [
|
|
||||||
1
|
|
||||||
],
|
|
||||||
"details": "Create src/shared/src/profiles/templates/ directory with individual template files: filesystem.ts (npx @modelcontextprotocol/server-filesystem with path args), github.ts (npx @modelcontextprotocol/server-github with GITHUB_TOKEN env), postgres.ts (npx @modelcontextprotocol/server-postgres with DATABASE_URL), slack.ts (npx @modelcontextprotocol/server-slack with SLACK_TOKEN), memory.ts, and fetch.ts. Each template exports a ProfileTemplate constant with pre-configured best-practice settings. Include clear descriptions and setup guides for each.",
|
|
||||||
"status": "pending",
|
|
||||||
"testStrategy": "Validate each template against the ProfileTemplate Zod schema. Verify all required fields are populated. Test that commands and args are syntactically correct.",
|
|
||||||
"parentId": "undefined"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": 3,
|
|
||||||
"title": "Build Profile Registry with Lookup and Filtering",
|
|
||||||
"description": "Create a profile registry that aggregates all templates and provides lookup, filtering, and search capabilities.",
|
|
||||||
"dependencies": [
|
|
||||||
1,
|
|
||||||
2
|
|
||||||
],
|
|
||||||
"details": "Create src/shared/src/profiles/registry.ts implementing a ProfileRegistry class with methods: getAll(), getById(id), getByCategory(category), getByServerType(type), search(query), and getCategories(). The registry should be a singleton that lazily loads all templates from the templates directory. Export a default registry instance. Also create src/shared/src/profiles/index.ts to export all profile-related types, templates, and the registry.",
|
|
||||||
"status": "pending",
|
|
||||||
"testStrategy": "Test registry initialization loads all templates. Test each lookup method returns correct results. Test search functionality with partial matches. Verify no duplicate IDs across templates.",
|
|
||||||
"parentId": "undefined"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": 4,
|
|
||||||
"title": "Add Profile Validation and Instantiation Utilities",
|
|
||||||
"description": "Create utility functions to validate profile templates and instantiate them into concrete profile configurations.",
|
|
||||||
"dependencies": [
|
|
||||||
1,
|
|
||||||
3
|
|
||||||
],
|
|
||||||
"details": "Create src/shared/src/profiles/utils.ts with functions: validateTemplate(template) - validates a ProfileTemplate against schema, instantiateProfile(templateId, envValues) - creates a concrete profile config from a template by filling in env vars, validateEnvValues(template, envValues) - checks if all required env vars are provided, getMissingEnvVars(template, envValues) - returns list of missing required env vars, and generateMcpJsonEntry(profile) - converts instantiated profile to .mcp.json format entry.",
|
|
||||||
"status": "pending",
|
|
||||||
"testStrategy": "Test validateTemplate with valid and invalid templates. Test instantiateProfile produces correct configs. Test env validation catches missing required vars. Test .mcp.json output matches expected format.",
|
|
||||||
"parentId": "undefined"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": 5,
|
|
||||||
"title": "Export Profiles Module and Add Integration Tests",
|
|
||||||
"description": "Export the profiles module from shared package main entry and create comprehensive integration tests.",
|
|
||||||
"dependencies": [
|
|
||||||
3,
|
|
||||||
4
|
|
||||||
],
|
|
||||||
"details": "Update src/shared/src/index.ts to add 'export * from ./profiles/index.js'. Create src/shared/src/profiles/__tests__/profiles.test.ts with tests covering: all templates are valid, registry contains expected templates, instantiation works for each template type, .mcp.json generation produces valid output, and round-trip validation (instantiate then validate). Also add documentation comments to all exported functions and types.",
|
|
||||||
"status": "pending",
|
|
||||||
"testStrategy": "Run full test suite with vitest. Verify exports are accessible from @mcpctl/shared. Integration test the full workflow: lookup template, validate, instantiate with env vars, generate .mcp.json entry.",
|
|
||||||
"parentId": "undefined"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"updatedAt": "2026-02-21T05:26:02.010Z"
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "16",
|
"id": "16",
|
||||||
@@ -757,9 +620,8 @@
|
|||||||
"dependencies": [
|
"dependencies": [
|
||||||
"6"
|
"6"
|
||||||
],
|
],
|
||||||
"status": "done",
|
"status": "pending",
|
||||||
"subtasks": [],
|
"subtasks": []
|
||||||
"updatedAt": "2026-02-21T05:11:52.795Z"
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "17",
|
"id": "17",
|
||||||
@@ -771,70 +633,8 @@
|
|||||||
"dependencies": [
|
"dependencies": [
|
||||||
"6"
|
"6"
|
||||||
],
|
],
|
||||||
"status": "done",
|
"status": "pending",
|
||||||
"subtasks": [
|
"subtasks": []
|
||||||
{
|
|
||||||
"id": 1,
|
|
||||||
"title": "Create K8s API HTTP client and connection handling",
|
|
||||||
"description": "Implement a Kubernetes API client using node:http/https to communicate with the K8s API server, including authentication, TLS handling, and base request/response utilities.",
|
|
||||||
"dependencies": [],
|
|
||||||
"details": "Create src/mcpd/src/services/k8s/k8s-client.ts with: 1) K8sClientConfig interface supporting kubeconfig file parsing, in-cluster config detection, and direct API server URL/token config. 2) HTTP client wrapper using node:http/https that handles TLS certificates, bearer token auth, and API versioning. 3) Base request methods (get, post, delete, patch) with proper error handling and response parsing. 4) Support for watching resources with streaming responses. Reference the Docker container-manager.ts pattern for constructor options and ping() implementation.",
|
|
||||||
"status": "pending",
|
|
||||||
"testStrategy": "Unit tests with mocked HTTP responses for successful API calls, auth failures, connection errors. Test kubeconfig parsing with sample config files. Test in-cluster config detection by mocking environment variables and service account token file.",
|
|
||||||
"parentId": "undefined"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": 2,
|
|
||||||
"title": "Implement K8s manifest generation for MCP servers",
|
|
||||||
"description": "Create manifest generator that converts ContainerSpec to Kubernetes Pod and Deployment YAML/JSON specifications with proper resource limits and security contexts.",
|
|
||||||
"dependencies": [
|
|
||||||
1
|
|
||||||
],
|
|
||||||
"details": "Create src/mcpd/src/services/k8s/manifest-generator.ts with: 1) generatePodSpec(spec: ContainerSpec, namespace: string) that creates a Pod manifest with container image, env vars, resource limits (CPU/memory from spec.nanoCpus and spec.memoryLimit), and labels including mcpctl.managed=true. 2) generateDeploymentSpec() for replicated deployments with selector labels. 3) generateServiceSpec() for exposing container ports. 4) Security context configuration (non-root user, read-only root filesystem, drop capabilities). 5) Map ContainerSpec fields to K8s equivalents (memoryLimit to resources.limits.memory, nanoCpus to resources.limits.cpu, etc.).",
|
|
||||||
"status": "pending",
|
|
||||||
"testStrategy": "Unit tests validating generated manifests match expected K8s spec structure. Test resource limit conversion (bytes to Ki/Mi/Gi, nanoCPUs to millicores). Test label propagation from ContainerSpec.labels. Validate manifests against K8s API schema if possible.",
|
|
||||||
"parentId": "undefined"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": 3,
|
|
||||||
"title": "Implement KubernetesOrchestrator class with McpOrchestrator interface",
|
|
||||||
"description": "Create the main KubernetesOrchestrator class that implements the McpOrchestrator interface using the K8s client and manifest generator.",
|
|
||||||
"dependencies": [
|
|
||||||
1,
|
|
||||||
2
|
|
||||||
],
|
|
||||||
"details": "Create src/mcpd/src/services/k8s/kubernetes-orchestrator.ts implementing McpOrchestrator interface: 1) Constructor accepting K8sClientConfig and default namespace. 2) ping() - call /api/v1 endpoint to verify cluster connectivity. 3) pullImage() - no-op for K8s (images pulled on pod schedule) or optionally create a pre-pull DaemonSet. 4) createContainer(spec) - generate Pod/Deployment manifest, POST to K8s API, wait for pod Ready condition, return ContainerInfo with pod name as containerId. 5) stopContainer(containerId) - scale deployment to 0 or delete pod. 6) removeContainer(containerId) - DELETE the pod/deployment resource. 7) inspectContainer(containerId) - GET pod status, map phase to ContainerInfo state (Running→running, Pending→starting, Failed→error, etc.). 8) getContainerLogs(containerId) - GET /api/v1/namespaces/{ns}/pods/{name}/log endpoint.",
|
|
||||||
"status": "pending",
|
|
||||||
"testStrategy": "Integration tests with mocked K8s API responses for each method. Test createContainer returns valid ContainerInfo with mapped state. Test state mapping from K8s pod phases. Test log retrieval with tail and since parameters. Test error handling when pod not found or API errors.",
|
|
||||||
"parentId": "undefined"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": 4,
|
|
||||||
"title": "Add namespace and multi-namespace support",
|
|
||||||
"description": "Extend KubernetesOrchestrator to support configurable namespaces, namespace creation, and querying resources across namespaces.",
|
|
||||||
"dependencies": [
|
|
||||||
3
|
|
||||||
],
|
|
||||||
"details": "Enhance src/mcpd/src/services/k8s/kubernetes-orchestrator.ts with: 1) Add namespace parameter to ContainerSpec or use labels to specify target namespace. 2) ensureNamespace(name) method that creates namespace if not exists (POST /api/v1/namespaces). 3) listContainers(namespace?: string) method to list all mcpctl-managed pods in a namespace or all namespaces. 4) Add namespace to ContainerInfo response. 5) Support 'default' namespace fallback and configurable default namespace in constructor. 6) Add namespace label to generated manifests for filtering. 7) Validate namespace names (DNS-1123 label format).",
|
|
||||||
"status": "pending",
|
|
||||||
"testStrategy": "Test namespace creation with mocked API. Test namespace validation for invalid names. Test listing pods across namespaces. Test ContainerInfo includes correct namespace. Test default namespace fallback behavior.",
|
|
||||||
"parentId": "undefined"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": 5,
|
|
||||||
"title": "Add comprehensive tests and module exports",
|
|
||||||
"description": "Create unit tests with mocked K8s API responses, integration test utilities, and export the KubernetesOrchestrator from the services module.",
|
|
||||||
"dependencies": [
|
|
||||||
3,
|
|
||||||
4
|
|
||||||
],
|
|
||||||
"details": "1) Create src/mcpd/src/services/k8s/index.ts exporting KubernetesOrchestrator, K8sClientConfig, and helper types. 2) Update src/mcpd/src/services/index.ts to export k8s module. 3) Create src/mcpd/src/services/k8s/__tests__/kubernetes-orchestrator.test.ts with mocked HTTP responses using vitest's mock system. 4) Create mock-k8s-api.ts helper that simulates K8s API responses (pod list, pod status, logs, errors). 5) Test all McpOrchestrator interface methods with success and error cases. 6) Add tests for resource limit edge cases (0 memory, very high CPU). 7) Document usage examples in code comments showing how to switch from DockerContainerManager to KubernetesOrchestrator.",
|
|
||||||
"status": "pending",
|
|
||||||
"testStrategy": "Ensure all tests pass with mocked responses. Verify test coverage for all public methods. Test error scenarios (404 pod not found, 403 forbidden, 500 server error). Optional: Add integration test script that runs against kind/minikube if available.",
|
|
||||||
"parentId": "undefined"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"updatedAt": "2026-02-21T05:30:53.921Z"
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "18",
|
"id": "18",
|
||||||
@@ -849,9 +649,8 @@
|
|||||||
"9",
|
"9",
|
||||||
"10"
|
"10"
|
||||||
],
|
],
|
||||||
"status": "done",
|
"status": "pending",
|
||||||
"subtasks": [],
|
"subtasks": []
|
||||||
"updatedAt": "2026-02-21T05:19:02.525Z"
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "19",
|
"id": "19",
|
||||||
@@ -859,7 +658,7 @@
|
|||||||
"description": "Merged into Task 3 subtasks",
|
"description": "Merged into Task 3 subtasks",
|
||||||
"details": null,
|
"details": null,
|
||||||
"testStrategy": null,
|
"testStrategy": null,
|
||||||
"priority": "low",
|
"priority": null,
|
||||||
"dependencies": [],
|
"dependencies": [],
|
||||||
"status": "cancelled",
|
"status": "cancelled",
|
||||||
"subtasks": [],
|
"subtasks": [],
|
||||||
@@ -871,7 +670,7 @@
|
|||||||
"description": "Merged into Task 5",
|
"description": "Merged into Task 5",
|
||||||
"details": null,
|
"details": null,
|
||||||
"testStrategy": null,
|
"testStrategy": null,
|
||||||
"priority": "low",
|
"priority": null,
|
||||||
"dependencies": [],
|
"dependencies": [],
|
||||||
"status": "cancelled",
|
"status": "cancelled",
|
||||||
"subtasks": [],
|
"subtasks": [],
|
||||||
@@ -883,7 +682,7 @@
|
|||||||
"description": "Merged into Task 14",
|
"description": "Merged into Task 14",
|
||||||
"details": null,
|
"details": null,
|
||||||
"testStrategy": null,
|
"testStrategy": null,
|
||||||
"priority": "low",
|
"priority": null,
|
||||||
"dependencies": [],
|
"dependencies": [],
|
||||||
"status": "cancelled",
|
"status": "cancelled",
|
||||||
"subtasks": [],
|
"subtasks": [],
|
||||||
@@ -900,72 +699,8 @@
|
|||||||
"6",
|
"6",
|
||||||
"14"
|
"14"
|
||||||
],
|
],
|
||||||
"status": "done",
|
"status": "pending",
|
||||||
"subtasks": [
|
"subtasks": []
|
||||||
{
|
|
||||||
"id": 1,
|
|
||||||
"title": "Create MetricsCollector Service",
|
|
||||||
"description": "Implement a MetricsCollector service in src/mcpd/src/services/metrics-collector.ts that tracks instance health metrics, uptime, request counts, error rates, and resource usage data.",
|
|
||||||
"dependencies": [],
|
|
||||||
"details": "Create MetricsCollector class with methods: recordRequest(), recordError(), updateInstanceMetrics(), getMetrics(). Store metrics in-memory using Map<instanceId, InstanceMetrics>. Define InstanceMetrics interface with fields: instanceId, status, uptime, requestCount, errorCount, lastRequestAt, memoryUsage, cpuUsage. Inject IMcpInstanceRepository and McpOrchestrator dependencies to gather real-time instance status from containers. Export service from src/mcpd/src/services/index.ts.",
|
|
||||||
"status": "pending",
|
|
||||||
"testStrategy": "Unit tests with mocked repository and orchestrator dependencies. Test metric recording, aggregation, and retrieval. Verify error rate calculations and uptime tracking accuracy.",
|
|
||||||
"parentId": "undefined"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": 2,
|
|
||||||
"title": "Implement Health Aggregation Service",
|
|
||||||
"description": "Create a HealthAggregator service that computes overall system health by aggregating health status across all MCP server instances.",
|
|
||||||
"dependencies": [
|
|
||||||
1
|
|
||||||
],
|
|
||||||
"details": "Add HealthAggregator class in src/mcpd/src/services/health-aggregator.ts. Methods: getOverview() returns SystemHealth with totalInstances, healthyCount, unhealthyCount, errorCount, and overallStatus (healthy/degraded/unhealthy). Use MetricsCollector to gather per-instance metrics. Include orchestrator.ping() check for runtime availability. Compute aggregate error rate and average uptime. Export from services/index.ts.",
|
|
||||||
"status": "pending",
|
|
||||||
"testStrategy": "Unit tests with mocked MetricsCollector. Test aggregation logic for various instance states. Verify overall status determination rules (e.g., >50% unhealthy = degraded).",
|
|
||||||
"parentId": "undefined"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": 3,
|
|
||||||
"title": "Create Health Monitoring REST Endpoints",
|
|
||||||
"description": "Implement REST endpoints for health monitoring: GET /api/v1/health/overview, GET /api/v1/health/instances/:id, and GET /api/v1/metrics in src/mcpd/src/routes/health-monitoring.ts.",
|
|
||||||
"dependencies": [
|
|
||||||
1,
|
|
||||||
2
|
|
||||||
],
|
|
||||||
"details": "Create registerHealthMonitoringRoutes(app, deps) function. GET /api/v1/health/overview returns SystemHealth from HealthAggregator.getOverview(). GET /api/v1/health/instances/:id returns InstanceMetrics for specific instance from MetricsCollector. GET /api/v1/metrics returns all metrics in Prometheus-compatible format or JSON. Add proper error handling for 404 when instance not found. Register routes in src/mcpd/src/routes/index.ts and wire up in server.ts.",
|
|
||||||
"status": "pending",
|
|
||||||
"testStrategy": "Integration tests using Fastify inject(). Test all three endpoints with mocked services. Verify 200 responses with correct payload structure, 404 for missing instances.",
|
|
||||||
"parentId": "undefined"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": 4,
|
|
||||||
"title": "Add Request/Error Metrics Middleware",
|
|
||||||
"description": "Create middleware in src/mcpd/src/middleware/metrics.ts that intercepts requests to record metrics for request counts and error rates per instance.",
|
|
||||||
"dependencies": [
|
|
||||||
1
|
|
||||||
],
|
|
||||||
"details": "Implement Fastify preHandler hook that extracts instance ID from request params/query where applicable. Record request start time. Use onResponse hook to record completion and calculate latency. Use onError hook to record errors with MetricsCollector.recordError(). Track metrics per-route and per-instance. Register middleware in src/mcpd/src/middleware/index.ts. Apply to instance-related routes (/api/v1/instances/*) to track per-instance metrics.",
|
|
||||||
"status": "pending",
|
|
||||||
"testStrategy": "Unit tests verifying hooks call MetricsCollector methods. Integration tests confirming request/error counts increment correctly after API calls.",
|
|
||||||
"parentId": "undefined"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": 5,
|
|
||||||
"title": "Write Comprehensive Health Monitoring Tests",
|
|
||||||
"description": "Create test suite in src/mcpd/tests/health-monitoring.test.ts covering MetricsCollector, HealthAggregator, health monitoring routes, and metrics middleware.",
|
|
||||||
"dependencies": [
|
|
||||||
1,
|
|
||||||
2,
|
|
||||||
3,
|
|
||||||
4
|
|
||||||
],
|
|
||||||
"details": "Write tests for: MetricsCollector - test recordRequest(), recordError(), getMetrics(), concurrent access safety. HealthAggregator - test getOverview() with various instance states, edge cases (no instances, all unhealthy). Routes - test /api/v1/health/overview, /api/v1/health/instances/:id, /api/v1/metrics endpoints with mocked dependencies. Middleware - test request counting, error tracking, latency recording. Use vi.mock() for dependencies following existing test patterns in the codebase.",
|
|
||||||
"status": "pending",
|
|
||||||
"testStrategy": "Self-referential - this subtask IS the test implementation. Verify all tests pass with `npm test`. Aim for >80% coverage on new health monitoring code.",
|
|
||||||
"parentId": "undefined"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"updatedAt": "2026-02-21T05:34:25.289Z"
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "23",
|
"id": "23",
|
||||||
@@ -978,71 +713,8 @@
|
|||||||
"2",
|
"2",
|
||||||
"5"
|
"5"
|
||||||
],
|
],
|
||||||
"status": "done",
|
"status": "pending",
|
||||||
"subtasks": [
|
"subtasks": []
|
||||||
{
|
|
||||||
"id": 1,
|
|
||||||
"title": "Implement BackupService for JSON export",
|
|
||||||
"description": "Create BackupService in src/mcpd/src/services/backup/ that exports servers, profiles, and projects from repositories to a structured JSON bundle.",
|
|
||||||
"dependencies": [],
|
|
||||||
"details": "Create BackupService class that uses IMcpServerRepository, IMcpProfileRepository, and IProjectRepository to fetch all data. Define a BackupBundle interface with metadata (version, timestamp, mcpctlVersion), servers array, profiles array, and projects array. Implement createBackup() method that aggregates all data into the bundle format. Add optional filtering by resource type (e.g., only servers, or only specific profiles). Export via services/index.ts following existing patterns.",
|
|
||||||
"status": "pending",
|
|
||||||
"testStrategy": "Unit test BackupService with mocked repositories. Verify bundle structure includes all expected fields. Test filtering options. Test handling of empty repositories.",
|
|
||||||
"parentId": "undefined"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": 2,
|
|
||||||
"title": "Add secrets encryption using Node crypto",
|
|
||||||
"description": "Implement AES-256-GCM encryption for sensitive data in backup bundles using password-derived keys via scrypt.",
|
|
||||||
"dependencies": [
|
|
||||||
1
|
|
||||||
],
|
|
||||||
"details": "Create crypto utility module in src/mcpd/src/services/backup/crypto.ts using Node's built-in crypto module. Implement deriveKey() using scrypt with configurable salt length and key length. Implement encrypt() that creates IV, encrypts data with AES-256-GCM, and returns base64-encoded result with IV and auth tag prepended. Implement decrypt() that reverses the process. In BackupService, detect fields containing secrets (env vars with sensitive patterns like *_KEY, *_SECRET, *_TOKEN, PASSWORD) and encrypt them. Store encryption metadata (algorithm, salt) in bundle header.",
|
|
||||||
"status": "pending",
|
|
||||||
"testStrategy": "Test encryption/decryption round-trip with various data sizes. Verify wrong password fails decryption. Test key derivation produces consistent results with same inputs. Test detection of sensitive field patterns.",
|
|
||||||
"parentId": "undefined"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": 3,
|
|
||||||
"title": "Implement RestoreService for JSON import",
|
|
||||||
"description": "Create RestoreService that imports a backup bundle back into the system, handling decryption and conflict resolution.",
|
|
||||||
"dependencies": [
|
|
||||||
1,
|
|
||||||
2
|
|
||||||
],
|
|
||||||
"details": "Create RestoreService class in src/mcpd/src/services/backup/. Implement restore() method that parses JSON bundle, validates version compatibility, decrypts encrypted fields using provided password, and imports data using repositories. Support conflict resolution strategies: 'skip' (ignore existing), 'overwrite' (replace existing), 'fail' (abort on conflict). Implement validateBundle() for schema validation before import. Handle partial failures with transaction-like rollback or detailed error reporting.",
|
|
||||||
"status": "pending",
|
|
||||||
"testStrategy": "Test restore with valid bundle creates expected resources. Test conflict resolution modes (skip, overwrite, fail). Test encrypted bundle restore with correct/incorrect passwords. Test invalid bundle rejection.",
|
|
||||||
"parentId": "undefined"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": 4,
|
|
||||||
"title": "Add REST endpoints for backup and restore",
|
|
||||||
"description": "Create REST API routes in src/mcpd/src/routes/ for triggering backup creation and restore operations.",
|
|
||||||
"dependencies": [
|
|
||||||
1,
|
|
||||||
2,
|
|
||||||
3
|
|
||||||
],
|
|
||||||
"details": "Create backup.ts routes file with: POST /api/v1/backup (create backup, optional password for encryption, returns JSON bundle), POST /api/v1/restore (accepts JSON bundle in body, password if encrypted, conflict strategy option, returns import summary). Register routes in routes/index.ts. Define BackupDeps interface following existing patterns. Add appropriate error handling for invalid bundles, decryption failures, and conflict errors. Include validation schemas for request bodies.",
|
|
||||||
"status": "pending",
|
|
||||||
"testStrategy": "Integration test backup endpoint returns valid JSON bundle. Test restore endpoint with valid/invalid bundles. Test encrypted backup/restore round-trip via API. Test error responses for various failure scenarios.",
|
|
||||||
"parentId": "undefined"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": 5,
|
|
||||||
"title": "Add CLI commands for backup and restore",
|
|
||||||
"description": "Implement CLI commands in src/cli/src/commands/ for backup export to file and restore from file.",
|
|
||||||
"dependencies": [
|
|
||||||
4
|
|
||||||
],
|
|
||||||
"details": "Create backup.ts commands file with: 'mcpctl backup' command with options --output/-o (file path), --encrypt (prompt for password), --resources (filter: servers,profiles,projects). Create 'mcpctl restore' command with options --input/-i (file path), --password (or prompt if encrypted), --conflict (skip|overwrite|fail). Commands should call the daemon API endpoints. Add progress output and summary of backed up/restored resources. Register commands in cli/src/index.ts following existing createXxxCommand pattern.",
|
|
||||||
"status": "pending",
|
|
||||||
"testStrategy": "Test backup command creates valid file. Test restore command from backup file. Test encryption password prompting. Test --resources filtering. Test various conflict resolution modes via CLI.",
|
|
||||||
"parentId": "undefined"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"updatedAt": "2026-02-21T05:40:51.787Z"
|
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"id": "24",
|
"id": "24",
|
||||||
@@ -1054,367 +726,15 @@
|
|||||||
"dependencies": [
|
"dependencies": [
|
||||||
"1"
|
"1"
|
||||||
],
|
],
|
||||||
"status": "done",
|
"status": "pending",
|
||||||
"subtasks": [],
|
"subtasks": []
|
||||||
"updatedAt": "2026-02-21T05:12:31.235Z"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": "25",
|
|
||||||
"title": "Rename local-proxy to mcplocal",
|
|
||||||
"description": "Rename the src/local-proxy directory to src/mcplocal and update all package references, imports, and build configurations throughout the monorepo.",
|
|
||||||
"details": "1. Rename directory: `mv src/local-proxy src/mcplocal`\n2. Update package.json name from `@mcpctl/local-proxy` to `@mcpctl/mcplocal`\n3. Update pnpm-workspace.yaml if needed\n4. Update all imports in other packages that reference local-proxy:\n - Search for `@mcpctl/local-proxy` and replace with `@mcpctl/mcplocal`\n - Check tsconfig references and path mappings\n5. Update any scripts in package.json root that reference local-proxy\n6. Update docker-compose files in deploy/ if they reference local-proxy\n7. Update documentation and README references\n8. Run `pnpm install` to regenerate lockfile with new package name\n9. Verify TypeScript compilation succeeds: `pnpm build`\n10. Run existing tests to ensure nothing broke: `pnpm test`",
|
|
||||||
"testStrategy": "1. Verify directory rename completed: `ls src/mcplocal`\n2. Verify package.json has correct name\n3. Run `pnpm install` - should complete without errors\n4. Run `pnpm build` - all packages should compile\n5. Run `pnpm test` - all existing tests should pass\n6. Grep codebase for 'local-proxy' - should find no stale references except git history",
|
|
||||||
"priority": "high",
|
|
||||||
"dependencies": [],
|
|
||||||
"status": "done",
|
|
||||||
"subtasks": [],
|
|
||||||
"updatedAt": "2026-02-21T18:04:17.018Z"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": "26",
|
|
||||||
"title": "Add HTTP REST server to mcplocal",
|
|
||||||
"description": "Add a Fastify HTTP server to mcplocal that runs alongside the existing stdio server, providing REST endpoints for mcpctl management commands.",
|
|
||||||
"details": "1. Add Fastify dependency to mcplocal package.json: `@fastify/cors`, `fastify`\n2. Create `src/mcplocal/src/http/server.ts` with Fastify app setup:\n ```typescript\n import Fastify from 'fastify';\n import cors from '@fastify/cors';\n \n export async function createHttpServer(config: HttpServerConfig) {\n const app = Fastify({ logger: true });\n await app.register(cors, { origin: true });\n // Register routes\n return app;\n }\n ```\n3. Create `src/mcplocal/src/http/routes/` directory structure\n4. Create health check endpoint: `GET /health`\n5. Create config types in `src/mcplocal/src/config.ts`:\n - `httpPort`: number (default 3200)\n - `httpHost`: string (default '127.0.0.1')\n - `mcpdUrl`: string (default 'http://localhost:3100')\n6. Update mcplocal entry point to start both servers:\n - stdio server for Claude MCP protocol\n - HTTP server for mcpctl REST API\n7. Add graceful shutdown handling for both servers",
|
|
||||||
"testStrategy": "1. Unit test: HTTP server starts on configured port\n2. Unit test: Health endpoint returns 200 OK\n3. Integration test: Both stdio and HTTP servers can run simultaneously\n4. Test graceful shutdown stops both servers cleanly\n5. Test CORS headers are present on responses\n6. Manual test: curl http://localhost:3200/health",
|
|
||||||
"priority": "high",
|
|
||||||
"dependencies": [
|
|
||||||
"25"
|
|
||||||
],
|
|
||||||
"status": "done",
|
|
||||||
"subtasks": [],
|
|
||||||
"updatedAt": "2026-02-21T18:09:26.322Z"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": "27",
|
|
||||||
"title": "Implement mcplocal management proxy routes",
|
|
||||||
"description": "Add REST endpoints to mcplocal that mirror mcpd's API and proxy management requests to mcpd without LLM processing. All requests must include proper authentication to mcpd using a Bearer token read from mcplocal config.",
|
|
||||||
"status": "done",
|
|
||||||
"dependencies": [
|
|
||||||
"26"
|
|
||||||
],
|
|
||||||
"priority": "high",
|
|
||||||
"details": "1. Create HTTP client for mcpd communication with auth: `src/local-proxy/src/http/mcpd-client.ts`\n ```typescript\n export class McpdClient {\n private token: string;\n \n constructor(private baseUrl: string, token: string) {\n this.token = token;\n }\n \n private getHeaders(): Record<string, string> {\n return {\n 'Content-Type': 'application/json',\n 'Authorization': `Bearer ${this.token}`\n };\n }\n \n async get<T>(path: string): Promise<T> {\n const response = await fetch(`${this.baseUrl}${path}`, {\n method: 'GET',\n headers: this.getHeaders()\n });\n await this.handleAuthError(response);\n return response.json();\n }\n \n async post<T>(path: string, body: unknown): Promise<T> {\n const response = await fetch(`${this.baseUrl}${path}`, {\n method: 'POST',\n headers: this.getHeaders(),\n body: JSON.stringify(body)\n });\n await this.handleAuthError(response);\n return response.json();\n }\n \n async put<T>(path: string, body: unknown): Promise<T> {\n const response = await fetch(`${this.baseUrl}${path}`, {\n method: 'PUT',\n headers: this.getHeaders(),\n body: JSON.stringify(body)\n });\n await this.handleAuthError(response);\n return response.json();\n }\n \n async delete<T>(path: string): Promise<T> {\n const response = await fetch(`${this.baseUrl}${path}`, {\n method: 'DELETE',\n headers: this.getHeaders()\n });\n await this.handleAuthError(response);\n return response.json();\n }\n \n private async handleAuthError(response: Response): Promise<void> {\n if (response.status === 401) {\n throw new AuthenticationError('Invalid or expired token. Please check mcplocal config.');\n }\n }\n }\n \n export class AuthenticationError extends Error {\n constructor(message: string) {\n super(message);\n this.name = 'AuthenticationError';\n }\n }\n ```\n2. Add token to mcplocal config type (extend ProxyConfig or similar):\n ```typescript\n export interface McpdAuthConfig {\n /** Bearer token for mcpd API authentication */\n mcpdToken: string;\n }\n ```\n3. 
Create proxy routes in `src/local-proxy/src/http/routes/`:\n - `servers.ts`: GET/POST /api/v1/servers, GET/PUT/DELETE /api/v1/servers/:id\n - `profiles.ts`: GET/POST /api/v1/profiles, GET/PUT/DELETE /api/v1/profiles/:id\n - `instances.ts`: GET/POST /api/v1/instances, GET/POST/DELETE /api/v1/instances/:id, etc.\n - `projects.ts`: GET/POST /api/v1/projects, etc.\n - `audit.ts`: GET /api/v1/audit-logs\n - `backup.ts`: POST /api/v1/backup, POST /api/v1/restore\n4. Each route handler forwards to mcpd with auth:\n ```typescript\n app.get('/api/v1/servers', async (req, reply) => {\n try {\n const result = await mcpdClient.get('/api/v1/servers');\n return result;\n } catch (error) {\n if (error instanceof AuthenticationError) {\n return reply.status(401).send({ error: error.message });\n }\n throw error;\n }\n });\n ```\n5. Add comprehensive error handling:\n - If mcpd is unreachable, return 503 Service Unavailable\n - If mcpd returns 401, return 401 with clear message about token configuration\n - Forward other HTTP errors from mcpd with appropriate status codes\n6. Add request/response logging for debugging",
|
|
||||||
"testStrategy": "1. Unit test: McpdClient attaches Authorization header to all request methods (GET, POST, PUT, DELETE)\n2. Unit test: McpdClient throws AuthenticationError on 401 response from mcpd\n3. Unit test: Each proxy route forwards requests correctly with auth headers\n4. Unit test: Error handling when mcpd is unreachable (503 response)\n5. Unit test: Error handling when mcpd returns 401 (clear error message returned)\n6. Integration test: Full request flow mcpctl -> mcplocal -> mcpd with valid token\n7. Integration test: Full request flow with invalid token returns 401\n8. Test query parameters are forwarded correctly\n9. Test request body is forwarded correctly for POST/PUT\n10. Test path parameters (:id) are passed through correctly\n11. Mock mcpd responses and verify mcplocal returns them unchanged\n12. Test token is read correctly from mcplocal config",
|
|
||||||
"subtasks": [],
|
|
||||||
"updatedAt": "2026-02-21T18:34:20.942Z"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": "28",
|
|
||||||
"title": "Add MCP proxy endpoint to mcpd",
|
|
||||||
"description": "Create a new endpoint in mcpd at /api/v1/mcp/proxy that accepts MCP tool call requests and executes them on managed MCP server instances. Also add authentication endpoints (login/logout) that mcpctl will use to authenticate users.",
|
|
||||||
"status": "done",
|
|
||||||
"dependencies": [],
|
|
||||||
"priority": "high",
|
|
||||||
"details": "## MCP Proxy Endpoint\n\n1. Create new route file: `src/mcpd/src/routes/mcp-proxy.ts`\n2. Define request schema:\n ```typescript\n interface McpProxyRequest {\n serverId: string; // or instanceId\n method: string; // e.g., 'tools/call', 'resources/read'\n params: Record<string, unknown>;\n }\n ```\n3. Create McpProxyService in `src/mcpd/src/services/mcp-proxy-service.ts`:\n - Look up instance by serverId (auto-start if profile allows)\n - Connect to the container via stdio or HTTP (depending on transport type)\n - Execute the MCP JSON-RPC call\n - Return the result\n4. Handle MCP JSON-RPC protocol:\n ```typescript\n async executeCall(instanceId: string, method: string, params: unknown) {\n const instance = await this.instanceService.getInstance(instanceId);\n const connection = await this.getOrCreateConnection(instance);\n const result = await connection.call(method, params);\n return result;\n }\n ```\n5. Connection pooling: maintain persistent connections to running instances\n6. Add route: `POST /api/v1/mcp/proxy` (must be behind auth middleware)\n7. Add audit logging for all MCP proxy calls - include authenticated userId from request.userId\n8. Handle errors: instance not found, instance not running, MCP call failed\n\n## Authentication Endpoints\n\n9. Create auth routes file: `src/mcpd/src/routes/auth.ts`\n10. Implement `POST /api/v1/auth/login`:\n - Request body: `{ username: string, password: string }`\n - Validate credentials against User table (use bcrypt for password comparison)\n - Create new Session record with token (use crypto.randomUUID or similar)\n - Response: `{ token: string, expiresAt: string }`\n11. 
Implement `POST /api/v1/auth/logout`:\n - Requires Bearer token in Authorization header\n - Delete/invalidate the Session record\n - Response: `{ success: true }`\n\n## Auth Integration Notes\n\n- Existing auth middleware in `src/mcpd/src/middleware/auth.ts` validates Bearer tokens against Session table\n- It sets `request.userId` on successful authentication\n- MCP proxy endpoint MUST use this auth middleware\n- Auth endpoints (login) should NOT require auth middleware\n- Logout endpoint SHOULD require auth middleware to validate the session being invalidated",
|
|
||||||
"testStrategy": "1. Unit test: Proxy service looks up correct instance\n2. Unit test: JSON-RPC call is formatted correctly\n3. Integration test: Full flow with a mock MCP server container\n4. Test error handling: non-existent server returns 404\n5. Test error handling: stopped instance returns appropriate error\n6. Test audit log entries include authenticated userId\n7. Test connection reuse for multiple calls to same instance\n8. Test login endpoint: valid credentials return session token\n9. Test login endpoint: invalid credentials return 401\n10. Test logout endpoint: valid session is invalidated\n11. Test logout endpoint: invalid/missing token returns 401\n12. Test MCP proxy endpoint without auth token returns 401\n13. Test MCP proxy endpoint with expired token returns 401\n14. Test MCP proxy endpoint with valid token succeeds and logs userId in audit",
|
|
||||||
"subtasks": [
|
|
||||||
{
|
|
||||||
"id": 1,
|
|
||||||
"title": "Create auth routes with login/logout endpoints",
|
|
||||||
"description": "Create src/mcpd/src/routes/auth.ts with POST /api/v1/auth/login and POST /api/v1/auth/logout endpoints for mcpctl authentication.",
|
|
||||||
"dependencies": [],
|
|
||||||
"details": "Implement login endpoint: validate username/password against User table using bcrypt, create Session record with generated token and expiry. Implement logout endpoint: require auth middleware, delete/invalidate Session record. Login does NOT require auth, logout DOES require auth. Export registerAuthRoutes function and update routes/index.ts.",
|
|
||||||
"status": "pending",
|
|
||||||
"testStrategy": "Test login with valid/invalid credentials. Test logout invalidates session. Test logout requires valid auth token. Test session token format and expiry.",
|
|
||||||
"parentId": "undefined"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": 2,
|
|
||||||
"title": "Create MCP proxy route file with auth middleware",
|
|
||||||
"description": "Create src/mcpd/src/routes/mcp-proxy.ts with POST /api/v1/mcp/proxy endpoint protected by auth middleware.",
|
|
||||||
"dependencies": [
|
|
||||||
1
|
|
||||||
],
|
|
||||||
"details": "Define McpProxyRequest interface (serverId, method, params). Register route handler that extracts userId from request.userId (set by auth middleware). Apply auth middleware using preHandler hook. Validate request body schema.",
|
|
||||||
"status": "pending",
|
|
||||||
"testStrategy": "Test endpoint returns 401 without auth token. Test endpoint returns 401 with invalid/expired token. Test valid auth token allows request through.",
|
|
||||||
"parentId": "undefined"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": 3,
|
|
||||||
"title": "Create McpProxyService for instance lookup and connection",
|
|
||||||
"description": "Create src/mcpd/src/services/mcp-proxy-service.ts to handle instance lookup, connection management, and MCP call execution.",
|
|
||||||
"dependencies": [],
|
|
||||||
"details": "Implement getInstance to look up by serverId, auto-start if profile allows. Implement getOrCreateConnection for connection pooling. Handle both stdio and HTTP transports. Implement executeCall method that formats JSON-RPC call and returns result.",
|
|
||||||
"status": "pending",
|
|
||||||
"testStrategy": "Unit test instance lookup. Unit test connection pooling reuses connections. Test auto-start behavior. Test both transport types.",
|
|
||||||
"parentId": "undefined"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": 4,
|
|
||||||
"title": "Implement MCP JSON-RPC call execution",
|
|
||||||
"description": "Implement the core JSON-RPC call logic in McpProxyService to execute tool calls on MCP server instances.",
|
|
||||||
"dependencies": [
|
|
||||||
3
|
|
||||||
],
|
|
||||||
"details": "Format JSON-RPC 2.0 request with method and params. Send request over established connection (stdio/HTTP). Parse JSON-RPC response and handle errors. Return result or throw appropriate error for failed calls.",
|
|
||||||
"status": "pending",
|
|
||||||
"testStrategy": "Unit test JSON-RPC request formatting. Test successful call returns result. Test JSON-RPC error responses are handled. Integration test with mock MCP server.",
|
|
||||||
"parentId": "undefined"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": 5,
|
|
||||||
"title": "Add audit logging with userId for MCP proxy calls",
|
|
||||||
"description": "Ensure all MCP proxy calls are logged to audit log including the authenticated userId from the session.",
|
|
||||||
"dependencies": [
|
|
||||||
2,
|
|
||||||
4
|
|
||||||
],
|
|
||||||
"details": "Use existing audit middleware/service. Include userId from request.userId in audit log entry. Log serverId, method, and outcome (success/failure). Log any errors that occur during MCP call execution.",
|
|
||||||
"status": "pending",
|
|
||||||
"testStrategy": "Test audit log entries contain userId. Test audit log entries contain serverId and method. Test failed calls are logged with error details.",
|
|
||||||
"parentId": "undefined"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": 6,
|
|
||||||
"title": "Integrate auth and proxy routes into server.ts",
|
|
||||||
"description": "Register the new auth and mcp-proxy routes in the Fastify server with proper auth middleware wiring.",
|
|
||||||
"dependencies": [
|
|
||||||
1,
|
|
||||||
2,
|
|
||||||
5
|
|
||||||
],
|
|
||||||
"details": "Update server.ts to register auth routes (no auth required for login). Register mcp-proxy routes with auth middleware. Ensure auth middleware is wired with findSession dependency from Prisma. Update routes/index.ts exports.",
|
|
||||||
"status": "pending",
|
|
||||||
"testStrategy": "Integration test full login -> proxy call flow. Test auth middleware correctly protects proxy endpoint. Test health endpoints remain unauthenticated.",
|
|
||||||
"parentId": "undefined"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"updatedAt": "2026-02-21T18:09:26.327Z"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": "29",
|
|
||||||
"title": "Implement LLM pre-processing pipeline in mcplocal",
|
|
||||||
"description": "Create the core LLM pre-processing pipeline that intercepts MCP tool calls, uses a local LLM to optimize requests before sending to mcpd, and filters responses before returning to Claude.",
|
|
||||||
"details": "1. Create `src/mcplocal/src/llm/processor.ts` - the core pipeline:\n ```typescript\n export class LlmProcessor {\n constructor(\n private providerRegistry: ProviderRegistry,\n private config: LlmProcessorConfig\n ) {}\n \n async preprocessRequest(toolName: string, params: unknown): Promise<ProcessedRequest> {\n // Use LLM to interpret and optimize the request\n const prompt = this.buildRequestPrompt(toolName, params);\n const result = await this.providerRegistry.getActiveProvider().complete({\n systemPrompt: REQUEST_OPTIMIZATION_SYSTEM_PROMPT,\n userPrompt: prompt\n });\n return this.parseOptimizedRequest(result);\n }\n \n async filterResponse(toolName: string, originalRequest: unknown, rawResponse: unknown): Promise<FilteredResponse> {\n // Use LLM to filter/summarize the response\n const prompt = this.buildFilterPrompt(toolName, originalRequest, rawResponse);\n const result = await this.providerRegistry.getActiveProvider().complete({\n systemPrompt: RESPONSE_FILTER_SYSTEM_PROMPT,\n userPrompt: prompt\n });\n return this.parseFilteredResponse(result);\n }\n }\n ```\n2. Create system prompts in `src/mcplocal/src/llm/prompts.ts`:\n - REQUEST_OPTIMIZATION_SYSTEM_PROMPT: instruct LLM to generate optimal queries\n - RESPONSE_FILTER_SYSTEM_PROMPT: instruct LLM to extract relevant information\n3. Integrate into router.ts - wrap tools/call handler:\n ```typescript\n async handleToolsCall(request: JsonRpcRequest) {\n if (this.shouldPreprocess(request.params.name)) {\n const processed = await this.llmProcessor.preprocessRequest(...);\n // Call mcpd with processed request\n const rawResponse = await this.callMcpd(processed);\n const filtered = await this.llmProcessor.filterResponse(...);\n return filtered;\n }\n return this.callMcpd(request.params);\n }\n ```\n4. Add configuration options:\n - `enablePreprocessing`: boolean\n - `preprocessingExclude`: string[] (tool names to skip)\n - `preferredProvider`: string (ollama, gemini, deepseek, etc.)\n5. 
Add bypass logic for simple operations (list, create, delete)",
|
|
||||||
"testStrategy": "1. Unit test: Request preprocessing generates optimized queries\n2. Unit test: Response filtering reduces data volume\n3. Unit test: Bypass logic works for excluded tools\n4. Integration test: Full pipeline with mock LLM provider\n5. Test error handling: LLM failure falls back to unfiltered pass-through\n6. Test configuration options are respected\n7. Measure: response size reduction percentage",
|
|
||||||
"priority": "high",
|
|
||||||
"dependencies": [
|
|
||||||
"25",
|
|
||||||
"27",
|
|
||||||
"28"
|
|
||||||
],
|
|
||||||
"status": "done",
|
|
||||||
"subtasks": [],
|
|
||||||
"updatedAt": "2026-02-21T18:41:26.539Z"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": "30",
|
|
||||||
"title": "Add Gemini CLI LLM provider",
|
|
||||||
"description": "Implement a new LLM provider that uses the Gemini CLI binary for local, free LLM inference as the preferred provider for pre-processing.",
|
|
||||||
"details": "1. Create `src/mcplocal/src/providers/gemini-cli.ts`:\n ```typescript\n import { spawn } from 'child_process';\n \n export class GeminiCliProvider implements LlmProvider {\n readonly name = 'gemini-cli';\n private binaryPath: string;\n \n constructor(config: GeminiCliConfig) {\n this.binaryPath = config.binaryPath || 'gemini';\n }\n \n async isAvailable(): Promise<boolean> {\n // Check if gemini binary exists and is executable\n try {\n await this.runCommand(['--version']);\n return true;\n } catch {\n return false;\n }\n }\n \n async complete(options: CompletionOptions): Promise<CompletionResult> {\n const input = this.formatPrompt(options);\n const output = await this.runCommand(['--prompt', input]);\n return { content: output, model: 'gemini-cli' };\n }\n \n private async runCommand(args: string[]): Promise<string> {\n // Spawn gemini CLI process and capture output\n }\n }\n ```\n2. Research actual Gemini CLI interface and adjust implementation\n3. Add to provider registry with high priority (prefer over API providers)\n4. Add configuration: `geminiCliBinaryPath`\n5. Handle timeout for slow inference\n6. Add fallback to next provider if Gemini CLI fails",
|
|
||||||
"testStrategy": "1. Unit test: Provider correctly detects CLI availability\n2. Unit test: Prompt formatting is correct\n3. Unit test: Output parsing handles various formats\n4. Integration test: Full completion with actual Gemini CLI (if available)\n5. Test timeout handling for slow responses\n6. Test fallback when CLI is not installed",
|
|
||||||
"priority": "medium",
|
|
||||||
"dependencies": [
|
|
||||||
"25"
|
|
||||||
],
|
|
||||||
"status": "done",
|
|
||||||
"subtasks": [],
|
|
||||||
"updatedAt": "2026-02-21T18:34:20.968Z"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": "31",
|
|
||||||
"title": "Add DeepSeek API LLM provider",
|
|
||||||
"description": "Implement DeepSeek API provider as a cheap cloud-based fallback when local LLMs are unavailable.",
|
|
||||||
"details": "1. Create `src/mcplocal/src/providers/deepseek.ts`:\n ```typescript\n export class DeepSeekProvider implements LlmProvider {\n readonly name = 'deepseek';\n private apiKey: string;\n private baseUrl = 'https://api.deepseek.com/v1';\n \n constructor(config: DeepSeekConfig) {\n this.apiKey = config.apiKey || process.env.DEEPSEEK_API_KEY;\n }\n \n async isAvailable(): Promise<boolean> {\n return !!this.apiKey;\n }\n \n async complete(options: CompletionOptions): Promise<CompletionResult> {\n // DeepSeek uses OpenAI-compatible API\n const response = await fetch(`${this.baseUrl}/chat/completions`, {\n method: 'POST',\n headers: {\n 'Authorization': `Bearer ${this.apiKey}`,\n 'Content-Type': 'application/json'\n },\n body: JSON.stringify({\n model: 'deepseek-chat',\n messages: [{ role: 'user', content: options.userPrompt }]\n })\n });\n // Parse and return\n }\n }\n ```\n2. Add DEEPSEEK_API_KEY to configuration\n3. Register in provider registry with medium priority\n4. Support both deepseek-chat and deepseek-coder models\n5. Add rate limiting handling",
|
|
||||||
"testStrategy": "1. Unit test: Provider correctly checks API key availability\n2. Unit test: Request formatting matches DeepSeek API spec\n3. Unit test: Response parsing handles all fields\n4. Integration test: Full completion with actual API (with valid key)\n5. Test error handling for rate limits\n6. Test error handling for invalid API key",
|
|
||||||
"priority": "medium",
|
|
||||||
"dependencies": [
|
|
||||||
"25"
|
|
||||||
],
|
|
||||||
"status": "done",
|
|
||||||
"subtasks": [],
|
|
||||||
"updatedAt": "2026-02-21T18:34:20.974Z"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": "32",
|
|
||||||
"title": "Implement smart context optimization",
|
|
||||||
"description": "Add token counting and decision logic to intelligently skip LLM filtering when responses are small enough, and cache filtering decisions for repeated queries.",
|
|
||||||
"details": "1. Create `src/mcplocal/src/llm/token-counter.ts`:\n ```typescript\n export function estimateTokens(text: string): number {\n // Simple estimation: ~4 chars per token for English\n // More accurate: use tiktoken or similar library\n return Math.ceil(text.length / 4);\n }\n ```\n2. Create `src/mcplocal/src/llm/filter-cache.ts`:\n ```typescript\n export class FilterCache {\n private cache: LRUCache<string, FilterDecision>;\n \n shouldFilter(toolName: string, params: unknown, responseSize: number): boolean {\n const key = this.computeKey(toolName, params);\n const cached = this.cache.get(key);\n if (cached) return cached.shouldFilter;\n // No cache hit - use default threshold logic\n return responseSize > this.tokenThreshold;\n }\n \n recordDecision(toolName: string, params: unknown, decision: FilterDecision): void {\n const key = this.computeKey(toolName, params);\n this.cache.set(key, decision);\n }\n }\n ```\n3. Add configuration options:\n - `tokenThreshold`: number (default 1000 tokens)\n - `filterCacheSize`: number (default 1000 entries)\n - `filterCacheTtl`: number (default 3600 seconds)\n4. Integrate into LlmProcessor:\n ```typescript\n async filterResponse(...) {\n const tokens = estimateTokens(JSON.stringify(rawResponse));\n if (tokens < this.config.tokenThreshold) {\n // Not worth filtering - return as-is\n return { filtered: false, response: rawResponse };\n }\n // Proceed with LLM filtering\n }\n ```\n5. Add metrics tracking:\n - Total tokens processed\n - Tokens saved by filtering\n - Filter cache hit rate\n - Average latency added by filtering",
|
|
||||||
"testStrategy": "1. Unit test: Token estimation is reasonably accurate\n2. Unit test: Cache correctly stores and retrieves decisions\n3. Unit test: Threshold logic skips filtering for small responses\n4. Unit test: Cache TTL expiration works correctly\n5. Integration test: Metrics are recorded accurately\n6. Performance test: Cache improves latency for repeated queries",
|
|
||||||
"priority": "medium",
|
|
||||||
"dependencies": [
|
|
||||||
"29"
|
|
||||||
],
|
|
||||||
"status": "done",
|
|
||||||
"subtasks": [],
|
|
||||||
"updatedAt": "2026-02-21T18:47:07.709Z"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": "33",
|
|
||||||
"title": "Update mcpctl to use mcplocal as daemon",
|
|
||||||
"description": "Modify mcpctl CLI to connect to mcplocal instead of mcpd directly, update configuration options, add dual connectivity status checking, and implement authentication commands (login/logout) with secure credential storage.",
|
|
||||||
"status": "done",
|
|
||||||
"dependencies": [
|
|
||||||
"27"
|
|
||||||
],
|
|
||||||
"priority": "high",
|
|
||||||
"details": "1. Update `src/cli/src/config/schema.ts`:\n ```typescript\n export interface McpctlConfig {\n mcplocalUrl: string; // NEW: default 'http://localhost:3200'\n mcpdUrl: string; // Keep for reference/direct access if needed\n // ... other fields\n }\n ```\n2. Update `src/cli/src/config/defaults.ts`:\n - Change default daemonUrl to http://localhost:3200 (mcplocal)\n3. Update `src/cli/src/api-client.ts`:\n - Default baseUrl now points to mcplocal\n4. Add new config commands in `src/cli/src/commands/config.ts`:\n ```typescript\n .command('set-mcplocal-url <url>')\n .command('set-mcpd-url <url>')\n .command('get-mcplocal-url')\n .command('get-mcpd-url')\n ```\n5. Update `src/cli/src/commands/status.ts` to show both connections and auth status:\n ```\n $ mcpctl status\n mcplocal: connected (localhost:3200)\n mcpd: connected (nas.local:3100) via mcplocal\n Auth: logged in as user@example.com\n LLM Provider: ollama (llama3.2)\n Token savings: 45% (last 24h)\n ```\n6. Update CLI --daemon-url flag to point to mcplocal\n7. Add --direct flag to bypass mcplocal and talk to mcpd directly (for debugging)\n8. Create `src/cli/src/commands/auth.ts` with login/logout commands:\n - `mcpctl login`: Prompt for mcpd URL (if not configured) and credentials\n - Call POST /api/v1/auth/login with { email, password }\n - Store session token in ~/.mcpctl/credentials with 0600 permissions\n - `mcpctl logout`: Invalidate session and delete stored token\n9. Create `src/cli/src/auth/credentials.ts` for secure token storage:\n - Use fs.chmod to set 0600 permissions on credentials file\n - Token format: { token: string, mcpdUrl: string, user: string, expiresAt?: string }\n10. Update api-client.ts to include stored token in requests to mcplocal\n - mcplocal passes this token to mcpd for authentication",
|
|
||||||
"testStrategy": "1. Unit test: Default config points to mcplocal URL\n2. Unit test: Config commands update correct fields\n3. Integration test: CLI commands work through mcplocal proxy\n4. Test status command shows both mcplocal and mcpd status\n5. Test --direct flag bypasses mcplocal\n6. Test backward compatibility with existing config files\n7. Unit test: login command stores token with correct permissions (0600)\n8. Unit test: logout command removes credentials file\n9. Integration test: login flow with POST /api/v1/auth/login\n10. Test status command shows auth status (logged in as user)\n11. Test token is passed to mcplocal in API requests\n12. Test invalid credentials return appropriate error message\n13. Test expired token handling",
|
|
||||||
"subtasks": [
|
|
||||||
{
|
|
||||||
"id": 1,
|
|
||||||
"title": "Update config schema for mcplocal and mcpd URLs",
|
|
||||||
"description": "Modify McpctlConfigSchema in src/cli/src/config/schema.ts to include separate mcplocalUrl and mcpdUrl fields with appropriate defaults.",
|
|
||||||
"dependencies": [],
|
|
||||||
"details": "Update the Zod schema to add mcplocalUrl (default: http://localhost:3200) and mcpdUrl (default: http://localhost:3100). Update DEFAULT_CONFIG and ensure backward compatibility with existing daemonUrl field by mapping it to mcplocalUrl.",
|
|
||||||
"status": "pending",
|
|
||||||
"testStrategy": "Unit test schema validation for new URL fields. Test default values are correct. Test backward compatibility mapping.",
|
|
||||||
"parentId": "undefined"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": 2,
|
|
||||||
"title": "Create auth credentials storage module",
|
|
||||||
"description": "Create src/cli/src/auth/credentials.ts to handle secure storage and retrieval of session tokens in ~/.mcpctl/credentials.",
|
|
||||||
"dependencies": [],
|
|
||||||
"details": "Implement saveCredentials(token, mcpdUrl, user), loadCredentials(), and deleteCredentials() functions. Use fs.chmod to set 0600 permissions. Store JSON format: { token, mcpdUrl, user, expiresAt }. Handle file not found gracefully in loadCredentials.",
|
|
||||||
"status": "pending",
|
|
||||||
"testStrategy": "Unit test credentials are saved with 0600 permissions. Test load returns null when file doesn't exist. Test delete removes the file.",
|
|
||||||
"parentId": "undefined"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": 3,
|
|
||||||
"title": "Implement login command",
|
|
||||||
"description": "Create src/cli/src/commands/auth.ts with mcpctl login command that prompts for mcpd URL and credentials, calls POST /api/v1/auth/login, and stores the session token.",
|
|
||||||
"dependencies": [
|
|
||||||
2
|
|
||||||
],
|
|
||||||
"details": "Use inquirer or prompts library for interactive credential input (email, password). If mcpdUrl not configured, prompt for it. Call POST /api/v1/auth/login with credentials. On success, save token using credentials module. Display 'Logged in as {user}' on success. Handle errors (invalid credentials, network errors) with clear messages.",
|
|
||||||
"status": "pending",
|
|
||||||
"testStrategy": "Test prompts collect correct input. Test successful login stores credentials. Test failed login shows error without storing token.",
|
|
||||||
"parentId": "undefined"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": 4,
|
|
||||||
"title": "Implement logout command",
|
|
||||||
"description": "Add mcpctl logout command to auth.ts that invalidates the session and removes stored credentials.",
|
|
||||||
"dependencies": [
|
|
||||||
2
|
|
||||||
],
|
|
||||||
"details": "Load stored credentials, optionally call a logout endpoint on mcpd to invalidate server-side session, then delete the local credentials file. Display 'Logged out successfully' or 'Not logged in' as appropriate.",
|
|
||||||
"status": "pending",
|
|
||||||
"testStrategy": "Test logout removes credentials file. Test logout when not logged in shows appropriate message.",
|
|
||||||
"parentId": "undefined"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": 5,
|
|
||||||
"title": "Update api-client to include auth token",
|
|
||||||
"description": "Modify src/cli/src/api-client.ts to load and include stored session token in Authorization header for requests to mcplocal.",
|
|
||||||
"dependencies": [
|
|
||||||
2
|
|
||||||
],
|
|
||||||
"details": "Import loadCredentials from auth module. Add Authorization: Bearer {token} header to requests when credentials exist. Handle expired token by returning appropriate error suggesting re-login.",
|
|
||||||
"status": "pending",
|
|
||||||
"testStrategy": "Test requests include Authorization header when logged in. Test requests work without token when not logged in.",
|
|
||||||
"parentId": "undefined"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": 6,
|
|
||||||
"title": "Update status command to show auth status",
|
|
||||||
"description": "Modify src/cli/src/commands/status.ts to display authentication status (logged in as user X or not logged in) along with mcplocal and mcpd connectivity.",
|
|
||||||
"dependencies": [
|
|
||||||
2,
|
|
||||||
5
|
|
||||||
],
|
|
||||||
"details": "Load credentials and display auth status line: 'Auth: logged in as {user}' or 'Auth: not logged in'. Update status output format to show mcplocal and mcpd status separately with the auth info.",
|
|
||||||
"status": "pending",
|
|
||||||
"testStrategy": "Test status shows 'logged in as user' when credentials exist. Test status shows 'not logged in' when no credentials.",
|
|
||||||
"parentId": "undefined"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": 7,
|
|
||||||
"title": "Add config commands for mcplocal and mcpd URLs",
|
|
||||||
"description": "Add set-mcplocal-url, set-mcpd-url, get-mcplocal-url, and get-mcpd-url commands to src/cli/src/commands/config.ts.",
|
|
||||||
"dependencies": [
|
|
||||||
1
|
|
||||||
],
|
|
||||||
"details": "Add four new subcommands to the config command for setting and getting the mcplocal and mcpd URLs independently. Update the generic 'set' command to handle these new schema fields.",
|
|
||||||
"status": "pending",
|
|
||||||
"testStrategy": "Test each command correctly reads/writes the appropriate config field.",
|
|
||||||
"parentId": "undefined"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": 8,
|
|
||||||
"title": "Add --direct flag for mcpd bypass",
|
|
||||||
"description": "Add --direct flag to CLI commands that bypasses mcplocal and connects directly to mcpd for debugging purposes.",
|
|
||||||
"dependencies": [
|
|
||||||
1,
|
|
||||||
5
|
|
||||||
],
|
|
||||||
"details": "Add global --direct option to the main CLI. When set, api-client uses mcpdUrl instead of mcplocalUrl. Useful for debugging connectivity issues between mcplocal and mcpd.",
|
|
||||||
"status": "pending",
|
|
||||||
"testStrategy": "Test --direct flag causes requests to use mcpdUrl. Test normal operation uses mcplocalUrl.",
|
|
||||||
"parentId": "undefined"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": 9,
|
|
||||||
"title": "Register auth commands in CLI entry point",
|
|
||||||
"description": "Import and register the login and logout commands in src/cli/src/index.ts.",
|
|
||||||
"dependencies": [
|
|
||||||
3,
|
|
||||||
4
|
|
||||||
],
|
|
||||||
"details": "Import createAuthCommand from commands/auth.ts and add it to the main program with program.addCommand(createAuthCommand()).",
|
|
||||||
"status": "pending",
|
|
||||||
"testStrategy": "Test mcpctl login and mcpctl logout are available as commands.",
|
|
||||||
"parentId": "undefined"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"updatedAt": "2026-02-21T18:39:11.345Z"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": "34",
|
|
||||||
"title": "Connect mcplocal MCP router to mcpd proxy endpoint",
|
|
||||||
"description": "Update mcplocal's MCP router to forward tool calls to mcpd's new /api/v1/mcp/proxy endpoint instead of connecting to MCP servers directly.",
|
|
||||||
"details": "1. Update `src/mcplocal/src/router.ts` to use mcpd proxy:\n ```typescript\n class Router {\n private mcpdClient: McpdClient;\n \n async handleToolsCall(request: JsonRpcRequest) {\n const { name, arguments: args } = request.params;\n const [serverName, toolName] = name.split('/');\n \n // Pre-process with LLM if enabled\n const processedArgs = this.config.enablePreprocessing\n ? await this.llmProcessor.preprocessRequest(toolName, args)\n : args;\n \n // Call mcpd proxy endpoint\n const result = await this.mcpdClient.post('/api/v1/mcp/proxy', {\n serverId: serverName,\n method: 'tools/call',\n params: { name: toolName, arguments: processedArgs }\n });\n \n // Post-process response with LLM if enabled\n return this.config.enablePreprocessing\n ? await this.llmProcessor.filterResponse(toolName, args, result)\n : result;\n }\n }\n ```\n2. Update upstream configuration:\n - Remove direct upstream connections for managed servers\n - Keep option for local/unmanaged upstreams\n3. Add server discovery from mcpd:\n ```typescript\n async refreshServerList() {\n const servers = await this.mcpdClient.get('/api/v1/servers');\n this.updateAvailableTools(servers);\n }\n ```\n4. Handle tools/list by aggregating from mcpd servers\n5. Handle resources/list and prompts/list similarly",
|
|
||||||
"testStrategy": "1. Unit test: Tool calls are forwarded to mcpd proxy correctly\n2. Unit test: Server name is extracted from namespaced tool name\n3. Integration test: Full flow Claude -> mcplocal -> mcpd -> container\n4. Test tools/list aggregates from all mcpd servers\n5. Test error handling when mcpd is unreachable\n6. Test LLM preprocessing is applied when enabled",
|
|
||||||
"priority": "high",
|
|
||||||
"dependencies": [
|
|
||||||
"28",
|
|
||||||
"29"
|
|
||||||
],
|
|
||||||
"status": "done",
|
|
||||||
"subtasks": [],
|
|
||||||
"updatedAt": "2026-02-21T18:43:14.673Z"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": "35",
|
|
||||||
"title": "Implement health monitoring across all tiers",
|
|
||||||
"description": "Extend health monitoring to track connectivity and status across mcplocal, mcpd, and individual MCP server instances.",
|
|
||||||
"details": "1. Update mcplocal health monitor in `src/mcplocal/src/health.ts`:\n ```typescript\n export class TieredHealthMonitor {\n async checkHealth(): Promise<TieredHealthStatus> {\n return {\n mcplocal: {\n status: 'healthy',\n llmProvider: await this.checkLlmProvider(),\n uptime: process.uptime()\n },\n mcpd: await this.checkMcpdHealth(),\n instances: await this.checkInstancesHealth()\n };\n }\n \n private async checkMcpdHealth(): Promise<McpdHealth> {\n try {\n const health = await this.mcpdClient.get('/api/v1/health');\n return { status: 'connected', ...health };\n } catch {\n return { status: 'disconnected' };\n }\n }\n \n private async checkInstancesHealth(): Promise<InstanceHealth[]> {\n const instances = await this.mcpdClient.get('/api/v1/instances');\n return instances.map(i => ({\n name: i.name,\n status: i.status,\n lastHealthCheck: i.lastHealthCheck\n }));\n }\n }\n ```\n2. Add health endpoint to mcplocal HTTP server: `GET /health`\n3. Update mcpctl status command to display tiered health\n4. Add degraded state detection:\n - LLM provider unavailable but mcpd reachable\n - Some instances down but others healthy\n5. Add health event notifications for state transitions\n6. Add configurable health check intervals",
|
|
||||||
"testStrategy": "1. Unit test: Health check correctly identifies all states\n2. Unit test: Degraded state is detected correctly\n3. Integration test: Full health check across all tiers\n4. Test health endpoint returns correct format\n5. Test mcpctl status displays health correctly\n6. Test state transition events are emitted",
|
|
||||||
"priority": "medium",
|
|
||||||
"dependencies": [
|
|
||||||
"33",
|
|
||||||
"34"
|
|
||||||
],
|
|
||||||
"status": "done",
|
|
||||||
"subtasks": [],
|
|
||||||
"updatedAt": "2026-02-21T18:46:07.885Z"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": "36",
|
|
||||||
"title": "End-to-end integration testing",
|
|
||||||
"description": "Create comprehensive integration tests that validate the full data flow from mcpctl through mcplocal to mcpd to MCP server containers and back.",
|
|
||||||
"details": "1. Create test fixtures in `src/mcplocal/test/fixtures/`:\n - Mock MCP server that returns predictable responses\n - Test configuration files\n - Sample tool call payloads\n2. Create integration test suite in `src/mcplocal/test/integration/`:\n ```typescript\n describe('End-to-end flow', () => {\n it('mcpctl -> mcplocal -> mcpd -> mcp_server', async () => {\n // Start mock MCP server\n // Start mcpd with test config\n // Start mcplocal pointing to mcpd\n // Execute mcpctl command\n // Verify response flows back correctly\n });\n \n it('LLM pre-processing reduces response size', async () => {\n // Send query that returns large dataset\n // Verify LLM filtering reduces token count\n // Verify relevant data is preserved\n });\n \n it('credentials never leave mcpd', async () => {\n // Monitor all traffic from mcplocal\n // Verify no credentials appear in requests/responses\n });\n });\n ```\n3. Test scenarios:\n - Management commands (get servers, instances, etc.)\n - MCP tool calls with LLM preprocessing\n - MCP tool calls without preprocessing\n - Error handling (mcpd down, instance down, LLM failure)\n - Health monitoring accuracy\n4. Add CI integration test workflow\n5. Create docker-compose.test.yml for test environment",
|
|
||||||
"testStrategy": "1. All integration tests pass in CI environment\n2. Test coverage includes happy path and error scenarios\n3. Performance benchmarks: measure latency at each tier\n4. Security test: verify credential isolation\n5. Load test: multiple concurrent requests\n6. Chaos test: random component failures",
|
|
||||||
"priority": "high",
|
|
||||||
"dependencies": [
|
|
||||||
"29",
|
|
||||||
"33",
|
|
||||||
"34",
|
|
||||||
"35"
|
|
||||||
],
|
|
||||||
"status": "done",
|
|
||||||
"subtasks": [],
|
|
||||||
"updatedAt": "2026-02-21T18:52:29.084Z"
|
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
"metadata": {
|
"metadata": {
|
||||||
"version": "1.0.0",
|
"version": "1.0.0",
|
||||||
"lastModified": "2026-02-21T18:52:29.084Z",
|
"lastModified": "2026-02-21T04:26:06.239Z",
|
||||||
"taskCount": 36,
|
"taskCount": 24,
|
||||||
"completedCount": 33,
|
"completedCount": 5,
|
||||||
"tags": [
|
"tags": [
|
||||||
"master"
|
"master"
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -1,69 +0,0 @@
|
|||||||
#!/bin/bash
# Build the mcpctl CLI, package it as an RPM, publish it to Gitea, and
# install it locally. Requires pnpm, bun, nfpm, rpm, and curl on PATH.
set -e

cd "$(dirname "$0")"

# Load .env if present (e.g. GITEA_TOKEN / GITEA_URL overrides)
if [ -f .env ]; then
    set -a; source .env; set +a
fi

# Ensure tools are on PATH
export PATH="$HOME/.npm-global/bin:$HOME/.bun/bin:$HOME/.local/bin:$PATH"

echo "=== mcpctl CLI build & release ==="
echo ""

# 1. Build TypeScript
echo "==> Building TypeScript..."
pnpm build

# 2. Bundle standalone binary
echo "==> Bundling standalone binary..."
mkdir -p dist
rm -f dist/mcpctl dist/mcpctl-*.rpm
bun build src/cli/src/index.ts --compile --outfile dist/mcpctl
echo " Binary: $(du -h dist/mcpctl | cut -f1)"

# 3. Package RPM
echo "==> Packaging RPM..."
nfpm pkg --packager rpm --target dist/
RPM_FILE=$(ls dist/mcpctl-*.rpm 2>/dev/null | head -1)
# Fail fast with a clear message instead of letting `rpm -qp ""` fail
# confusingly under `set -e` when nfpm produced nothing.
if [ -z "$RPM_FILE" ]; then
    echo "ERROR: nfpm did not produce an RPM in dist/" >&2
    exit 1
fi
RPM_VERSION=$(rpm -qp --queryformat '%{VERSION}-%{RELEASE}' "$RPM_FILE")
echo " RPM: $RPM_FILE ($(du -h "$RPM_FILE" | cut -f1))"

# 4. Publish to Gitea (skipped when GITEA_TOKEN is missing)
GITEA_URL="${GITEA_URL:-http://10.0.0.194:3012}"
GITEA_OWNER="${GITEA_OWNER:-michal}"

if [ -z "$GITEA_TOKEN" ]; then
    echo ""
    echo "WARNING: GITEA_TOKEN not set, skipping publish. Add it to .env"
    echo ""
else
    echo "==> Publishing to ${GITEA_URL}..."
    # The packages API returns 200 when this exact version already exists.
    EXISTING=$(curl -s -o /dev/null -w "%{http_code}" \
        -H "Authorization: token ${GITEA_TOKEN}" \
        "${GITEA_URL}/api/v1/packages/${GITEA_OWNER}/rpm/mcpctl/${RPM_VERSION}")

    if [ "$EXISTING" = "200" ]; then
        echo " Replacing existing version $RPM_VERSION..."
        curl -s -o /dev/null -X DELETE \
            -H "Authorization: token ${GITEA_TOKEN}" \
            "${GITEA_URL}/api/v1/packages/${GITEA_OWNER}/rpm/mcpctl/${RPM_VERSION}"
    fi

    curl --fail -s -X PUT \
        -H "Authorization: token ${GITEA_TOKEN}" \
        --upload-file "$RPM_FILE" \
        "${GITEA_URL}/api/packages/${GITEA_OWNER}/rpm/upload"
    echo " Published!"
fi

# 5. Install locally
echo "==> Installing..."
sudo rpm -U --force "$RPM_FILE"

echo ""
echo "=== Done ==="
mcpctl --version
|
|
||||||
@@ -1,74 +0,0 @@
|
|||||||
# Bash programmable completion for the mcpctl CLI.
_mcpctl() {
    local cur prev words cword
    _init_completion || return

    local commands="status login logout config get describe delete logs create edit apply backup restore help"
    local global_opts="-v --version --daemon-url --direct -h --help"
    local resources="servers instances secrets templates projects users groups rbac"

    # Subcommand-specific completions; each arm returns so the generic
    # top-level completion at the bottom only fires for word 1.
    case "${words[1]}" in
        config)
            if (( cword == 2 )); then
                COMPREPLY=($(compgen -W "view set path reset claude-generate impersonate help" -- "$cur"))
            fi
            return ;;
        status)
            COMPREPLY=($(compgen -W "-h --help" -- "$cur")); return ;;
        login)
            COMPREPLY=($(compgen -W "--url --email --password -h --help" -- "$cur")); return ;;
        logout)
            return ;;
        get)
            # Word 2 is a resource type; anything after that takes options.
            if (( cword == 2 )); then
                COMPREPLY=($(compgen -W "$resources" -- "$cur"))
            else
                COMPREPLY=($(compgen -W "-o --output -h --help" -- "$cur"))
            fi
            return ;;
        describe)
            if (( cword == 2 )); then
                COMPREPLY=($(compgen -W "$resources" -- "$cur"))
            else
                COMPREPLY=($(compgen -W "-o --output --show-values -h --help" -- "$cur"))
            fi
            return ;;
        delete)
            if (( cword == 2 )); then
                COMPREPLY=($(compgen -W "$resources" -- "$cur"))
            fi
            return ;;
        edit)
            # Only servers and projects are editable.
            if (( cword == 2 )); then
                COMPREPLY=($(compgen -W "servers projects" -- "$cur"))
            fi
            return ;;
        logs)
            COMPREPLY=($(compgen -W "--tail --since -f --follow -h --help" -- "$cur")); return ;;
        create)
            if (( cword == 2 )); then
                COMPREPLY=($(compgen -W "server secret project user group rbac help" -- "$cur"))
            fi
            return ;;
        apply)
            # apply takes a filename argument.
            COMPREPLY=($(compgen -f -- "$cur")); return ;;
        backup)
            COMPREPLY=($(compgen -W "-o --output -p --password -h --help" -- "$cur")); return ;;
        restore)
            COMPREPLY=($(compgen -W "-i --input -p --password -c --conflict -h --help" -- "$cur")); return ;;
        help)
            COMPREPLY=($(compgen -W "$commands" -- "$cur")); return ;;
    esac

    # First word: offer every command plus the global options.
    if (( cword == 1 )); then
        COMPREPLY=($(compgen -W "$commands $global_opts" -- "$cur"))
    fi
}

complete -F _mcpctl mcpctl
|
|
||||||
@@ -1,82 +0,0 @@
|
|||||||
# Fish-shell completions for mcpctl.

set -l commands status login logout config get describe delete logs create edit apply backup restore help
|
|
||||||
# mcpctl takes no free-form file arguments by default.
complete -c mcpctl -f

# Global options (valid anywhere on the command line)
complete -c mcpctl -s v -l version -d 'Show version'
complete -c mcpctl -l daemon-url -x -d 'mcplocal daemon URL'
complete -c mcpctl -l direct -d 'Bypass mcplocal, connect directly to mcpd'
complete -c mcpctl -s h -l help -d 'Show help'

# Top-level commands: only offered until a subcommand has been typed.
set -l no_subcmd "not __fish_seen_subcommand_from $commands"
complete -c mcpctl -n $no_subcmd -a status -d 'Show status and connectivity'
complete -c mcpctl -n $no_subcmd -a login -d 'Authenticate with mcpd'
complete -c mcpctl -n $no_subcmd -a logout -d 'Log out'
complete -c mcpctl -n $no_subcmd -a config -d 'Manage configuration'
complete -c mcpctl -n $no_subcmd -a get -d 'List resources'
complete -c mcpctl -n $no_subcmd -a describe -d 'Show resource details'
complete -c mcpctl -n $no_subcmd -a delete -d 'Delete a resource'
complete -c mcpctl -n $no_subcmd -a logs -d 'Get instance logs'
complete -c mcpctl -n $no_subcmd -a create -d 'Create a resource'
complete -c mcpctl -n $no_subcmd -a edit -d 'Edit a resource'
complete -c mcpctl -n $no_subcmd -a apply -d 'Apply configuration from file'
complete -c mcpctl -n $no_subcmd -a backup -d 'Backup configuration'
complete -c mcpctl -n $no_subcmd -a restore -d 'Restore from backup'
complete -c mcpctl -n $no_subcmd -a help -d 'Show help'

# Resource types for get/describe/delete/edit
set -l resources servers instances secrets templates projects users groups rbac
complete -c mcpctl -n "__fish_seen_subcommand_from get describe delete" -a "$resources" -d 'Resource type'
complete -c mcpctl -n "__fish_seen_subcommand_from edit" -a 'servers projects' -d 'Resource type'

# get/describe options
complete -c mcpctl -n "__fish_seen_subcommand_from get" -s o -l output -xa 'table json yaml' -d 'Output format'
complete -c mcpctl -n "__fish_seen_subcommand_from describe" -s o -l output -xa 'detail json yaml' -d 'Output format'
complete -c mcpctl -n "__fish_seen_subcommand_from describe" -l show-values -d 'Show secret values'

# login options
set -l in_login "__fish_seen_subcommand_from login"
complete -c mcpctl -n $in_login -l url -x -d 'mcpd URL'
complete -c mcpctl -n $in_login -l email -x -d 'Email address'
complete -c mcpctl -n $in_login -l password -x -d 'Password'

# config subcommands (offered until one has been typed)
set -l config_cmds view set path reset claude-generate impersonate
set -l in_config "__fish_seen_subcommand_from config; and not __fish_seen_subcommand_from $config_cmds"
complete -c mcpctl -n $in_config -a view -d 'Show configuration'
complete -c mcpctl -n $in_config -a set -d 'Set a config value'
complete -c mcpctl -n $in_config -a path -d 'Show config file path'
complete -c mcpctl -n $in_config -a reset -d 'Reset to defaults'
complete -c mcpctl -n $in_config -a claude-generate -d 'Generate .mcp.json'
complete -c mcpctl -n $in_config -a impersonate -d 'Impersonate a user'

# create subcommands (offered until one has been typed)
set -l create_cmds server secret project user group rbac
set -l in_create "__fish_seen_subcommand_from create; and not __fish_seen_subcommand_from $create_cmds"
complete -c mcpctl -n $in_create -a server -d 'Create a server'
complete -c mcpctl -n $in_create -a secret -d 'Create a secret'
complete -c mcpctl -n $in_create -a project -d 'Create a project'
complete -c mcpctl -n $in_create -a user -d 'Create a user'
complete -c mcpctl -n $in_create -a group -d 'Create a group'
complete -c mcpctl -n $in_create -a rbac -d 'Create an RBAC binding'

# logs options
set -l in_logs "__fish_seen_subcommand_from logs"
complete -c mcpctl -n $in_logs -l tail -x -d 'Number of lines'
complete -c mcpctl -n $in_logs -l since -x -d 'Since timestamp'
complete -c mcpctl -n $in_logs -s f -l follow -d 'Follow log output'

# backup options
complete -c mcpctl -n "__fish_seen_subcommand_from backup" -s o -l output -rF -d 'Output file'
complete -c mcpctl -n "__fish_seen_subcommand_from backup" -s p -l password -x -d 'Encryption password'

# restore options
set -l in_restore "__fish_seen_subcommand_from restore"
complete -c mcpctl -n $in_restore -s i -l input -rF -d 'Input file'
complete -c mcpctl -n $in_restore -s p -l password -x -d 'Decryption password'
complete -c mcpctl -n $in_restore -s c -l conflict -xa 'skip overwrite fail' -d 'Conflict strategy'

# apply takes a file
complete -c mcpctl -n "__fish_seen_subcommand_from apply" -s f -l file -rF -d 'Configuration file'
complete -c mcpctl -n "__fish_seen_subcommand_from apply" -F

# help completes with the command list
complete -c mcpctl -n "__fish_seen_subcommand_from help" -a "$commands"
|
|
||||||
398
deploy.sh
398
deploy.sh
@@ -1,398 +0,0 @@
|
|||||||
#!/bin/bash
# Deploy the mcpctl stack to Portainer via its REST API.
# Usage: ./deploy.sh [--dry-run]

set -e

# Paths relative to this script's own location.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
STACK_DIR="$SCRIPT_DIR/stack"
COMPOSE_FILE="$STACK_DIR/docker-compose.yml"
ENV_FILE="$STACK_DIR/.env"

# Portainer configuration (overridable via environment variables).
PORTAINER_URL="${PORTAINER_URL:-http://10.0.0.194:9000}"
PORTAINER_USER="${PORTAINER_USER:-michal}"
STACK_NAME="mcpctl"
ENDPOINT_ID="2"

# ANSI colors used by the log helpers and diff output.
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m'

# Leveled loggers; all write to stderr so stdout stays clean for data.
log_info() {
    echo -e "${GREEN}[INFO]${NC} $1" >&2
}
log_warn() {
    echo -e "${YELLOW}[WARN]${NC} $1" >&2
}
log_error() {
    echo -e "${RED}[ERROR]${NC} $1" >&2
}
|
|
||||||
|
|
||||||
check_files() {
    # Validate that both deployment inputs exist before touching the API.
    [[ -f "$COMPOSE_FILE" ]] || { log_error "Compose file not found: $COMPOSE_FILE"; exit 1; }
    [[ -f "$ENV_FILE" ]] || { log_error "Environment file not found: $ENV_FILE"; exit 1; }
    log_info "Found compose file: $COMPOSE_FILE"
    log_info "Found env file: $ENV_FILE"
}
|
|
||||||
|
|
||||||
get_password() {
    # Resolve the Portainer password, in priority order:
    # env var -> script-local file -> home-dir file -> interactive prompt.
    if [[ -n "$PORTAINER_PASSWORD" ]]; then
        echo "$PORTAINER_PASSWORD"
        return
    fi

    local candidate
    for candidate in "$SCRIPT_DIR/.portainer_password" "$HOME/.portainer_password"; do
        if [[ -f "$candidate" ]]; then
            cat "$candidate"
            return
        fi
    done

    # Last resort: ask the operator (prompt goes to the tty, echo to stderr
    # so the captured stdout contains only the password).
    read -s -p "Enter Portainer password for $PORTAINER_USER: " password
    echo >&2
    echo "$password"
}
|
|
||||||
|
|
||||||
get_jwt_token() {
    # Exchange username/password for a Portainer JWT; exits on auth failure.
    local pw="$1"
    log_info "Authenticating to Portainer..."

    # JSON-encode the password so special characters survive the payload.
    local pw_json
    pw_json=$(printf '%s' "$pw" | jq -Rs .)

    local auth_response
    auth_response=$(curl -s -X POST "$PORTAINER_URL/api/auth" \
        -H "Content-Type: application/json" \
        -d "{\"Username\":\"$PORTAINER_USER\",\"Password\":$pw_json}")

    local jwt
    jwt=$(echo "$auth_response" | jq -r '.jwt // empty')

    if [[ -z "$jwt" ]]; then
        log_error "Authentication failed: $(echo "$auth_response" | jq -r '.message // "Unknown error"')"
        exit 1
    fi

    echo "$jwt"
}
|
|
||||||
|
|
||||||
parse_env_to_json() {
    # Convert a dotenv-style file into the Portainer env array:
    #   [{"name":"KEY","value":"VAL"}, ...]
    # Skips comment lines, blank lines, and lines without '='.
    #
    # Fix: the previous sed-based escaping only handled backslash and
    # double-quote, so values containing tabs or other control characters
    # produced invalid JSON. Build each entry with jq (already a hard
    # dependency of this script) to get complete, correct escaping.
    local env_file="$1"
    local entries=()
    local line name value

    while IFS= read -r line || [[ -n "$line" ]]; do
        [[ "$line" =~ ^#.*$ ]] && continue
        [[ -z "$line" ]] && continue

        name="${line%%=*}"
        value="${line#*=}"
        # No '=' on this line: name equals the whole line -> not a var.
        [[ "$name" == "$line" ]] && continue

        entries+=("$(jq -cn --arg name "$name" --arg value "$value" \
            '{name: $name, value: $value}')")
    done < "$env_file"

    # Join the compact objects with commas inside one JSON array.
    local IFS=','
    echo "[${entries[*]}]"
}
|
|
||||||
|
|
||||||
# Find existing stack by name
|
|
||||||
# Look up the numeric id of the stack named $STACK_NAME.
# Prints nothing when no such stack exists.
find_stack_id() {
    local token="$1"
    curl -s -X GET "$PORTAINER_URL/api/stacks" \
        -H "Authorization: Bearer $token" |
        jq -r --arg name "$STACK_NAME" '.[] | select(.Name == $name) | .Id // empty'
}
|
|
||||||
|
|
||||||
# Fetch the full stack metadata (including its Env array) as raw JSON.
get_stack_info() {
    local token="$1" stack_id="$2"
    curl -s -X GET "$PORTAINER_URL/api/stacks/$stack_id" \
        -H "Content-Type: application/json" \
        -H "Authorization: Bearer $token"
}
|
|
||||||
|
|
||||||
# Print the compose file currently stored for a stack, or a placeholder
# comment when the API response has no StackFileContent field.
get_stack_file() {
    local token="$1" stack_id="$2"

    local body
    body=$(curl -s -X GET "$PORTAINER_URL/api/stacks/$stack_id/file" \
        -H "Authorization: Bearer $token" \
        -H "Content-Type: application/json")

    if echo "$body" | jq -e '.StackFileContent' > /dev/null 2>&1; then
        echo "$body" | jq -r '.StackFileContent'
    else
        echo "# Could not retrieve current compose file"
    fi
}
|
|
||||||
|
|
||||||
show_diff() {
    # Show a colorized diff between what Portainer currently has (compose
    # file + env vars) and what we are about to deploy. Read-only.
    #
    # Fix: the env and compose sections previously carried two byte-identical
    # colorizing while-loops; deduplicated into _colorize_diff below.
    local token="$1"
    local stack_id="$2"
    local env_json="$3"

    # Colorize a unified-diff file: headers yellow, removals red, additions green.
    _colorize_diff() {
        local line
        while IFS= read -r line; do
            if [[ "$line" == ---* ]] || [[ "$line" == +++* ]] || [[ "$line" == @@* ]]; then
                echo -e "${YELLOW}$line${NC}"
            elif [[ "$line" == -* ]]; then
                echo -e "${RED}$line${NC}"
            elif [[ "$line" == +* ]]; then
                echo -e "${GREEN}$line${NC}"
            else
                echo "$line"
            fi
        done < "$1"
    }

    log_info "Fetching current state from Portainer..."

    local current_compose
    current_compose=$(get_stack_file "$token" "$stack_id")

    # Current env vars come from stack metadata, normalized to NAME=value lines.
    local stack_info current_env
    stack_info=$(get_stack_info "$token" "$stack_id")
    current_env=$(echo "$stack_info" | jq -r 'if .Env then .Env[] | "\(.name)=\(.value)" else empty end' 2>/dev/null | sort)

    local new_env
    new_env=$(echo "$env_json" | jq -r '.[] | "\(.name)=\(.value)"' | sort)

    local tmp_dir
    tmp_dir=$(mktemp -d)

    echo "$current_compose" > "$tmp_dir/current_compose.yml"
    cat "$COMPOSE_FILE" > "$tmp_dir/new_compose.yml"
    echo "$current_env" > "$tmp_dir/current_env.txt"
    echo "$new_env" > "$tmp_dir/new_env.txt"

    echo ""
    echo "=== ENVIRONMENT VARIABLES DIFF ==="
    echo ""

    if diff -u "$tmp_dir/current_env.txt" "$tmp_dir/new_env.txt" > "$tmp_dir/env_diff.txt" 2>&1; then
        echo -e "${GREEN}No changes in environment variables${NC}"
    else
        _colorize_diff "$tmp_dir/env_diff.txt"
    fi

    echo ""
    echo "=== COMPOSE FILE DIFF ==="
    echo ""

    if diff -u "$tmp_dir/current_compose.yml" "$tmp_dir/new_compose.yml" > "$tmp_dir/compose_diff.txt" 2>&1; then
        echo -e "${GREEN}No changes in compose file${NC}"
    else
        _colorize_diff "$tmp_dir/compose_diff.txt"
    fi

    rm -rf "$tmp_dir"
}
|
|
||||||
|
|
||||||
create_stack() {
    # POST a brand-new stack (compose type=2, method=string) to Portainer.
    local token="$1"
    local env_json="$2"

    # JSON-encode the compose file contents for embedding in the payload.
    local raw compose_json
    raw=$(cat "$COMPOSE_FILE")
    compose_json=$(jq -Rs . <<< "$raw")

    log_info "Creating new stack '$STACK_NAME'..."

    local body
    body=$(jq -n \
        --arg name "$STACK_NAME" \
        --argjson env "$env_json" \
        --argjson stackFileContent "$compose_json" \
        '{"name": $name, "env": $env, "stackFileContent": $stackFileContent}')

    local reply
    reply=$(curl -s -X POST "$PORTAINER_URL/api/stacks?type=2&method=string&endpointId=$ENDPOINT_ID" \
        -H "Authorization: Bearer $token" \
        -H "Content-Type: application/json" \
        -d "$body")

    # Portainer reports failures via a "message" field in the reply.
    local err
    err=$(echo "$reply" | jq -r '.message // empty')
    if [[ -n "$err" ]]; then
        log_error "Stack creation failed: $err"
        echo "$reply" | jq .
        exit 1
    fi

    local new_id
    new_id=$(echo "$reply" | jq -r '.Id')
    log_info "Stack created successfully! (ID: $new_id)"
    echo "$reply" | jq '{Id, Name, Status, CreationDate}'
}
|
|
||||||
|
|
||||||
update_stack() {
    # PUT updated env vars + compose content to an existing stack.
    # In dry-run mode, only shows the pending diff and makes no changes.
    local token="$1"
    local stack_id="$2"
    local dry_run="$3"

    local env_json
    env_json=$(parse_env_to_json "$ENV_FILE")

    if [[ "$dry_run" == "true" ]]; then
        log_warn "DRY RUN - Not actually deploying"
        show_diff "$token" "$stack_id" "$env_json"
        echo ""
        log_warn "DRY RUN complete - no changes made"
        log_info "Run without --dry-run to apply these changes"
        return 0
    fi

    log_info "Deploying $(echo "$env_json" | jq 'length') environment variables"
    log_info "Updating stack '$STACK_NAME' (ID: $stack_id)..."

    # JSON-encode the compose file contents for embedding in the payload.
    local raw compose_json
    raw=$(cat "$COMPOSE_FILE")
    compose_json=$(jq -Rs . <<< "$raw")

    local body
    body=$(jq -n \
        --argjson env "$env_json" \
        --argjson stackFileContent "$compose_json" \
        '{"env": $env, "stackFileContent": $stackFileContent, "prune": true, "pullImage": true}')

    local reply
    reply=$(curl -s -X PUT "$PORTAINER_URL/api/stacks/$stack_id?endpointId=$ENDPOINT_ID" \
        -H "Authorization: Bearer $token" \
        -H "Content-Type: application/json" \
        -d "$body")

    # Portainer reports failures via a "message" field in the reply.
    local err
    err=$(echo "$reply" | jq -r '.message // empty')
    if [[ -n "$err" ]]; then
        log_error "Deployment failed: $err"
        echo "$reply" | jq .
        exit 1
    fi

    log_info "Stack updated successfully!"
    echo "$reply" | jq '{Id, Name, Status, CreationDate, UpdateDate}'
}
|
|
||||||
|
|
||||||
# Entry point: parse flags, authenticate against Portainer, then create the
# stack if it is missing or update it if it already exists.
main() {
    local dry_run=false

    # Flag parsing: only --dry-run and --help/-h are recognized.
    while [[ $# -gt 0 ]]; do
        case "$1" in
        --dry-run)
            dry_run=true
            shift
            ;;
        --help | -h)
            echo "Usage: $0 [--dry-run]"
            echo ""
            echo "Deploy mcpctl stack to Portainer"
            echo ""
            echo "Options:"
            echo " --dry-run Show what would be deployed without actually deploying"
            echo " --help Show this help message"
            echo ""
            echo "Environment variables:"
            echo " PORTAINER_URL Portainer URL (default: http://10.0.0.194:9000)"
            echo " PORTAINER_USER Portainer username (default: michal)"
            echo " PORTAINER_PASSWORD Portainer password (or store in ~/.portainer_password)"
            exit 0
            ;;
        *)
            log_error "Unknown option: $1"
            exit 1
            ;;
        esac
    done

    echo "========================================"
    echo " mcpctl Stack Deployment"
    echo "========================================"
    echo ""

    check_files

    # Authenticate once up front; every later API call reuses this token.
    local password token
    password=$(get_password)
    token=$(get_jwt_token "$password")
    log_info "Authentication successful"

    # Find or create stack
    local stack_id
    stack_id=$(find_stack_id "$token")

    if [[ -n "$stack_id" ]]; then
        # Existing stack: report its current state, then update it.
        local stack_info status_code status_text
        stack_info=$(get_stack_info "$token" "$stack_id")
        status_code=$(echo "$stack_info" | jq -r '.Status // 0')
        status_text="Unknown"
        case "$status_code" in
        1) status_text="Active" ;;
        2) status_text="Inactive" ;;
        esac
        log_info "Current stack status: $status_text (ID: $stack_id, Env vars: $(echo "$stack_info" | jq '.Env | length'))"

        echo ""
        update_stack "$token" "$stack_id" "$dry_run"
    else
        # No stack yet: a dry run only reports; a real run creates it.
        if [[ "$dry_run" == "true" ]]; then
            log_warn "Stack '$STACK_NAME' does not exist yet"
            log_info "A real deploy would create it"
            return 0
        fi

        log_info "Stack '$STACK_NAME' not found, creating..."
        local env_json
        env_json=$(parse_env_to_json "$ENV_FILE")
        create_stack "$token" "$env_json"
    fi

    echo ""
    log_info "Done!"

    if [[ "$dry_run" == "false" ]]; then
        log_info "Check Portainer UI to verify containers are running"
        log_info "URL: $PORTAINER_URL/#!/$ENDPOINT_ID/docker/stacks/$STACK_NAME"
    fi
}

main "$@"
||||||
@@ -1,64 +0,0 @@
|
|||||||
# Stage 1: Build TypeScript
# Multi-stage build: compile the pnpm workspace here, then copy only the
# built artifacts into the leaner runtime image below.
FROM node:20-alpine AS builder

# Pin pnpm via corepack so builds are reproducible.
RUN corepack enable && corepack prepare pnpm@9.15.0 --activate

WORKDIR /app

# Copy workspace config and package manifests
# (manifests first, so the dependency-install layer is cached independently
# of source-code edits)
COPY pnpm-workspace.yaml pnpm-lock.yaml package.json tsconfig.base.json ./
COPY src/mcpd/package.json src/mcpd/tsconfig.json src/mcpd/
COPY src/db/package.json src/db/tsconfig.json src/db/
COPY src/shared/package.json src/shared/tsconfig.json src/shared/

# Install all dependencies
RUN pnpm install --frozen-lockfile

# Copy source code
COPY src/mcpd/src/ src/mcpd/src/
COPY src/db/src/ src/db/src/
COPY src/db/prisma/ src/db/prisma/
COPY src/shared/src/ src/shared/src/

# Generate Prisma client and build TypeScript
# (packages built in dependency order: shared -> db -> mcpd)
RUN pnpm -F @mcpctl/db db:generate
RUN pnpm -F @mcpctl/shared build && pnpm -F @mcpctl/db build && pnpm -F @mcpctl/mcpd build

# Stage 2: Production runtime
FROM node:20-alpine

RUN corepack enable && corepack prepare pnpm@9.15.0 --activate

WORKDIR /app

# Copy workspace config, manifests, and lockfile
COPY pnpm-workspace.yaml pnpm-lock.yaml package.json ./
COPY src/mcpd/package.json src/mcpd/
COPY src/db/package.json src/db/
COPY src/shared/package.json src/shared/

# Install all deps (prisma CLI needed at runtime for db push)
RUN pnpm install --frozen-lockfile

# Copy prisma schema and generate client
# (regenerated in this image rather than copied from the builder)
COPY src/db/prisma/ src/db/prisma/
RUN pnpm -F @mcpctl/db db:generate

# Copy built output from builder
COPY --from=builder /app/src/shared/dist/ src/shared/dist/
COPY --from=builder /app/src/db/dist/ src/db/dist/
COPY --from=builder /app/src/mcpd/dist/ src/mcpd/dist/

# Copy templates for seeding
COPY templates/ templates/

# Copy entrypoint
COPY deploy/entrypoint.sh /entrypoint.sh
RUN chmod +x /entrypoint.sh

EXPOSE 3100

# busybox wget ships with alpine, so the probe needs no extra package.
HEALTHCHECK --interval=10s --timeout=5s --retries=3 --start-period=10s \
  CMD wget -q --spider http://localhost:3100/healthz || exit 1

ENTRYPOINT ["/entrypoint.sh"]
|
|
||||||
@@ -1,13 +0,0 @@
|
|||||||
# Base container for npm-based MCP servers (STDIO transport).
# mcpd uses this image to run `npx -y <packageName>` when a server
# has packageName but no dockerImage.
# Using slim (Debian) instead of alpine for better npm package compatibility.
FROM node:20-slim

WORKDIR /mcp

# Pre-warm npx cache directory
# (ensures /root/.npm exists so npx does not have to create it on first run)
RUN mkdir -p /root/.npm

# Default entrypoint — overridden by mcpd via container command;
# the package name to execute is appended as the container command.
ENTRYPOINT ["npx", "-y"]
|
|
||||||
@@ -15,50 +15,6 @@ services:
|
|||||||
interval: 5s
|
interval: 5s
|
||||||
timeout: 5s
|
timeout: 5s
|
||||||
retries: 5
|
retries: 5
|
||||||
networks:
|
|
||||||
- mcpctl
|
|
||||||
|
|
||||||
mcpd:
|
|
||||||
build:
|
|
||||||
context: ..
|
|
||||||
dockerfile: deploy/Dockerfile.mcpd
|
|
||||||
container_name: mcpctl-mcpd
|
|
||||||
ports:
|
|
||||||
- "3100:3100"
|
|
||||||
environment:
|
|
||||||
DATABASE_URL: postgresql://mcpctl:mcpctl_dev@postgres:5432/mcpctl
|
|
||||||
MCPD_PORT: "3100"
|
|
||||||
MCPD_HOST: "0.0.0.0"
|
|
||||||
MCPD_LOG_LEVEL: info
|
|
||||||
MCPD_NODE_RUNNER_IMAGE: mcpctl-node-runner:latest
|
|
||||||
MCPD_MCP_NETWORK: mcp-servers
|
|
||||||
depends_on:
|
|
||||||
postgres:
|
|
||||||
condition: service_healthy
|
|
||||||
volumes:
|
|
||||||
# Mount container runtime socket (Docker or Podman)
|
|
||||||
# For Docker: /var/run/docker.sock
|
|
||||||
# For Podman: /run/user/<UID>/podman/podman.sock
|
|
||||||
- ${CONTAINER_SOCK:-/var/run/docker.sock}:/var/run/docker.sock
|
|
||||||
networks:
|
|
||||||
- mcpctl
|
|
||||||
- mcp-servers
|
|
||||||
healthcheck:
|
|
||||||
test: ["CMD-SHELL", "wget -q --spider http://localhost:3100/healthz || exit 1"]
|
|
||||||
interval: 10s
|
|
||||||
timeout: 5s
|
|
||||||
retries: 3
|
|
||||||
start_period: 10s
|
|
||||||
|
|
||||||
# Base image for npm-based MCP servers (built once, used by mcpd)
|
|
||||||
node-runner:
|
|
||||||
build:
|
|
||||||
context: ..
|
|
||||||
dockerfile: deploy/Dockerfile.node-runner
|
|
||||||
image: mcpctl-node-runner:latest
|
|
||||||
profiles:
|
|
||||||
- build
|
|
||||||
entrypoint: ["echo", "Image built successfully"]
|
|
||||||
|
|
||||||
postgres-test:
|
postgres-test:
|
||||||
image: postgres:16-alpine
|
image: postgres:16-alpine
|
||||||
@@ -76,18 +32,6 @@ services:
|
|||||||
interval: 5s
|
interval: 5s
|
||||||
timeout: 5s
|
timeout: 5s
|
||||||
retries: 5
|
retries: 5
|
||||||
profiles:
|
|
||||||
- test
|
|
||||||
|
|
||||||
networks:
|
|
||||||
mcpctl:
|
|
||||||
driver: bridge
|
|
||||||
mcp-servers:
|
|
||||||
name: mcp-servers
|
|
||||||
driver: bridge
|
|
||||||
# Not internal — MCP servers need outbound access to reach external APIs
|
|
||||||
# (e.g., Grafana, Home Assistant). Isolation is enforced by not binding
|
|
||||||
# host ports on MCP server containers; only mcpd can reach them.
|
|
||||||
|
|
||||||
volumes:
|
volumes:
|
||||||
mcpctl-pgdata:
|
mcpctl-pgdata:
|
||||||
|
|||||||
@@ -1,11 +0,0 @@
|
|||||||
#!/bin/sh
# Container entrypoint for mcpd: sync DB schema, seed templates, start server.
set -e

# Push the Prisma schema to the database (idempotent). --accept-data-loss
# permits destructive schema changes — NOTE(review): acceptable only if mcpd
# is the sole owner of this database; confirm before reuse.
echo "mcpd: pushing database schema..."
pnpm -F @mcpctl/db exec prisma db push --schema=prisma/schema.prisma --accept-data-loss 2>&1

# Seed server templates from the templates/ directory bundled in the image.
echo "mcpd: seeding templates..."
TEMPLATES_DIR=templates node src/mcpd/dist/seed-runner.js

# exec so node becomes PID 1 and receives container stop signals directly.
echo "mcpd: starting server..."
exec node src/mcpd/dist/main.js
|
|
||||||
@@ -1,15 +0,0 @@
|
|||||||
# systemd *user* unit for the mcpctl local MCP proxy (installed by the RPM
# to /usr/lib/systemd/user/; enable with `systemctl --user enable --now mcplocal`).
[Unit]
Description=mcpctl local MCP proxy
After=network.target

[Service]
Type=simple
ExecStart=/usr/bin/mcpctl-local
Restart=on-failure
RestartSec=5
# Remote mcpd daemon this proxy forwards to.
Environment=MCPLOCAL_MCPD_URL=http://10.0.0.194:3100
# Local listen address — bound to loopback only, so nothing off-host can reach it.
Environment=MCPLOCAL_HTTP_PORT=3200
Environment=MCPLOCAL_HTTP_HOST=127.0.0.1

[Install]
WantedBy=default.target
|
|
||||||
@@ -1,149 +0,0 @@
|
|||||||
# mcpctl Architecture
|
|
||||||
|
|
||||||
## Overview
|
|
||||||
|
|
||||||
mcpctl is a kubectl-like management tool for MCP (Model Context Protocol) servers. It consists of a CLI, a daemon server, a database layer, a local proxy, and shared utilities.
|
|
||||||
|
|
||||||
## Package Structure
|
|
||||||
|
|
||||||
```
|
|
||||||
src/
|
|
||||||
├── cli/ @mcpctl/cli - Command-line interface
|
|
||||||
├── mcpd/ @mcpctl/mcpd - Daemon server (REST API)
|
|
||||||
├── db/ @mcpctl/db - Database layer (Prisma + PostgreSQL)
|
|
||||||
├── local-proxy/ @mcpctl/local-proxy - MCP protocol proxy
|
|
||||||
└── shared/ @mcpctl/shared - Shared constants and utilities
|
|
||||||
```
|
|
||||||
|
|
||||||
## Component Diagram
|
|
||||||
|
|
||||||
```
|
|
||||||
┌─────────────────┐ HTTP ┌──────────────┐ Prisma ┌────────────┐
|
|
||||||
│ mcpctl CLI │ ──────────────│ mcpd │ ──────────────│ PostgreSQL │
|
|
||||||
│ (Commander.js) │ │ (Fastify 5) │ │ │
|
|
||||||
└─────────────────┘ └──────┬───────┘ └────────────┘
|
|
||||||
│
|
|
||||||
│ Docker/Podman API
|
|
||||||
▼
|
|
||||||
┌──────────────┐
|
|
||||||
│ Containers │
|
|
||||||
│ (MCP servers)│
|
|
||||||
└──────────────┘
|
|
||||||
|
|
||||||
┌─────────────────┐ STDIO ┌──────────────┐ STDIO/HTTP ┌────────────┐
|
|
||||||
│ Claude / LLM │ ────────────│ local-proxy │ ──────────────│ MCP Servers│
|
|
||||||
│ │ │ (McpRouter) │ │ │
|
|
||||||
└─────────────────┘ └──────────────┘ └────────────┘
|
|
||||||
```
|
|
||||||
|
|
||||||
## CLI (`@mcpctl/cli`)
|
|
||||||
|
|
||||||
The CLI is built with Commander.js and communicates with mcpd via HTTP REST.
|
|
||||||
|
|
||||||
### Commands
|
|
||||||
|
|
||||||
| Command | Description |
|
|
||||||
|---------|-------------|
|
|
||||||
| `mcpctl get <resource>` | List resources (servers, profiles, projects, instances) |
|
|
||||||
| `mcpctl describe <resource> <id>` | Show detailed resource info |
|
|
||||||
| `mcpctl apply <file>` | Apply declarative YAML/JSON configuration |
|
|
||||||
| `mcpctl setup [name]` | Interactive server setup wizard |
|
|
||||||
| `mcpctl instance list/start/stop/restart/remove/logs/inspect` | Manage instances |
|
|
||||||
| `mcpctl claude generate/show/add/remove` | Manage .mcp.json files |
|
|
||||||
| `mcpctl project list/create/delete/show/profiles/set-profiles` | Manage projects |
|
|
||||||
| `mcpctl config get/set/path` | Manage CLI configuration |
|
|
||||||
| `mcpctl status` | Check daemon connectivity |
|
|
||||||
|
|
||||||
### Configuration
|
|
||||||
|
|
||||||
CLI config is stored at `~/.config/mcpctl/config.json` with:
|
|
||||||
- `daemonUrl`: mcpd server URL (default: `http://localhost:4444`)
|
|
||||||
|
|
||||||
## Daemon (`@mcpctl/mcpd`)
|
|
||||||
|
|
||||||
Fastify 5-based REST API server that manages MCP server lifecycle.
|
|
||||||
|
|
||||||
### Layers
|
|
||||||
|
|
||||||
1. **Routes** - HTTP handlers, parameter extraction
|
|
||||||
2. **Services** - Business logic, validation (Zod schemas), error handling
|
|
||||||
3. **Repositories** - Data access via Prisma (interface-based for testability)
|
|
||||||
|
|
||||||
### API Endpoints
|
|
||||||
|
|
||||||
| Endpoint | Methods | Description |
|
|
||||||
|----------|---------|-------------|
|
|
||||||
| `/api/v1/servers` | GET, POST | MCP server definitions |
|
|
||||||
| `/api/v1/servers/:id` | GET, PUT, DELETE | Single server operations |
|
|
||||||
| `/api/v1/profiles` | GET, POST | Server configuration profiles |
|
|
||||||
| `/api/v1/profiles/:id` | GET, PUT, DELETE | Single profile operations |
|
|
||||||
| `/api/v1/projects` | GET, POST | Project management |
|
|
||||||
| `/api/v1/projects/:id` | GET, PUT, DELETE | Single project operations |
|
|
||||||
| `/api/v1/projects/:id/profiles` | GET, PUT | Project profile assignments |
|
|
||||||
| `/api/v1/projects/:id/mcp-config` | GET | Generate .mcp.json |
|
|
||||||
| `/api/v1/instances` | GET, POST | Instance lifecycle |
|
|
||||||
| `/api/v1/instances/:id` | GET, DELETE | Instance operations |
|
|
||||||
| `/api/v1/instances/:id/stop` | POST | Stop instance |
|
|
||||||
| `/api/v1/instances/:id/restart` | POST | Restart instance |
|
|
||||||
| `/api/v1/instances/:id/inspect` | GET | Container inspection |
|
|
||||||
| `/api/v1/instances/:id/logs` | GET | Container logs |
|
|
||||||
| `/api/v1/audit-logs` | GET | Query audit logs |
|
|
||||||
| `/api/v1/audit-logs/:id` | GET | Single audit log |
|
|
||||||
| `/api/v1/audit-logs/purge` | POST | Purge expired logs |
|
|
||||||
| `/health` | GET | Health check (detailed) |
|
|
||||||
| `/healthz` | GET | Liveness probe |
|
|
||||||
|
|
||||||
### Container Orchestration
|
|
||||||
|
|
||||||
The `McpOrchestrator` interface abstracts container management:
|
|
||||||
- `DockerContainerManager` - Docker/Podman implementation via dockerode
|
|
||||||
- Future: `KubernetesOrchestrator` for k8s deployments
|
|
||||||
|
|
||||||
## Local Proxy (`@mcpctl/local-proxy`)
|
|
||||||
|
|
||||||
Aggregates multiple MCP servers behind a single STDIO endpoint.
|
|
||||||
|
|
||||||
### Features
|
|
||||||
|
|
||||||
- **Tool namespacing**: `servername/toolname` routing
|
|
||||||
- **Resource forwarding**: `resources/list` and `resources/read`
|
|
||||||
- **Prompt forwarding**: `prompts/list` and `prompts/get`
|
|
||||||
- **Notification pass-through**: Upstream notifications forwarded to client
|
|
||||||
- **Health monitoring**: Periodic health checks with state tracking
|
|
||||||
- **Transport support**: STDIO (child process) and HTTP (SSE/Streamable HTTP)
|
|
||||||
|
|
||||||
### Usage
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Via config file
|
|
||||||
mcpctl-proxy --config proxy.json
|
|
||||||
|
|
||||||
# Via CLI flags
|
|
||||||
mcpctl-proxy --upstream "slack:npx -y @anthropic/slack-mcp" \
|
|
||||||
--upstream "github:npx -y @anthropic/github-mcp"
|
|
||||||
```
|
|
||||||
|
|
||||||
## Database (`@mcpctl/db`)
|
|
||||||
|
|
||||||
Prisma ORM with PostgreSQL. Key models:
|
|
||||||
|
|
||||||
- **User** / **Session** - Authentication
|
|
||||||
- **McpServer** - Server definitions (name, transport, package, docker image)
|
|
||||||
- **McpProfile** - Per-server configurations (env overrides, permissions)
|
|
||||||
- **Project** - Grouping of profiles for a workspace
|
|
||||||
- **McpInstance** - Running container instances with lifecycle state
|
|
||||||
- **AuditLog** - Immutable operation audit trail
|
|
||||||
|
|
||||||
## Shared (`@mcpctl/shared`)
|
|
||||||
|
|
||||||
Constants and utilities shared across packages:
|
|
||||||
- `APP_NAME`, `APP_VERSION`
|
|
||||||
- Common type definitions
|
|
||||||
|
|
||||||
## Design Principles
|
|
||||||
|
|
||||||
1. **Interface-based repositories** - All data access through interfaces for testability
|
|
||||||
2. **Dependency injection** - Services receive dependencies via constructor
|
|
||||||
3. **Zod validation** - All user input validated with Zod schemas
|
|
||||||
4. **Namespaced errors** - Custom error classes with HTTP status codes
|
|
||||||
5. **TypeScript strict mode** - `exactOptionalPropertyTypes`, `noUncheckedIndexedAccess`
|
|
||||||
@@ -1,157 +0,0 @@
|
|||||||
# Getting Started with mcpctl
|
|
||||||
|
|
||||||
## Prerequisites
|
|
||||||
|
|
||||||
- Node.js >= 20.0.0
|
|
||||||
- pnpm >= 9.0.0
|
|
||||||
- PostgreSQL (for mcpd)
|
|
||||||
- Docker or Podman (for container management)
|
|
||||||
|
|
||||||
## Installation
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Clone the repository
|
|
||||||
git clone <repo-url>
|
|
||||||
cd mcpctl
|
|
||||||
|
|
||||||
# Install dependencies
|
|
||||||
pnpm install
|
|
||||||
|
|
||||||
# Generate Prisma client
|
|
||||||
pnpm --filter @mcpctl/db exec prisma generate
|
|
||||||
|
|
||||||
# Build all packages
|
|
||||||
pnpm build
|
|
||||||
```
|
|
||||||
|
|
||||||
## Quick Start
|
|
||||||
|
|
||||||
### 1. Start the Database
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Start PostgreSQL via Docker Compose
|
|
||||||
pnpm db:up
|
|
||||||
|
|
||||||
# Run database migrations
|
|
||||||
pnpm --filter @mcpctl/db exec prisma db push
|
|
||||||
```
|
|
||||||
|
|
||||||
### 2. Start the Daemon
|
|
||||||
|
|
||||||
```bash
|
|
||||||
cd src/mcpd
|
|
||||||
pnpm dev
|
|
||||||
```
|
|
||||||
|
|
||||||
The daemon starts on `http://localhost:4444` by default.
|
|
||||||
|
|
||||||
### 3. Use the CLI
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Check daemon status
|
|
||||||
mcpctl status
|
|
||||||
|
|
||||||
# Register an MCP server
|
|
||||||
mcpctl apply config.yaml
|
|
||||||
|
|
||||||
# Or use the interactive wizard
|
|
||||||
mcpctl setup my-server
|
|
||||||
|
|
||||||
# List registered servers
|
|
||||||
mcpctl get servers
|
|
||||||
|
|
||||||
# Start an instance
|
|
||||||
mcpctl instance start <server-id>
|
|
||||||
|
|
||||||
# Check instance status
|
|
||||||
mcpctl instance list
|
|
||||||
|
|
||||||
# View instance logs
|
|
||||||
mcpctl instance logs <instance-id>
|
|
||||||
```
|
|
||||||
|
|
||||||
### 4. Generate .mcp.json for Claude
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Create a project
|
|
||||||
mcpctl project create my-workspace
|
|
||||||
|
|
||||||
# Assign profiles to project
|
|
||||||
mcpctl project set-profiles <project-id> <profile-id-1> <profile-id-2>
|
|
||||||
|
|
||||||
# Generate .mcp.json
|
|
||||||
mcpctl claude generate <project-id>
|
|
||||||
|
|
||||||
# Or manually add servers
|
|
||||||
mcpctl claude add my-server -c npx -a -y @my/mcp-server
|
|
||||||
```
|
|
||||||
|
|
||||||
## Example Configuration
|
|
||||||
|
|
||||||
Create a `config.yaml` file:
|
|
||||||
|
|
||||||
```yaml
|
|
||||||
servers:
|
|
||||||
- name: slack
|
|
||||||
description: Slack MCP server
|
|
||||||
transport: STDIO
|
|
||||||
packageName: "@anthropic/slack-mcp"
|
|
||||||
env:
|
|
||||||
- name: SLACK_TOKEN
|
|
||||||
valueFrom:
|
|
||||||
secretRef:
|
|
||||||
name: slack-secrets
|
|
||||||
key: token
|
|
||||||
|
|
||||||
- name: github
|
|
||||||
description: GitHub MCP server
|
|
||||||
transport: STDIO
|
|
||||||
packageName: "@anthropic/github-mcp"
|
|
||||||
|
|
||||||
profiles:
|
|
||||||
- name: default
|
|
||||||
server: slack
|
|
||||||
envOverrides:
|
|
||||||
SLACK_TOKEN: "xoxb-your-token"
|
|
||||||
|
|
||||||
projects:
|
|
||||||
- name: dev-workspace
|
|
||||||
description: Development workspace
|
|
||||||
```
|
|
||||||
|
|
||||||
Apply it:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
mcpctl apply config.yaml
|
|
||||||
```
|
|
||||||
|
|
||||||
## Running Tests
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Run all tests
|
|
||||||
pnpm test:run
|
|
||||||
|
|
||||||
# Run tests for a specific package
|
|
||||||
pnpm --filter @mcpctl/cli test:run
|
|
||||||
pnpm --filter @mcpctl/mcpd test:run
|
|
||||||
pnpm --filter @mcpctl/local-proxy test:run
|
|
||||||
|
|
||||||
# Run tests with coverage
|
|
||||||
pnpm test:coverage
|
|
||||||
|
|
||||||
# Typecheck
|
|
||||||
pnpm typecheck
|
|
||||||
|
|
||||||
# Lint
|
|
||||||
pnpm lint
|
|
||||||
```
|
|
||||||
|
|
||||||
## Development
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Watch mode for tests
|
|
||||||
pnpm test
|
|
||||||
|
|
||||||
# Build in watch mode
|
|
||||||
cd src/cli && pnpm dev
|
|
||||||
```
|
|
||||||
@@ -1,28 +0,0 @@
|
|||||||
servers:
|
|
||||||
- name: ha-mcp
|
|
||||||
description: "Home Assistant MCP - smart home control via MCP"
|
|
||||||
dockerImage: "ghcr.io/homeassistant-ai/ha-mcp:2.4"
|
|
||||||
transport: STREAMABLE_HTTP
|
|
||||||
containerPort: 3000
|
|
||||||
# For mcpd-managed containers:
|
|
||||||
command:
|
|
||||||
- python
|
|
||||||
- "-c"
|
|
||||||
- "from ha_mcp.server import HomeAssistantSmartMCPServer; s = HomeAssistantSmartMCPServer(); s.mcp.run(transport='sse', host='0.0.0.0', port=3000)"
|
|
||||||
# For connecting to an already-running instance (host.containers.internal for container-to-host):
|
|
||||||
externalUrl: "http://host.containers.internal:8086/mcp"
|
|
||||||
env:
|
|
||||||
- name: HOMEASSISTANT_URL
|
|
||||||
value: ""
|
|
||||||
- name: HOMEASSISTANT_TOKEN
|
|
||||||
valueFrom:
|
|
||||||
secretRef:
|
|
||||||
name: ha-secrets
|
|
||||||
key: token
|
|
||||||
|
|
||||||
profiles:
|
|
||||||
- name: production
|
|
||||||
server: ha-mcp
|
|
||||||
envOverrides:
|
|
||||||
HOMEASSISTANT_URL: "https://ha.itaz.eu"
|
|
||||||
HOMEASSISTANT_TOKEN: "<REDACTED-ROTATE-THIS-TOKEN>"  # SECURITY: a real long-lived Home Assistant access token was committed here. Rotate it in HA immediately and supply it via a secretRef/env override instead of committing it.
|
|
||||||
@@ -1,35 +0,0 @@
|
|||||||
#!/bin/bash
# Full deployment: Docker image → Portainer stack → RPM build/publish/install
set -e

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
cd "$SCRIPT_DIR"

# Load .env
if [ -f .env ]; then
    set -a; source .env; set +a
fi

# Print a step header surrounded by blank lines.
announce() {
    echo ""
    echo "$1"
    echo ""
}

echo "========================================"
echo " mcpctl Full Deploy"
echo "========================================"

announce ">>> Step 1/3: Build & push mcpd Docker image"
bash scripts/build-mcpd.sh "$@"

announce ">>> Step 2/3: Deploy stack to production"
bash deploy.sh

announce ">>> Step 3/3: Build, publish & install RPM"
bash scripts/release.sh

echo ""
echo "========================================"
echo " Full deploy complete!"
echo "========================================"
|
|
||||||
@@ -1,26 +0,0 @@
|
|||||||
#!/bin/bash
# Build (if needed) and install mcpctl RPM locally
set -e

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
cd "$SCRIPT_DIR"

# Print the first dist/mcpctl-*.rpm, or nothing if none exists.
# Uses a glob instead of parsing `ls` output (fragile with unusual names).
find_rpm() {
    local candidates=(dist/mcpctl-*.rpm)
    if [[ -e "${candidates[0]}" ]]; then
        printf '%s\n' "${candidates[0]}"
    fi
}

RPM_FILE=$(find_rpm)

# Build if no RPM exists or if source is newer than the RPM
if [[ -z "$RPM_FILE" ]] || [[ $(find src/ -name '*.ts' -newer "$RPM_FILE" 2>/dev/null | head -1) ]]; then
    echo "==> Building RPM..."
    bash scripts/build-rpm.sh
    RPM_FILE=$(find_rpm)
    # Fail loudly instead of handing an empty path to rpm below.
    if [[ -z "$RPM_FILE" ]]; then
        echo "Error: build did not produce an RPM in dist/" >&2
        exit 1
    fi
else
    echo "==> RPM is up to date: $RPM_FILE"
fi

echo "==> Installing $RPM_FILE..."
sudo rpm -Uvh --force "$RPM_FILE"

echo "==> Reloading systemd user units..."
systemctl --user daemon-reload

echo "==> Done!"
echo " Enable mcplocal: systemctl --user enable --now mcplocal"
|
|
||||||
28
nfpm.yaml
28
nfpm.yaml
@@ -1,28 +0,0 @@
|
|||||||
# nfpm packaging manifest: produces the mcpctl RPM (CLI binary, local proxy,
# systemd user unit, and shell completions). Built by scripts/build-rpm.sh.
name: mcpctl
arch: amd64
version: 0.1.0
release: "1"
maintainer: michal
description: kubectl-like CLI for managing MCP servers
license: MIT
contents:
  # Standalone binaries produced by `bun build --compile`.
  - src: ./dist/mcpctl
    dst: /usr/bin/mcpctl
    file_info:
      mode: 0755
  - src: ./dist/mcpctl-local
    dst: /usr/bin/mcpctl-local
    file_info:
      mode: 0755
  # systemd *user* unit (enable with: systemctl --user enable --now mcplocal).
  - src: ./deploy/mcplocal.service
    dst: /usr/lib/systemd/user/mcplocal.service
    file_info:
      mode: 0644
  # Shell completions (bash and fish).
  - src: ./completions/mcpctl.bash
    dst: /usr/share/bash-completion/completions/mcpctl
    file_info:
      mode: 0644
  - src: ./completions/mcpctl.fish
    dst: /usr/share/fish/vendor_completions.d/mcpctl.fish
    file_info:
      mode: 0644
|
|
||||||
10
package.json
10
package.json
@@ -15,14 +15,7 @@
|
|||||||
"clean": "pnpm -r run clean && rimraf node_modules",
|
"clean": "pnpm -r run clean && rimraf node_modules",
|
||||||
"db:up": "docker compose -f deploy/docker-compose.yml up -d",
|
"db:up": "docker compose -f deploy/docker-compose.yml up -d",
|
||||||
"db:down": "docker compose -f deploy/docker-compose.yml down",
|
"db:down": "docker compose -f deploy/docker-compose.yml down",
|
||||||
"typecheck": "tsc --build",
|
"typecheck": "tsc --build"
|
||||||
"rpm:build": "bash scripts/build-rpm.sh",
|
|
||||||
"rpm:publish": "bash scripts/publish-rpm.sh",
|
|
||||||
"release": "bash scripts/release.sh",
|
|
||||||
"mcpd:build": "bash scripts/build-mcpd.sh",
|
|
||||||
"mcpd:deploy": "bash deploy.sh",
|
|
||||||
"mcpd:deploy-dry": "bash deploy.sh --dry-run",
|
|
||||||
"mcpd:logs": "bash logs.sh"
|
|
||||||
},
|
},
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": ">=20.0.0",
|
"node": ">=20.0.0",
|
||||||
@@ -30,7 +23,6 @@
|
|||||||
},
|
},
|
||||||
"packageManager": "pnpm@9.15.0",
|
"packageManager": "pnpm@9.15.0",
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@types/node": "^25.3.0",
|
|
||||||
"@typescript-eslint/eslint-plugin": "^8.56.0",
|
"@typescript-eslint/eslint-plugin": "^8.56.0",
|
||||||
"@typescript-eslint/parser": "^8.56.0",
|
"@typescript-eslint/parser": "^8.56.0",
|
||||||
"@vitest/coverage-v8": "^4.0.18",
|
"@vitest/coverage-v8": "^4.0.18",
|
||||||
|
|||||||
809
pnpm-lock.yaml
generated
809
pnpm-lock.yaml
generated
File diff suppressed because it is too large
Load Diff
@@ -1,32 +0,0 @@
|
|||||||
#!/bin/bash
# Build mcpd Docker image and push to Gitea container registry
#
# Usage: build-mcpd.sh [tag]   (tag defaults to "latest")
# Requires GITEA_TOKEN in the environment or in the repo-root .env file.
set -e

SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
PROJECT_ROOT="$(dirname "$SCRIPT_DIR")"
cd "$PROJECT_ROOT"

# Load .env for GITEA_TOKEN
if [ -f .env ]; then
    set -a; source .env; set +a
fi

# Fail early with a clear message instead of attempting a registry login
# with an empty password.
if [ -z "${GITEA_TOKEN:-}" ]; then
    echo "Error: GITEA_TOKEN not set. Add it to .env or export it." >&2
    exit 1
fi

# Push directly to internal address (external proxy has body size limit)
REGISTRY="10.0.0.194:3012"
IMAGE="mcpd"
TAG="${1:-latest}"

echo "==> Building mcpd image..."
podman build -t "$IMAGE:$TAG" -f deploy/Dockerfile.mcpd .

echo "==> Tagging as $REGISTRY/michal/$IMAGE:$TAG..."
podman tag "$IMAGE:$TAG" "$REGISTRY/michal/$IMAGE:$TAG"

# Feed the token via stdin so it does not appear in `ps` output or shell
# history (previously passed with -p on the command line).
echo "==> Logging in to $REGISTRY..."
printf '%s' "$GITEA_TOKEN" | podman login --tls-verify=false -u michal --password-stdin "$REGISTRY"

echo "==> Pushing to $REGISTRY/michal/$IMAGE:$TAG..."
podman push --tls-verify=false "$REGISTRY/michal/$IMAGE:$TAG"

echo "==> Done!"
echo " Image: $REGISTRY/michal/$IMAGE:$TAG"
|
|
||||||
@@ -1,31 +0,0 @@
|
|||||||
#!/bin/bash
# Build the mcpctl RPM: compile TypeScript, bundle standalone binaries with
# bun, then package everything with nfpm into dist/.
set -e

SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
PROJECT_ROOT="$(dirname "$SCRIPT_DIR")"
cd "$PROJECT_ROOT"

# Load .env if present
if [ -f .env ]; then
    set -a; source .env; set +a
fi

# Ensure tools are on PATH
export PATH="$HOME/.npm-global/bin:$HOME/.bun/bin:$HOME/.local/bin:$PATH"

echo "==> Building TypeScript..."
pnpm build

echo "==> Bundling standalone binaries..."
mkdir -p dist
rm -f dist/mcpctl dist/mcpctl-local dist/mcpctl-*.rpm
bun build src/cli/src/index.ts --compile --outfile dist/mcpctl
bun build src/mcplocal/src/main.ts --compile --outfile dist/mcpctl-local

echo "==> Packaging RPM..."
nfpm pkg --packager rpm --target dist/

# Locate the produced RPM with a glob rather than parsing `ls` output
# (robust against odd filenames, and — unlike `ls | head` under set -e —
# gives a clear error if nfpm silently produced nothing).
rpms=(dist/mcpctl-*.rpm)
if [ ! -e "${rpms[0]}" ]; then
    echo "Error: nfpm did not produce an RPM in dist/" >&2
    exit 1
fi
RPM_FILE="${rpms[0]}"
echo "==> Built: $RPM_FILE"
echo " Size: $(du -h "$RPM_FILE" | cut -f1)"
rpm -qpi "$RPM_FILE"
|
|
||||||
@@ -1,55 +0,0 @@
|
|||||||
#!/bin/bash
|
|
||||||
set -e
|
|
||||||
|
|
||||||
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
|
|
||||||
PROJECT_ROOT="$(dirname "$SCRIPT_DIR")"
|
|
||||||
cd "$PROJECT_ROOT"
|
|
||||||
|
|
||||||
# Load .env if present
|
|
||||||
if [ -f .env ]; then
|
|
||||||
set -a; source .env; set +a
|
|
||||||
fi
|
|
||||||
|
|
||||||
GITEA_URL="${GITEA_URL:-http://10.0.0.194:3012}"
|
|
||||||
GITEA_OWNER="${GITEA_OWNER:-michal}"
|
|
||||||
|
|
||||||
if [ -z "$GITEA_TOKEN" ]; then
|
|
||||||
echo "Error: GITEA_TOKEN not set. Add it to .env or export it."
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
RPM_FILE=$(ls dist/mcpctl-*.rpm 2>/dev/null | head -1)
|
|
||||||
if [ -z "$RPM_FILE" ]; then
|
|
||||||
echo "Error: No RPM found in dist/. Run scripts/build-rpm.sh first."
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Get version string as it appears in Gitea (e.g. "0.1.0-1")
|
|
||||||
RPM_VERSION=$(rpm -qp --queryformat '%{VERSION}-%{RELEASE}' "$RPM_FILE")
|
|
||||||
|
|
||||||
echo "==> Publishing $RPM_FILE (version $RPM_VERSION) to ${GITEA_URL}..."
|
|
||||||
|
|
||||||
# Check if version already exists and delete it first
|
|
||||||
EXISTING=$(curl -s -o /dev/null -w "%{http_code}" \
|
|
||||||
-H "Authorization: token ${GITEA_TOKEN}" \
|
|
||||||
"${GITEA_URL}/api/v1/packages/${GITEA_OWNER}/rpm/mcpctl/${RPM_VERSION}")
|
|
||||||
|
|
||||||
if [ "$EXISTING" = "200" ]; then
|
|
||||||
echo "==> Version $RPM_VERSION already exists, replacing..."
|
|
||||||
curl -s -o /dev/null -X DELETE \
|
|
||||||
-H "Authorization: token ${GITEA_TOKEN}" \
|
|
||||||
"${GITEA_URL}/api/v1/packages/${GITEA_OWNER}/rpm/mcpctl/${RPM_VERSION}"
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Upload
|
|
||||||
curl --fail -s -X PUT \
|
|
||||||
-H "Authorization: token ${GITEA_TOKEN}" \
|
|
||||||
--upload-file "$RPM_FILE" \
|
|
||||||
"${GITEA_URL}/api/packages/${GITEA_OWNER}/rpm/upload"
|
|
||||||
|
|
||||||
echo ""
|
|
||||||
echo "==> Published successfully!"
|
|
||||||
echo ""
|
|
||||||
echo "Install with:"
|
|
||||||
echo " sudo dnf config-manager --add-repo ${GITEA_URL}/api/packages/${GITEA_OWNER}/rpm.repo"
|
|
||||||
echo " sudo dnf install mcpctl"
|
|
||||||
@@ -1,41 +0,0 @@
|
|||||||
#!/bin/bash
|
|
||||||
set -e
|
|
||||||
|
|
||||||
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
|
|
||||||
PROJECT_ROOT="$(dirname "$SCRIPT_DIR")"
|
|
||||||
cd "$PROJECT_ROOT"
|
|
||||||
|
|
||||||
# Load .env if present
|
|
||||||
if [ -f .env ]; then
|
|
||||||
set -a; source .env; set +a
|
|
||||||
fi
|
|
||||||
|
|
||||||
echo "=== mcpctl release ==="
|
|
||||||
echo ""
|
|
||||||
|
|
||||||
# Build
|
|
||||||
bash scripts/build-rpm.sh
|
|
||||||
|
|
||||||
echo ""
|
|
||||||
|
|
||||||
# Publish
|
|
||||||
bash scripts/publish-rpm.sh
|
|
||||||
|
|
||||||
echo ""
|
|
||||||
|
|
||||||
# Install locally
|
|
||||||
echo "==> Installing locally..."
|
|
||||||
RPM_FILE=$(ls dist/mcpctl-*.rpm 2>/dev/null | head -1)
|
|
||||||
sudo rpm -U --force "$RPM_FILE"
|
|
||||||
|
|
||||||
echo ""
|
|
||||||
echo "==> Installed:"
|
|
||||||
mcpctl --version
|
|
||||||
echo ""
|
|
||||||
|
|
||||||
GITEA_URL="${GITEA_URL:-http://10.0.0.194:3012}"
|
|
||||||
GITEA_OWNER="${GITEA_OWNER:-michal}"
|
|
||||||
echo "=== Done! ==="
|
|
||||||
echo "Others can install with:"
|
|
||||||
echo " sudo dnf config-manager --add-repo ${GITEA_URL}/api/packages/${GITEA_OWNER}/rpm.repo"
|
|
||||||
echo " sudo dnf install mcpctl"
|
|
||||||
@@ -1,98 +0,0 @@
|
|||||||
import http from 'node:http';
|
|
||||||
|
|
||||||
export interface ApiClientOptions {
|
|
||||||
baseUrl: string;
|
|
||||||
timeout?: number | undefined;
|
|
||||||
token?: string | undefined;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface ApiResponse<T = unknown> {
|
|
||||||
status: number;
|
|
||||||
data: T;
|
|
||||||
}
|
|
||||||
|
|
||||||
export class ApiError extends Error {
|
|
||||||
constructor(
|
|
||||||
public readonly status: number,
|
|
||||||
public readonly body: string,
|
|
||||||
) {
|
|
||||||
super(`API error ${status}: ${body}`);
|
|
||||||
this.name = 'ApiError';
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function request<T>(method: string, url: string, timeout: number, body?: unknown, token?: string): Promise<ApiResponse<T>> {
|
|
||||||
return new Promise((resolve, reject) => {
|
|
||||||
const parsed = new URL(url);
|
|
||||||
const headers: Record<string, string> = { 'Content-Type': 'application/json' };
|
|
||||||
if (token) {
|
|
||||||
headers['Authorization'] = `Bearer ${token}`;
|
|
||||||
}
|
|
||||||
const opts: http.RequestOptions = {
|
|
||||||
hostname: parsed.hostname,
|
|
||||||
port: parsed.port,
|
|
||||||
path: parsed.pathname + parsed.search,
|
|
||||||
method,
|
|
||||||
timeout,
|
|
||||||
headers,
|
|
||||||
};
|
|
||||||
|
|
||||||
const req = http.request(opts, (res) => {
|
|
||||||
const chunks: Buffer[] = [];
|
|
||||||
res.on('data', (chunk: Buffer) => chunks.push(chunk));
|
|
||||||
res.on('end', () => {
|
|
||||||
const raw = Buffer.concat(chunks).toString('utf-8');
|
|
||||||
const status = res.statusCode ?? 0;
|
|
||||||
if (status >= 400) {
|
|
||||||
reject(new ApiError(status, raw));
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
try {
|
|
||||||
resolve({ status, data: JSON.parse(raw) as T });
|
|
||||||
} catch {
|
|
||||||
resolve({ status, data: raw as unknown as T });
|
|
||||||
}
|
|
||||||
});
|
|
||||||
});
|
|
||||||
req.on('error', reject);
|
|
||||||
req.on('timeout', () => {
|
|
||||||
req.destroy();
|
|
||||||
reject(new Error(`Request to ${url} timed out`));
|
|
||||||
});
|
|
||||||
if (body !== undefined) {
|
|
||||||
req.write(JSON.stringify(body));
|
|
||||||
}
|
|
||||||
req.end();
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
export class ApiClient {
|
|
||||||
private baseUrl: string;
|
|
||||||
private timeout: number;
|
|
||||||
private token?: string | undefined;
|
|
||||||
|
|
||||||
constructor(opts: ApiClientOptions) {
|
|
||||||
this.baseUrl = opts.baseUrl.replace(/\/$/, '');
|
|
||||||
this.timeout = opts.timeout ?? 10000;
|
|
||||||
this.token = opts.token;
|
|
||||||
}
|
|
||||||
|
|
||||||
async get<T = unknown>(path: string): Promise<T> {
|
|
||||||
const res = await request<T>('GET', `${this.baseUrl}${path}`, this.timeout, undefined, this.token);
|
|
||||||
return res.data;
|
|
||||||
}
|
|
||||||
|
|
||||||
async post<T = unknown>(path: string, body?: unknown): Promise<T> {
|
|
||||||
const res = await request<T>('POST', `${this.baseUrl}${path}`, this.timeout, body, this.token);
|
|
||||||
return res.data;
|
|
||||||
}
|
|
||||||
|
|
||||||
async put<T = unknown>(path: string, body?: unknown): Promise<T> {
|
|
||||||
const res = await request<T>('PUT', `${this.baseUrl}${path}`, this.timeout, body, this.token);
|
|
||||||
return res.data;
|
|
||||||
}
|
|
||||||
|
|
||||||
async delete(path: string): Promise<void> {
|
|
||||||
await request('DELETE', `${this.baseUrl}${path}`, this.timeout, undefined, this.token);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,50 +0,0 @@
|
|||||||
import { existsSync, mkdirSync, readFileSync, writeFileSync, unlinkSync, chmodSync } from 'node:fs';
|
|
||||||
import { join } from 'node:path';
|
|
||||||
import { homedir } from 'node:os';
|
|
||||||
|
|
||||||
export interface StoredCredentials {
|
|
||||||
token: string;
|
|
||||||
mcpdUrl: string;
|
|
||||||
user: string;
|
|
||||||
expiresAt?: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface CredentialsDeps {
|
|
||||||
configDir: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
function defaultConfigDir(): string {
|
|
||||||
return join(homedir(), '.mcpctl');
|
|
||||||
}
|
|
||||||
|
|
||||||
function credentialsPath(deps?: Partial<CredentialsDeps>): string {
|
|
||||||
return join(deps?.configDir ?? defaultConfigDir(), 'credentials');
|
|
||||||
}
|
|
||||||
|
|
||||||
export function saveCredentials(creds: StoredCredentials, deps?: Partial<CredentialsDeps>): void {
|
|
||||||
const dir = deps?.configDir ?? defaultConfigDir();
|
|
||||||
if (!existsSync(dir)) {
|
|
||||||
mkdirSync(dir, { recursive: true });
|
|
||||||
}
|
|
||||||
const path = credentialsPath(deps);
|
|
||||||
writeFileSync(path, JSON.stringify(creds, null, 2) + '\n', 'utf-8');
|
|
||||||
chmodSync(path, 0o600);
|
|
||||||
}
|
|
||||||
|
|
||||||
export function loadCredentials(deps?: Partial<CredentialsDeps>): StoredCredentials | null {
|
|
||||||
const path = credentialsPath(deps);
|
|
||||||
if (!existsSync(path)) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
const raw = readFileSync(path, 'utf-8');
|
|
||||||
return JSON.parse(raw) as StoredCredentials;
|
|
||||||
}
|
|
||||||
|
|
||||||
export function deleteCredentials(deps?: Partial<CredentialsDeps>): boolean {
|
|
||||||
const path = credentialsPath(deps);
|
|
||||||
if (!existsSync(path)) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
unlinkSync(path);
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
@@ -1,2 +0,0 @@
|
|||||||
export { saveCredentials, loadCredentials, deleteCredentials } from './credentials.js';
|
|
||||||
export type { StoredCredentials, CredentialsDeps } from './credentials.js';
|
|
||||||
@@ -1,316 +0,0 @@
|
|||||||
import { Command } from 'commander';
|
|
||||||
import { readFileSync } from 'node:fs';
|
|
||||||
import yaml from 'js-yaml';
|
|
||||||
import { z } from 'zod';
|
|
||||||
import type { ApiClient } from '../api-client.js';
|
|
||||||
|
|
||||||
const HealthCheckSchema = z.object({
|
|
||||||
tool: z.string().min(1),
|
|
||||||
arguments: z.record(z.unknown()).default({}),
|
|
||||||
intervalSeconds: z.number().int().min(5).max(3600).default(60),
|
|
||||||
timeoutSeconds: z.number().int().min(1).max(120).default(10),
|
|
||||||
failureThreshold: z.number().int().min(1).max(20).default(3),
|
|
||||||
});
|
|
||||||
|
|
||||||
const ServerEnvEntrySchema = z.object({
|
|
||||||
name: z.string().min(1),
|
|
||||||
value: z.string().optional(),
|
|
||||||
valueFrom: z.object({
|
|
||||||
secretRef: z.object({ name: z.string(), key: z.string() }),
|
|
||||||
}).optional(),
|
|
||||||
});
|
|
||||||
|
|
||||||
const ServerSpecSchema = z.object({
|
|
||||||
name: z.string().min(1),
|
|
||||||
description: z.string().default(''),
|
|
||||||
packageName: z.string().optional(),
|
|
||||||
dockerImage: z.string().optional(),
|
|
||||||
transport: z.enum(['STDIO', 'SSE', 'STREAMABLE_HTTP']).default('STDIO'),
|
|
||||||
repositoryUrl: z.string().url().optional(),
|
|
||||||
externalUrl: z.string().url().optional(),
|
|
||||||
command: z.array(z.string()).optional(),
|
|
||||||
containerPort: z.number().int().min(1).max(65535).optional(),
|
|
||||||
replicas: z.number().int().min(0).max(10).default(1),
|
|
||||||
env: z.array(ServerEnvEntrySchema).default([]),
|
|
||||||
healthCheck: HealthCheckSchema.optional(),
|
|
||||||
});
|
|
||||||
|
|
||||||
const SecretSpecSchema = z.object({
|
|
||||||
name: z.string().min(1),
|
|
||||||
data: z.record(z.string()).default({}),
|
|
||||||
});
|
|
||||||
|
|
||||||
const TemplateEnvEntrySchema = z.object({
|
|
||||||
name: z.string().min(1),
|
|
||||||
description: z.string().optional(),
|
|
||||||
required: z.boolean().optional(),
|
|
||||||
defaultValue: z.string().optional(),
|
|
||||||
});
|
|
||||||
|
|
||||||
const TemplateSpecSchema = z.object({
|
|
||||||
name: z.string().min(1),
|
|
||||||
version: z.string().default('1.0.0'),
|
|
||||||
description: z.string().default(''),
|
|
||||||
packageName: z.string().optional(),
|
|
||||||
dockerImage: z.string().optional(),
|
|
||||||
transport: z.enum(['STDIO', 'SSE', 'STREAMABLE_HTTP']).default('STDIO'),
|
|
||||||
repositoryUrl: z.string().optional(),
|
|
||||||
externalUrl: z.string().optional(),
|
|
||||||
command: z.array(z.string()).optional(),
|
|
||||||
containerPort: z.number().int().min(1).max(65535).optional(),
|
|
||||||
replicas: z.number().int().min(0).max(10).default(1),
|
|
||||||
env: z.array(TemplateEnvEntrySchema).default([]),
|
|
||||||
healthCheck: HealthCheckSchema.optional(),
|
|
||||||
});
|
|
||||||
|
|
||||||
const UserSpecSchema = z.object({
|
|
||||||
email: z.string().email(),
|
|
||||||
password: z.string().min(8),
|
|
||||||
name: z.string().optional(),
|
|
||||||
});
|
|
||||||
|
|
||||||
const GroupSpecSchema = z.object({
|
|
||||||
name: z.string().min(1),
|
|
||||||
description: z.string().default(''),
|
|
||||||
members: z.array(z.string().email()).default([]),
|
|
||||||
});
|
|
||||||
|
|
||||||
const RbacSubjectSchema = z.object({
|
|
||||||
kind: z.enum(['User', 'Group']),
|
|
||||||
name: z.string().min(1),
|
|
||||||
});
|
|
||||||
|
|
||||||
const RESOURCE_ALIASES: Record<string, string> = {
|
|
||||||
server: 'servers', instance: 'instances', secret: 'secrets',
|
|
||||||
project: 'projects', template: 'templates', user: 'users', group: 'groups',
|
|
||||||
};
|
|
||||||
|
|
||||||
const RbacRoleBindingSchema = z.union([
|
|
||||||
z.object({
|
|
||||||
role: z.enum(['edit', 'view', 'create', 'delete', 'run', 'expose']),
|
|
||||||
resource: z.string().min(1).transform((r) => RESOURCE_ALIASES[r] ?? r),
|
|
||||||
name: z.string().min(1).optional(),
|
|
||||||
}),
|
|
||||||
z.object({
|
|
||||||
role: z.literal('run'),
|
|
||||||
action: z.string().min(1),
|
|
||||||
}),
|
|
||||||
]);
|
|
||||||
|
|
||||||
const RbacBindingSpecSchema = z.object({
|
|
||||||
name: z.string().min(1),
|
|
||||||
subjects: z.array(RbacSubjectSchema).default([]),
|
|
||||||
roleBindings: z.array(RbacRoleBindingSchema).default([]),
|
|
||||||
});
|
|
||||||
|
|
||||||
const ProjectSpecSchema = z.object({
|
|
||||||
name: z.string().min(1),
|
|
||||||
description: z.string().default(''),
|
|
||||||
proxyMode: z.enum(['direct', 'filtered']).default('direct'),
|
|
||||||
llmProvider: z.string().optional(),
|
|
||||||
llmModel: z.string().optional(),
|
|
||||||
servers: z.array(z.string()).default([]),
|
|
||||||
});
|
|
||||||
|
|
||||||
const ApplyConfigSchema = z.object({
|
|
||||||
secrets: z.array(SecretSpecSchema).default([]),
|
|
||||||
servers: z.array(ServerSpecSchema).default([]),
|
|
||||||
users: z.array(UserSpecSchema).default([]),
|
|
||||||
groups: z.array(GroupSpecSchema).default([]),
|
|
||||||
projects: z.array(ProjectSpecSchema).default([]),
|
|
||||||
templates: z.array(TemplateSpecSchema).default([]),
|
|
||||||
rbacBindings: z.array(RbacBindingSpecSchema).default([]),
|
|
||||||
rbac: z.array(RbacBindingSpecSchema).default([]),
|
|
||||||
}).transform((data) => ({
|
|
||||||
...data,
|
|
||||||
// Merge rbac into rbacBindings so both keys work
|
|
||||||
rbacBindings: [...data.rbacBindings, ...data.rbac],
|
|
||||||
}));
|
|
||||||
|
|
||||||
export type ApplyConfig = z.infer<typeof ApplyConfigSchema>;
|
|
||||||
|
|
||||||
export interface ApplyCommandDeps {
|
|
||||||
client: ApiClient;
|
|
||||||
log: (...args: unknown[]) => void;
|
|
||||||
}
|
|
||||||
|
|
||||||
export function createApplyCommand(deps: ApplyCommandDeps): Command {
|
|
||||||
const { client, log } = deps;
|
|
||||||
|
|
||||||
return new Command('apply')
|
|
||||||
.description('Apply declarative configuration from a YAML or JSON file')
|
|
||||||
.argument('[file]', 'Path to config file (.yaml, .yml, or .json)')
|
|
||||||
.option('-f, --file <file>', 'Path to config file (alternative to positional arg)')
|
|
||||||
.option('--dry-run', 'Validate and show changes without applying')
|
|
||||||
.action(async (fileArg: string | undefined, opts: { file?: string; dryRun?: boolean }) => {
|
|
||||||
const file = fileArg ?? opts.file;
|
|
||||||
if (!file) {
|
|
||||||
throw new Error('File path required. Usage: mcpctl apply <file> or mcpctl apply -f <file>');
|
|
||||||
}
|
|
||||||
const config = loadConfigFile(file);
|
|
||||||
|
|
||||||
if (opts.dryRun) {
|
|
||||||
log('Dry run - would apply:');
|
|
||||||
if (config.secrets.length > 0) log(` ${config.secrets.length} secret(s)`);
|
|
||||||
if (config.servers.length > 0) log(` ${config.servers.length} server(s)`);
|
|
||||||
if (config.users.length > 0) log(` ${config.users.length} user(s)`);
|
|
||||||
if (config.groups.length > 0) log(` ${config.groups.length} group(s)`);
|
|
||||||
if (config.projects.length > 0) log(` ${config.projects.length} project(s)`);
|
|
||||||
if (config.templates.length > 0) log(` ${config.templates.length} template(s)`);
|
|
||||||
if (config.rbacBindings.length > 0) log(` ${config.rbacBindings.length} rbacBinding(s)`);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
await applyConfig(client, config, log);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
function loadConfigFile(path: string): ApplyConfig {
|
|
||||||
const raw = readFileSync(path, 'utf-8');
|
|
||||||
let parsed: unknown;
|
|
||||||
|
|
||||||
if (path.endsWith('.json')) {
|
|
||||||
parsed = JSON.parse(raw);
|
|
||||||
} else {
|
|
||||||
parsed = yaml.load(raw);
|
|
||||||
}
|
|
||||||
|
|
||||||
return ApplyConfigSchema.parse(parsed);
|
|
||||||
}
|
|
||||||
|
|
||||||
async function applyConfig(client: ApiClient, config: ApplyConfig, log: (...args: unknown[]) => void): Promise<void> {
|
|
||||||
// Apply order: secrets, servers, users, groups, projects, templates, rbacBindings
|
|
||||||
|
|
||||||
// Apply secrets
|
|
||||||
for (const secret of config.secrets) {
|
|
||||||
try {
|
|
||||||
const existing = await findByName(client, 'secrets', secret.name);
|
|
||||||
if (existing) {
|
|
||||||
await client.put(`/api/v1/secrets/${(existing as { id: string }).id}`, { data: secret.data });
|
|
||||||
log(`Updated secret: ${secret.name}`);
|
|
||||||
} else {
|
|
||||||
await client.post('/api/v1/secrets', secret);
|
|
||||||
log(`Created secret: ${secret.name}`);
|
|
||||||
}
|
|
||||||
} catch (err) {
|
|
||||||
log(`Error applying secret '${secret.name}': ${err instanceof Error ? err.message : err}`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Apply servers
|
|
||||||
for (const server of config.servers) {
|
|
||||||
try {
|
|
||||||
const existing = await findByName(client, 'servers', server.name);
|
|
||||||
if (existing) {
|
|
||||||
await client.put(`/api/v1/servers/${(existing as { id: string }).id}`, server);
|
|
||||||
log(`Updated server: ${server.name}`);
|
|
||||||
} else {
|
|
||||||
await client.post('/api/v1/servers', server);
|
|
||||||
log(`Created server: ${server.name}`);
|
|
||||||
}
|
|
||||||
} catch (err) {
|
|
||||||
log(`Error applying server '${server.name}': ${err instanceof Error ? err.message : err}`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Apply users (matched by email)
|
|
||||||
for (const user of config.users) {
|
|
||||||
try {
|
|
||||||
const existing = await findByField(client, 'users', 'email', user.email);
|
|
||||||
if (existing) {
|
|
||||||
await client.put(`/api/v1/users/${(existing as { id: string }).id}`, user);
|
|
||||||
log(`Updated user: ${user.email}`);
|
|
||||||
} else {
|
|
||||||
await client.post('/api/v1/users', user);
|
|
||||||
log(`Created user: ${user.email}`);
|
|
||||||
}
|
|
||||||
} catch (err) {
|
|
||||||
log(`Error applying user '${user.email}': ${err instanceof Error ? err.message : err}`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Apply groups
|
|
||||||
for (const group of config.groups) {
|
|
||||||
try {
|
|
||||||
const existing = await findByName(client, 'groups', group.name);
|
|
||||||
if (existing) {
|
|
||||||
await client.put(`/api/v1/groups/${(existing as { id: string }).id}`, group);
|
|
||||||
log(`Updated group: ${group.name}`);
|
|
||||||
} else {
|
|
||||||
await client.post('/api/v1/groups', group);
|
|
||||||
log(`Created group: ${group.name}`);
|
|
||||||
}
|
|
||||||
} catch (err) {
|
|
||||||
log(`Error applying group '${group.name}': ${err instanceof Error ? err.message : err}`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Apply projects (send full spec including servers)
|
|
||||||
for (const project of config.projects) {
|
|
||||||
try {
|
|
||||||
const existing = await findByName(client, 'projects', project.name);
|
|
||||||
if (existing) {
|
|
||||||
await client.put(`/api/v1/projects/${(existing as { id: string }).id}`, project);
|
|
||||||
log(`Updated project: ${project.name}`);
|
|
||||||
} else {
|
|
||||||
await client.post('/api/v1/projects', project);
|
|
||||||
log(`Created project: ${project.name}`);
|
|
||||||
}
|
|
||||||
} catch (err) {
|
|
||||||
log(`Error applying project '${project.name}': ${err instanceof Error ? err.message : err}`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Apply templates
|
|
||||||
for (const template of config.templates) {
|
|
||||||
try {
|
|
||||||
const existing = await findByName(client, 'templates', template.name);
|
|
||||||
if (existing) {
|
|
||||||
await client.put(`/api/v1/templates/${(existing as { id: string }).id}`, template);
|
|
||||||
log(`Updated template: ${template.name}`);
|
|
||||||
} else {
|
|
||||||
await client.post('/api/v1/templates', template);
|
|
||||||
log(`Created template: ${template.name}`);
|
|
||||||
}
|
|
||||||
} catch (err) {
|
|
||||||
log(`Error applying template '${template.name}': ${err instanceof Error ? err.message : err}`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Apply RBAC bindings
|
|
||||||
for (const rbacBinding of config.rbacBindings) {
|
|
||||||
try {
|
|
||||||
const existing = await findByName(client, 'rbac', rbacBinding.name);
|
|
||||||
if (existing) {
|
|
||||||
await client.put(`/api/v1/rbac/${(existing as { id: string }).id}`, rbacBinding);
|
|
||||||
log(`Updated rbacBinding: ${rbacBinding.name}`);
|
|
||||||
} else {
|
|
||||||
await client.post('/api/v1/rbac', rbacBinding);
|
|
||||||
log(`Created rbacBinding: ${rbacBinding.name}`);
|
|
||||||
}
|
|
||||||
} catch (err) {
|
|
||||||
log(`Error applying rbacBinding '${rbacBinding.name}': ${err instanceof Error ? err.message : err}`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async function findByName(client: ApiClient, resource: string, name: string): Promise<unknown | null> {
|
|
||||||
try {
|
|
||||||
const items = await client.get<Array<{ name: string }>>(`/api/v1/${resource}`);
|
|
||||||
return items.find((item) => item.name === name) ?? null;
|
|
||||||
} catch {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async function findByField<T extends string>(client: ApiClient, resource: string, field: T, value: string): Promise<unknown | null> {
|
|
||||||
try {
|
|
||||||
const items = await client.get<Array<Record<string, unknown>>>(`/api/v1/${resource}`);
|
|
||||||
return items.find((item) => item[field] === value) ?? null;
|
|
||||||
} catch {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Export for testing
|
|
||||||
export { loadConfigFile, applyConfig };
|
|
||||||
@@ -1,239 +0,0 @@
|
|||||||
import { Command } from 'commander';
|
|
||||||
import http from 'node:http';
|
|
||||||
import { loadConfig } from '../config/index.js';
|
|
||||||
import type { ConfigLoaderDeps } from '../config/index.js';
|
|
||||||
import { saveCredentials, loadCredentials, deleteCredentials } from '../auth/index.js';
|
|
||||||
import type { CredentialsDeps } from '../auth/index.js';
|
|
||||||
|
|
||||||
export interface PromptDeps {
|
|
||||||
input(message: string): Promise<string>;
|
|
||||||
password(message: string): Promise<string>;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface StatusResponse {
|
|
||||||
hasUsers: boolean;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface AuthCommandDeps {
|
|
||||||
configDeps: Partial<ConfigLoaderDeps>;
|
|
||||||
credentialsDeps: Partial<CredentialsDeps>;
|
|
||||||
prompt: PromptDeps;
|
|
||||||
log: (...args: string[]) => void;
|
|
||||||
loginRequest: (mcpdUrl: string, email: string, password: string) => Promise<LoginResponse>;
|
|
||||||
logoutRequest: (mcpdUrl: string, token: string) => Promise<void>;
|
|
||||||
statusRequest: (mcpdUrl: string) => Promise<StatusResponse>;
|
|
||||||
bootstrapRequest: (mcpdUrl: string, email: string, password: string, name?: string) => Promise<LoginResponse>;
|
|
||||||
}
|
|
||||||
|
|
||||||
interface LoginResponse {
|
|
||||||
token: string;
|
|
||||||
user: { email: string };
|
|
||||||
}
|
|
||||||
|
|
||||||
function defaultLoginRequest(mcpdUrl: string, email: string, password: string): Promise<LoginResponse> {
|
|
||||||
return new Promise((resolve, reject) => {
|
|
||||||
const url = new URL('/api/v1/auth/login', mcpdUrl);
|
|
||||||
const body = JSON.stringify({ email, password });
|
|
||||||
const opts: http.RequestOptions = {
|
|
||||||
hostname: url.hostname,
|
|
||||||
port: url.port,
|
|
||||||
path: url.pathname,
|
|
||||||
method: 'POST',
|
|
||||||
timeout: 10000,
|
|
||||||
headers: { 'Content-Type': 'application/json', 'Content-Length': Buffer.byteLength(body) },
|
|
||||||
};
|
|
||||||
const req = http.request(opts, (res) => {
|
|
||||||
const chunks: Buffer[] = [];
|
|
||||||
res.on('data', (chunk: Buffer) => chunks.push(chunk));
|
|
||||||
res.on('end', () => {
|
|
||||||
const raw = Buffer.concat(chunks).toString('utf-8');
|
|
||||||
if (res.statusCode === 401) {
|
|
||||||
reject(new Error('Invalid credentials'));
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
if ((res.statusCode ?? 0) >= 400) {
|
|
||||||
reject(new Error(`Login failed (${res.statusCode}): ${raw}`));
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
resolve(JSON.parse(raw) as LoginResponse);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
req.on('error', (err) => reject(new Error(`Cannot reach mcpd: ${err.message}`)));
|
|
||||||
req.on('timeout', () => { req.destroy(); reject(new Error('Login request timed out')); });
|
|
||||||
req.write(body);
|
|
||||||
req.end();
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
function defaultLogoutRequest(mcpdUrl: string, token: string): Promise<void> {
|
|
||||||
return new Promise((resolve) => {
|
|
||||||
const url = new URL('/api/v1/auth/logout', mcpdUrl);
|
|
||||||
const opts: http.RequestOptions = {
|
|
||||||
hostname: url.hostname,
|
|
||||||
port: url.port,
|
|
||||||
path: url.pathname,
|
|
||||||
method: 'POST',
|
|
||||||
timeout: 10000,
|
|
||||||
headers: { 'Authorization': `Bearer ${token}` },
|
|
||||||
};
|
|
||||||
const req = http.request(opts, (res) => {
|
|
||||||
res.resume();
|
|
||||||
res.on('end', () => resolve());
|
|
||||||
});
|
|
||||||
req.on('error', () => resolve()); // Don't fail logout on network errors
|
|
||||||
req.on('timeout', () => { req.destroy(); resolve(); });
|
|
||||||
req.end();
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
function defaultStatusRequest(mcpdUrl: string): Promise<StatusResponse> {
|
|
||||||
return new Promise((resolve, reject) => {
|
|
||||||
const url = new URL('/api/v1/auth/status', mcpdUrl);
|
|
||||||
const opts: http.RequestOptions = {
|
|
||||||
hostname: url.hostname,
|
|
||||||
port: url.port,
|
|
||||||
path: url.pathname,
|
|
||||||
method: 'GET',
|
|
||||||
timeout: 10000,
|
|
||||||
headers: { 'Content-Type': 'application/json' },
|
|
||||||
};
|
|
||||||
const req = http.request(opts, (res) => {
|
|
||||||
const chunks: Buffer[] = [];
|
|
||||||
res.on('data', (chunk: Buffer) => chunks.push(chunk));
|
|
||||||
res.on('end', () => {
|
|
||||||
const raw = Buffer.concat(chunks).toString('utf-8');
|
|
||||||
if ((res.statusCode ?? 0) >= 400) {
|
|
||||||
reject(new Error(`Status check failed (${res.statusCode}): ${raw}`));
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
resolve(JSON.parse(raw) as StatusResponse);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
req.on('error', (err) => reject(new Error(`Cannot reach mcpd: ${err.message}`)));
|
|
||||||
req.on('timeout', () => { req.destroy(); reject(new Error('Status request timed out')); });
|
|
||||||
req.end();
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
function defaultBootstrapRequest(mcpdUrl: string, email: string, password: string, name?: string): Promise<LoginResponse> {
|
|
||||||
return new Promise((resolve, reject) => {
|
|
||||||
const url = new URL('/api/v1/auth/bootstrap', mcpdUrl);
|
|
||||||
const payload: Record<string, string> = { email, password };
|
|
||||||
if (name) {
|
|
||||||
payload['name'] = name;
|
|
||||||
}
|
|
||||||
const body = JSON.stringify(payload);
|
|
||||||
const opts: http.RequestOptions = {
|
|
||||||
hostname: url.hostname,
|
|
||||||
port: url.port,
|
|
||||||
path: url.pathname,
|
|
||||||
method: 'POST',
|
|
||||||
timeout: 10000,
|
|
||||||
headers: { 'Content-Type': 'application/json', 'Content-Length': Buffer.byteLength(body) },
|
|
||||||
};
|
|
||||||
const req = http.request(opts, (res) => {
|
|
||||||
const chunks: Buffer[] = [];
|
|
||||||
res.on('data', (chunk: Buffer) => chunks.push(chunk));
|
|
||||||
res.on('end', () => {
|
|
||||||
const raw = Buffer.concat(chunks).toString('utf-8');
|
|
||||||
if ((res.statusCode ?? 0) >= 400) {
|
|
||||||
reject(new Error(`Bootstrap failed (${res.statusCode}): ${raw}`));
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
resolve(JSON.parse(raw) as LoginResponse);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
req.on('error', (err) => reject(new Error(`Cannot reach mcpd: ${err.message}`)));
|
|
||||||
req.on('timeout', () => { req.destroy(); reject(new Error('Bootstrap request timed out')); });
|
|
||||||
req.write(body);
|
|
||||||
req.end();
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
async function defaultInput(message: string): Promise<string> {
|
|
||||||
const { default: inquirer } = await import('inquirer');
|
|
||||||
const { answer } = await inquirer.prompt([{ type: 'input', name: 'answer', message }]);
|
|
||||||
return answer as string;
|
|
||||||
}
|
|
||||||
|
|
||||||
async function defaultPassword(message: string): Promise<string> {
|
|
||||||
const { default: inquirer } = await import('inquirer');
|
|
||||||
const { answer } = await inquirer.prompt([{ type: 'password', name: 'answer', message }]);
|
|
||||||
return answer as string;
|
|
||||||
}
|
|
||||||
|
|
||||||
const defaultDeps: AuthCommandDeps = {
|
|
||||||
configDeps: {},
|
|
||||||
credentialsDeps: {},
|
|
||||||
prompt: { input: defaultInput, password: defaultPassword },
|
|
||||||
log: (...args) => console.log(...args),
|
|
||||||
loginRequest: defaultLoginRequest,
|
|
||||||
logoutRequest: defaultLogoutRequest,
|
|
||||||
statusRequest: defaultStatusRequest,
|
|
||||||
bootstrapRequest: defaultBootstrapRequest,
|
|
||||||
};
|
|
||||||
|
|
||||||
export function createLoginCommand(deps?: Partial<AuthCommandDeps>): Command {
|
|
||||||
const { configDeps, credentialsDeps, prompt, log, loginRequest, statusRequest, bootstrapRequest } = { ...defaultDeps, ...deps };
|
|
||||||
|
|
||||||
return new Command('login')
|
|
||||||
.description('Authenticate with mcpd')
|
|
||||||
.option('--mcpd-url <url>', 'mcpd URL to authenticate against')
|
|
||||||
.action(async (opts: { mcpdUrl?: string }) => {
|
|
||||||
const config = loadConfig(configDeps);
|
|
||||||
const mcpdUrl = opts.mcpdUrl ?? config.mcpdUrl;
|
|
||||||
|
|
||||||
try {
|
|
||||||
const status = await statusRequest(mcpdUrl);
|
|
||||||
|
|
||||||
if (!status.hasUsers) {
|
|
||||||
log('No users configured. Creating first admin account.');
|
|
||||||
const email = await prompt.input('Email:');
|
|
||||||
const password = await prompt.password('Password:');
|
|
||||||
const name = await prompt.input('Name (optional):');
|
|
||||||
|
|
||||||
const result = name
|
|
||||||
? await bootstrapRequest(mcpdUrl, email, password, name)
|
|
||||||
: await bootstrapRequest(mcpdUrl, email, password);
|
|
||||||
saveCredentials({
|
|
||||||
token: result.token,
|
|
||||||
mcpdUrl,
|
|
||||||
user: result.user.email,
|
|
||||||
}, credentialsDeps);
|
|
||||||
log(`Logged in as ${result.user.email} (admin)`);
|
|
||||||
} else {
|
|
||||||
const email = await prompt.input('Email:');
|
|
||||||
const password = await prompt.password('Password:');
|
|
||||||
|
|
||||||
const result = await loginRequest(mcpdUrl, email, password);
|
|
||||||
saveCredentials({
|
|
||||||
token: result.token,
|
|
||||||
mcpdUrl,
|
|
||||||
user: result.user.email,
|
|
||||||
}, credentialsDeps);
|
|
||||||
log(`Logged in as ${result.user.email}`);
|
|
||||||
}
|
|
||||||
} catch (err) {
|
|
||||||
log(`Login failed: ${(err as Error).message}`);
|
|
||||||
process.exitCode = 1;
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
export function createLogoutCommand(deps?: Partial<AuthCommandDeps>): Command {
|
|
||||||
const { credentialsDeps, log, logoutRequest } = { ...defaultDeps, ...deps };
|
|
||||||
|
|
||||||
return new Command('logout')
|
|
||||||
.description('Log out and remove stored credentials')
|
|
||||||
.action(async () => {
|
|
||||||
const creds = loadCredentials(credentialsDeps);
|
|
||||||
if (!creds) {
|
|
||||||
log('Not logged in');
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
await logoutRequest(creds.mcpdUrl, creds.token);
|
|
||||||
deleteCredentials(credentialsDeps);
|
|
||||||
log('Logged out successfully');
|
|
||||||
});
|
|
||||||
}
|
|
||||||
@@ -1,80 +0,0 @@
|
|||||||
import { Command } from 'commander';
|
|
||||||
import fs from 'node:fs';
|
|
||||||
import type { ApiClient } from '../api-client.js';
|
|
||||||
|
|
||||||
/** Dependencies for the backup/restore commands (injected for testability). */
export interface BackupDeps {
  // Authenticated API client used to call the mcpd backup/restore endpoints.
  client: ApiClient;
  // Output sink; production wiring passes console.log.
  log: (...args: unknown[]) => void;
}
|
|
||||||
|
|
||||||
export function createBackupCommand(deps: BackupDeps): Command {
|
|
||||||
const cmd = new Command('backup')
|
|
||||||
.description('Backup mcpctl configuration to a JSON file')
|
|
||||||
.option('-o, --output <path>', 'output file path', 'mcpctl-backup.json')
|
|
||||||
.option('-p, --password <password>', 'encrypt sensitive values with password')
|
|
||||||
.option('-r, --resources <types>', 'resource types to backup (comma-separated: servers,profiles,projects)')
|
|
||||||
.action(async (options: { output: string; password?: string; resources?: string }) => {
|
|
||||||
const body: Record<string, unknown> = {};
|
|
||||||
if (options.password) {
|
|
||||||
body.password = options.password;
|
|
||||||
}
|
|
||||||
if (options.resources) {
|
|
||||||
body.resources = options.resources.split(',').map((s) => s.trim());
|
|
||||||
}
|
|
||||||
|
|
||||||
const bundle = await deps.client.post('/api/v1/backup', body);
|
|
||||||
fs.writeFileSync(options.output, JSON.stringify(bundle, null, 2), 'utf-8');
|
|
||||||
deps.log(`Backup saved to ${options.output}`);
|
|
||||||
});
|
|
||||||
|
|
||||||
return cmd;
|
|
||||||
}
|
|
||||||
|
|
||||||
export function createRestoreCommand(deps: BackupDeps): Command {
|
|
||||||
const cmd = new Command('restore')
|
|
||||||
.description('Restore mcpctl configuration from a backup file')
|
|
||||||
.option('-i, --input <path>', 'backup file path', 'mcpctl-backup.json')
|
|
||||||
.option('-p, --password <password>', 'decryption password for encrypted backups')
|
|
||||||
.option('-c, --conflict <strategy>', 'conflict resolution: skip, overwrite, fail', 'skip')
|
|
||||||
.action(async (options: { input: string; password?: string; conflict: string }) => {
|
|
||||||
if (!fs.existsSync(options.input)) {
|
|
||||||
deps.log(`Error: File not found: ${options.input}`);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const raw = fs.readFileSync(options.input, 'utf-8');
|
|
||||||
const bundle = JSON.parse(raw) as unknown;
|
|
||||||
|
|
||||||
const body: Record<string, unknown> = {
|
|
||||||
bundle,
|
|
||||||
conflictStrategy: options.conflict,
|
|
||||||
};
|
|
||||||
if (options.password) {
|
|
||||||
body.password = options.password;
|
|
||||||
}
|
|
||||||
|
|
||||||
const result = await deps.client.post<{
|
|
||||||
serversCreated: number;
|
|
||||||
serversSkipped: number;
|
|
||||||
profilesCreated: number;
|
|
||||||
profilesSkipped: number;
|
|
||||||
projectsCreated: number;
|
|
||||||
projectsSkipped: number;
|
|
||||||
errors: string[];
|
|
||||||
}>('/api/v1/restore', body);
|
|
||||||
|
|
||||||
deps.log('Restore complete:');
|
|
||||||
deps.log(` Servers: ${result.serversCreated} created, ${result.serversSkipped} skipped`);
|
|
||||||
deps.log(` Profiles: ${result.profilesCreated} created, ${result.profilesSkipped} skipped`);
|
|
||||||
deps.log(` Projects: ${result.projectsCreated} created, ${result.projectsSkipped} skipped`);
|
|
||||||
|
|
||||||
if (result.errors.length > 0) {
|
|
||||||
deps.log(` Errors:`);
|
|
||||||
for (const err of result.errors) {
|
|
||||||
deps.log(` - ${err}`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
return cmd;
|
|
||||||
}
|
|
||||||
@@ -1,35 +1,19 @@
|
|||||||
import { Command } from 'commander';
|
import { Command } from 'commander';
|
||||||
import { writeFileSync, readFileSync, existsSync } from 'node:fs';
|
|
||||||
import { resolve, join } from 'node:path';
|
|
||||||
import { homedir } from 'node:os';
|
|
||||||
import { loadConfig, saveConfig, mergeConfig, getConfigPath, DEFAULT_CONFIG } from '../config/index.js';
|
import { loadConfig, saveConfig, mergeConfig, getConfigPath, DEFAULT_CONFIG } from '../config/index.js';
|
||||||
import type { McpctlConfig, ConfigLoaderDeps } from '../config/index.js';
|
import type { McpctlConfig, ConfigLoaderDeps } from '../config/index.js';
|
||||||
import { formatJson, formatYaml } from '../formatters/index.js';
|
import { formatJson, formatYaml } from '../formatters/index.js';
|
||||||
import { saveCredentials, loadCredentials } from '../auth/index.js';
|
|
||||||
import type { CredentialsDeps, StoredCredentials } from '../auth/index.js';
|
|
||||||
import type { ApiClient } from '../api-client.js';
|
|
||||||
|
|
||||||
// Shape of a Claude `.mcp.json` file: server name → launch spec
// (command, args, optional environment variables).
interface McpConfig {
  mcpServers: Record<string, { command: string; args: string[]; env?: Record<string, string> }>;
}
|
|
||||||
|
|
||||||
/** Dependencies for the local `config` subcommands (injected for tests). */
export interface ConfigCommandDeps {
  // Overrides for config file location/IO used by loadConfig/saveConfig.
  configDeps: Partial<ConfigLoaderDeps>;
  // Output sink; production wiring passes console.log.
  log: (...args: string[]) => void;
}
|
||||||
|
|
||||||
/**
 * Extra dependencies for `config` subcommands that talk to the mcpd API
 * (claude-generate, impersonate). Optional: when absent those subcommands
 * are not registered.
 */
export interface ConfigApiDeps {
  // Authenticated API client.
  client: ApiClient;
  // Credential store overrides — impersonate swaps/restores tokens through these.
  credentialsDeps: Partial<CredentialsDeps>;
  // Output sink; production wiring passes console.log.
  log: (...args: string[]) => void;
}
|
|
||||||
|
|
||||||
// Production wiring for the config command: real config loader, console logging.
const defaultDeps: ConfigCommandDeps = {
  configDeps: {},
  log: (...args) => console.log(...args),
};
|
||||||
|
|
||||||
export function createConfigCommand(deps?: Partial<ConfigCommandDeps>, apiDeps?: ConfigApiDeps): Command {
|
export function createConfigCommand(deps?: Partial<ConfigCommandDeps>): Command {
|
||||||
const { configDeps, log } = { ...defaultDeps, ...deps };
|
const { configDeps, log } = { ...defaultDeps, ...deps };
|
||||||
|
|
||||||
const config = new Command('config').description('Manage mcpctl configuration');
|
const config = new Command('config').description('Manage mcpctl configuration');
|
||||||
@@ -57,9 +41,6 @@ export function createConfigCommand(deps?: Partial<ConfigCommandDeps>, apiDeps?:
|
|||||||
updates[key] = parseInt(value, 10);
|
updates[key] = parseInt(value, 10);
|
||||||
} else if (key === 'registries') {
|
} else if (key === 'registries') {
|
||||||
updates[key] = value.split(',').map((s) => s.trim());
|
updates[key] = value.split(',').map((s) => s.trim());
|
||||||
} else if (key === 'daemonUrl') {
|
|
||||||
// Backward compat: map daemonUrl to mcplocalUrl
|
|
||||||
updates['mcplocalUrl'] = value;
|
|
||||||
} else {
|
} else {
|
||||||
updates[key] = value;
|
updates[key] = value;
|
||||||
}
|
}
|
||||||
@@ -84,115 +65,5 @@ export function createConfigCommand(deps?: Partial<ConfigCommandDeps>, apiDeps?:
|
|||||||
log('Configuration reset to defaults');
|
log('Configuration reset to defaults');
|
||||||
});
|
});
|
||||||
|
|
||||||
if (apiDeps) {
|
|
||||||
const { client, credentialsDeps, log: apiLog } = apiDeps;
|
|
||||||
|
|
||||||
config
|
|
||||||
.command('claude-generate')
|
|
||||||
.description('Generate .mcp.json from a project configuration')
|
|
||||||
.requiredOption('--project <name>', 'Project name')
|
|
||||||
.option('-o, --output <path>', 'Output file path', '.mcp.json')
|
|
||||||
.option('--merge', 'Merge with existing .mcp.json instead of overwriting')
|
|
||||||
.option('--stdout', 'Print to stdout instead of writing a file')
|
|
||||||
.action(async (opts: { project: string; output: string; merge?: boolean; stdout?: boolean }) => {
|
|
||||||
const mcpConfig = await client.get<McpConfig>(`/api/v1/projects/${opts.project}/mcp-config`);
|
|
||||||
|
|
||||||
if (opts.stdout) {
|
|
||||||
apiLog(JSON.stringify(mcpConfig, null, 2));
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const outputPath = resolve(opts.output);
|
|
||||||
let finalConfig = mcpConfig;
|
|
||||||
|
|
||||||
if (opts.merge && existsSync(outputPath)) {
|
|
||||||
try {
|
|
||||||
const existing = JSON.parse(readFileSync(outputPath, 'utf-8')) as McpConfig;
|
|
||||||
finalConfig = {
|
|
||||||
mcpServers: {
|
|
||||||
...existing.mcpServers,
|
|
||||||
...mcpConfig.mcpServers,
|
|
||||||
},
|
|
||||||
};
|
|
||||||
} catch {
|
|
||||||
// If existing file is invalid, just overwrite
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
writeFileSync(outputPath, JSON.stringify(finalConfig, null, 2) + '\n');
|
|
||||||
const serverCount = Object.keys(finalConfig.mcpServers).length;
|
|
||||||
apiLog(`Wrote ${outputPath} (${serverCount} server(s))`);
|
|
||||||
});
|
|
||||||
|
|
||||||
config
|
|
||||||
.command('impersonate')
|
|
||||||
.description('Impersonate another user or return to original identity')
|
|
||||||
.argument('[email]', 'Email of user to impersonate')
|
|
||||||
.option('--quit', 'Stop impersonating and return to original identity')
|
|
||||||
.action(async (email: string | undefined, opts: { quit?: boolean }) => {
|
|
||||||
const configDir = credentialsDeps?.configDir ?? join(homedir(), '.mcpctl');
|
|
||||||
const backupPath = join(configDir, 'credentials-backup');
|
|
||||||
|
|
||||||
if (opts.quit) {
|
|
||||||
if (!existsSync(backupPath)) {
|
|
||||||
apiLog('No impersonation session to quit');
|
|
||||||
process.exitCode = 1;
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const backupRaw = readFileSync(backupPath, 'utf-8');
|
|
||||||
const backup = JSON.parse(backupRaw) as StoredCredentials;
|
|
||||||
saveCredentials(backup, credentialsDeps);
|
|
||||||
|
|
||||||
// Remove backup file
|
|
||||||
const { unlinkSync } = await import('node:fs');
|
|
||||||
unlinkSync(backupPath);
|
|
||||||
|
|
||||||
apiLog(`Returned to ${backup.user}`);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!email) {
|
|
||||||
apiLog('Email is required when not using --quit');
|
|
||||||
process.exitCode = 1;
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Save current credentials as backup
|
|
||||||
const currentCreds = loadCredentials(credentialsDeps);
|
|
||||||
if (!currentCreds) {
|
|
||||||
apiLog('Not logged in. Run "mcpctl login" first.');
|
|
||||||
process.exitCode = 1;
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
writeFileSync(backupPath, JSON.stringify(currentCreds, null, 2) + '\n', 'utf-8');
|
|
||||||
|
|
||||||
try {
|
|
||||||
const result = await client.post<{ token: string; user: { email: string } }>(
|
|
||||||
'/api/v1/auth/impersonate',
|
|
||||||
{ email },
|
|
||||||
);
|
|
||||||
|
|
||||||
saveCredentials({
|
|
||||||
token: result.token,
|
|
||||||
mcpdUrl: currentCreds.mcpdUrl,
|
|
||||||
user: result.user.email,
|
|
||||||
}, credentialsDeps);
|
|
||||||
|
|
||||||
apiLog(`Impersonating ${result.user.email}. Use 'mcpctl config impersonate --quit' to return.`);
|
|
||||||
} catch (err) {
|
|
||||||
// Restore backup on failure
|
|
||||||
const backup = JSON.parse(readFileSync(backupPath, 'utf-8')) as StoredCredentials;
|
|
||||||
saveCredentials(backup, credentialsDeps);
|
|
||||||
const { unlinkSync } = await import('node:fs');
|
|
||||||
unlinkSync(backupPath);
|
|
||||||
|
|
||||||
apiLog(`Impersonate failed: ${(err as Error).message}`);
|
|
||||||
process.exitCode = 1;
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
return config;
|
return config;
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,351 +0,0 @@
|
|||||||
import { Command } from 'commander';
|
|
||||||
import { type ApiClient, ApiError } from '../api-client.js';
|
|
||||||
/** Dependencies for the `create` command tree (injected for testability). */
export interface CreateCommandDeps {
  // Authenticated API client used for all /api/v1 create/update calls.
  client: ApiClient;
  // Output sink; production wiring passes console.log.
  log: (...args: unknown[]) => void;
}
|
|
||||||
|
|
||||||
function collect(value: string, prev: string[]): string[] {
|
|
||||||
return [...prev, value];
|
|
||||||
}
|
|
||||||
|
|
||||||
// One environment entry in a server spec: either an inline `value` or a
// reference into a named secret via `valueFrom.secretRef` — the parser below
// sets exactly one of the two.
interface ServerEnvEntry {
  name: string;
  // Inline literal value.
  value?: string;
  // Reference to key `key` inside the secret named `name`.
  valueFrom?: { secretRef: { name: string; key: string } };
}
|
|
||||||
|
|
||||||
function parseServerEnv(entries: string[]): ServerEnvEntry[] {
|
|
||||||
return entries.map((entry) => {
|
|
||||||
const eqIdx = entry.indexOf('=');
|
|
||||||
if (eqIdx === -1) {
|
|
||||||
throw new Error(`Invalid env format '${entry}'. Expected KEY=value or KEY=secretRef:SECRET:KEY`);
|
|
||||||
}
|
|
||||||
const envName = entry.slice(0, eqIdx);
|
|
||||||
const rhs = entry.slice(eqIdx + 1);
|
|
||||||
|
|
||||||
if (rhs.startsWith('secretRef:')) {
|
|
||||||
const parts = rhs.split(':');
|
|
||||||
if (parts.length !== 3) {
|
|
||||||
throw new Error(`Invalid secretRef format '${entry}'. Expected KEY=secretRef:SECRET_NAME:SECRET_KEY`);
|
|
||||||
}
|
|
||||||
return {
|
|
||||||
name: envName,
|
|
||||||
valueFrom: { secretRef: { name: parts[1]!, key: parts[2]! } },
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
return { name: envName, value: rhs };
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
function parseEnvEntries(entries: string[]): Record<string, string> {
|
|
||||||
const result: Record<string, string> = {};
|
|
||||||
for (const entry of entries) {
|
|
||||||
const eqIdx = entry.indexOf('=');
|
|
||||||
if (eqIdx === -1) {
|
|
||||||
throw new Error(`Invalid env format '${entry}'. Expected KEY=value`);
|
|
||||||
}
|
|
||||||
result[entry.slice(0, eqIdx)] = entry.slice(eqIdx + 1);
|
|
||||||
}
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
 * Build the `create` command tree: create server|secret|project|user|group|rbac.
 *
 * Every subcommand POSTs to its /api/v1 endpoint. With --force, a 409
 * conflict triggers an upsert: the existing resource is looked up by
 * name/email in the list endpoint and updated via PUT (with the identifying
 * field stripped from the update body).
 */
export function createCreateCommand(deps: CreateCommandDeps): Command {
  const { client, log } = deps;

  const cmd = new Command('create')
    .description('Create a resource (server, secret, project, user, group, rbac)');

  // --- create server ---
  cmd.command('server')
    .description('Create an MCP server definition')
    .argument('<name>', 'Server name (lowercase, hyphens allowed)')
    .option('-d, --description <text>', 'Server description')
    .option('--package-name <name>', 'NPM package name')
    .option('--docker-image <image>', 'Docker image')
    .option('--transport <type>', 'Transport type (STDIO, SSE, STREAMABLE_HTTP)')
    .option('--repository-url <url>', 'Source repository URL')
    .option('--external-url <url>', 'External endpoint URL')
    .option('--command <arg>', 'Command argument (repeat for multiple)', collect, [])
    .option('--container-port <port>', 'Container port number')
    .option('--replicas <count>', 'Number of replicas')
    .option('--env <entry>', 'Env var: KEY=value (inline) or KEY=secretRef:SECRET:KEY (secret ref, repeat for multiple)', collect, [])
    .option('--from-template <name>', 'Create from template (name or name:version)')
    .option('--force', 'Update if already exists')
    .action(async (name: string, opts) => {
      let base: Record<string, unknown> = {};

      // If --from-template, fetch template and use as base
      if (opts.fromTemplate) {
        const tplRef = opts.fromTemplate as string;
        // "name:version" splits on the FIRST colon; a bare name matches any version.
        const [tplName, tplVersion] = tplRef.includes(':')
          ? [tplRef.slice(0, tplRef.indexOf(':')), tplRef.slice(tplRef.indexOf(':') + 1)]
          : [tplRef, undefined];

        const templates = await client.get<Array<Record<string, unknown>>>(`/api/v1/templates?name=${encodeURIComponent(tplName)}`);
        let template: Record<string, unknown> | undefined;
        if (tplVersion) {
          template = templates.find((t) => t.name === tplName && t.version === tplVersion);
          if (!template) throw new Error(`Template '${tplName}' version '${tplVersion}' not found`);
        } else {
          // No version given: take the first match returned by the API.
          template = templates.find((t) => t.name === tplName);
          if (!template) throw new Error(`Template '${tplName}' not found`);
        }

        // Copy template fields as base (strip template-only, internal, and null fields)
        const { id: _id, createdAt: _c, updatedAt: _u, version: _v, name: _n, ...tplFields } = template;
        base = {};
        for (const [k, v] of Object.entries(tplFields)) {
          if (v !== null && v !== undefined) base[k] = v;
        }

        // Convert template env (description/required) to server env (name/value/valueFrom)
        const tplEnv = template.env as Array<{ name: string; description?: string; required?: boolean; defaultValue?: string }> | undefined;
        if (tplEnv && tplEnv.length > 0) {
          base.env = tplEnv.map((e) => ({ name: e.name, value: e.defaultValue ?? '' }));
        }

        // Track template origin
        base.templateName = tplName;
        base.templateVersion = (template.version as string) ?? '1.0.0';
      }

      // Build body: template base → CLI overrides (last wins)
      const body: Record<string, unknown> = {
        ...base,
        name,
      };
      if (opts.description !== undefined) body.description = opts.description;
      if (opts.transport) body.transport = opts.transport;
      if (opts.replicas) body.replicas = parseInt(opts.replicas, 10);
      if (opts.packageName) body.packageName = opts.packageName;
      if (opts.dockerImage) body.dockerImage = opts.dockerImage;
      if (opts.repositoryUrl) body.repositoryUrl = opts.repositoryUrl;
      if (opts.externalUrl) body.externalUrl = opts.externalUrl;
      if (opts.command.length > 0) body.command = opts.command;
      if (opts.containerPort) body.containerPort = parseInt(opts.containerPort, 10);
      if (opts.env.length > 0) {
        // Merge: CLI env entries override template env entries by name
        const cliEnv = parseServerEnv(opts.env);
        const existing = (body.env as ServerEnvEntry[] | undefined) ?? [];
        const merged = [...existing];
        for (const entry of cliEnv) {
          const idx = merged.findIndex((e) => e.name === entry.name);
          if (idx >= 0) {
            merged[idx] = entry;
          } else {
            merged.push(entry);
          }
        }
        body.env = merged;
      }

      // Defaults when no template
      if (!opts.fromTemplate) {
        if (body.description === undefined) body.description = '';
        if (!body.transport) body.transport = 'STDIO';
        if (!body.replicas) body.replicas = 1;
      }

      try {
        const server = await client.post<{ id: string; name: string }>('/api/v1/servers', body);
        log(`server '${server.name}' created (id: ${server.id})`);
      } catch (err) {
        // --force upsert: on 409, find the existing server by name and PUT an update.
        if (err instanceof ApiError && err.status === 409 && opts.force) {
          const existing = (await client.get<Array<{ id: string; name: string }>>('/api/v1/servers')).find((s) => s.name === name);
          if (!existing) throw err;
          const { name: _n, ...updateBody } = body;
          await client.put(`/api/v1/servers/${existing.id}`, updateBody);
          log(`server '${name}' updated (id: ${existing.id})`);
        } else {
          throw err;
        }
      }
    });

  // --- create secret ---
  cmd.command('secret')
    .description('Create a secret')
    .argument('<name>', 'Secret name (lowercase, hyphens allowed)')
    .option('--data <entry>', 'Secret data KEY=value (repeat for multiple)', collect, [])
    .option('--force', 'Update if already exists')
    .action(async (name: string, opts) => {
      const data = parseEnvEntries(opts.data);
      try {
        const secret = await client.post<{ id: string; name: string }>('/api/v1/secrets', {
          name,
          data,
        });
        log(`secret '${secret.name}' created (id: ${secret.id})`);
      } catch (err) {
        // --force upsert: replace the existing secret's data.
        if (err instanceof ApiError && err.status === 409 && opts.force) {
          const existing = (await client.get<Array<{ id: string; name: string }>>('/api/v1/secrets')).find((s) => s.name === name);
          if (!existing) throw err;
          await client.put(`/api/v1/secrets/${existing.id}`, { data });
          log(`secret '${name}' updated (id: ${existing.id})`);
        } else {
          throw err;
        }
      }
    });

  // --- create project ---
  cmd.command('project')
    .description('Create a project')
    .argument('<name>', 'Project name')
    .option('-d, --description <text>', 'Project description', '')
    .option('--proxy-mode <mode>', 'Proxy mode (direct, filtered)')
    .option('--proxy-mode-llm-provider <name>', 'LLM provider name (for filtered proxy mode)')
    .option('--proxy-mode-llm-model <name>', 'LLM model name (for filtered proxy mode)')
    .option('--server <name>', 'Server name (repeat for multiple)', collect, [])
    .option('--force', 'Update if already exists')
    .action(async (name: string, opts) => {
      const body: Record<string, unknown> = {
        name,
        description: opts.description,
        // Proxy mode defaults to 'direct' when the flag is absent.
        proxyMode: opts.proxyMode ?? 'direct',
      };
      if (opts.proxyModeLlmProvider) body.llmProvider = opts.proxyModeLlmProvider;
      if (opts.proxyModeLlmModel) body.llmModel = opts.proxyModeLlmModel;
      if (opts.server.length > 0) body.servers = opts.server;

      try {
        const project = await client.post<{ id: string; name: string }>('/api/v1/projects', body);
        log(`project '${project.name}' created (id: ${project.id})`);
      } catch (err) {
        // --force upsert: find existing project by name and PUT an update.
        if (err instanceof ApiError && err.status === 409 && opts.force) {
          const existing = (await client.get<Array<{ id: string; name: string }>>('/api/v1/projects')).find((p) => p.name === name);
          if (!existing) throw err;
          const { name: _n, ...updateBody } = body;
          await client.put(`/api/v1/projects/${existing.id}`, updateBody);
          log(`project '${name}' updated (id: ${existing.id})`);
        } else {
          throw err;
        }
      }
    });

  // --- create user ---
  cmd.command('user')
    .description('Create a user')
    .argument('<email>', 'User email address')
    .option('--password <pass>', 'User password')
    .option('--name <name>', 'User display name')
    .option('--force', 'Update if already exists')
    .action(async (email: string, opts) => {
      // Password is declared as an option but required in practice.
      if (!opts.password) {
        throw new Error('--password is required');
      }
      const body: Record<string, unknown> = {
        email,
        password: opts.password,
      };
      if (opts.name) body.name = opts.name;

      try {
        const user = await client.post<{ id: string; email: string }>('/api/v1/users', body);
        log(`user '${user.email}' created (id: ${user.id})`);
      } catch (err) {
        // --force upsert: find existing user by email and PUT an update.
        if (err instanceof ApiError && err.status === 409 && opts.force) {
          const existing = (await client.get<Array<{ id: string; email: string }>>('/api/v1/users')).find((u) => u.email === email);
          if (!existing) throw err;
          const { email: _e, ...updateBody } = body;
          await client.put(`/api/v1/users/${existing.id}`, updateBody);
          log(`user '${email}' updated (id: ${existing.id})`);
        } else {
          throw err;
        }
      }
    });

  // --- create group ---
  cmd.command('group')
    .description('Create a group')
    .argument('<name>', 'Group name')
    .option('--description <text>', 'Group description')
    .option('--member <email>', 'Member email (repeat for multiple)', collect, [])
    .option('--force', 'Update if already exists')
    .action(async (name: string, opts) => {
      const body: Record<string, unknown> = {
        name,
        members: opts.member,
      };
      if (opts.description) body.description = opts.description;

      try {
        const group = await client.post<{ id: string; name: string }>('/api/v1/groups', body);
        log(`group '${group.name}' created (id: ${group.id})`);
      } catch (err) {
        // --force upsert: find existing group by name and PUT an update.
        if (err instanceof ApiError && err.status === 409 && opts.force) {
          const existing = (await client.get<Array<{ id: string; name: string }>>('/api/v1/groups')).find((g) => g.name === name);
          if (!existing) throw err;
          const { name: _n, ...updateBody } = body;
          await client.put(`/api/v1/groups/${existing.id}`, updateBody);
          log(`group '${name}' updated (id: ${existing.id})`);
        } else {
          throw err;
        }
      }
    });

  // --- create rbac ---
  cmd.command('rbac')
    .description('Create an RBAC binding definition')
    .argument('<name>', 'RBAC binding name')
    .option('--subject <entry>', 'Subject as Kind:name (repeat for multiple)', collect, [])
    .option('--binding <entry>', 'Role binding as role:resource (e.g. edit:servers, run:projects)', collect, [])
    .option('--operation <action>', 'Operation binding (e.g. logs, backup)', collect, [])
    .option('--force', 'Update if already exists')
    .action(async (name: string, opts) => {
      // Subjects: "Kind:name", split on the FIRST colon so the name part may
      // itself contain colons.
      const subjects = (opts.subject as string[]).map((entry: string) => {
        const colonIdx = entry.indexOf(':');
        if (colonIdx === -1) {
          throw new Error(`Invalid subject format '${entry}'. Expected Kind:name (e.g. User:alice@example.com)`);
        }
        return { kind: entry.slice(0, colonIdx), name: entry.slice(colonIdx + 1) };
      });

      const roleBindings: Array<Record<string, string>> = [];

      // Resource bindings from --binding flag (role:resource or role:resource:name)
      for (const entry of opts.binding as string[]) {
        const parts = entry.split(':');
        if (parts.length === 2) {
          roleBindings.push({ role: parts[0]!, resource: parts[1]! });
        } else if (parts.length === 3) {
          roleBindings.push({ role: parts[0]!, resource: parts[1]!, name: parts[2]! });
        } else {
          throw new Error(`Invalid binding format '${entry}'. Expected role:resource or role:resource:name (e.g. edit:servers, view:servers:my-ha)`);
        }
      }

      // Operation bindings from --operation flag
      for (const action of opts.operation as string[]) {
        // Operations are modeled as the 'run' role bound to an action.
        roleBindings.push({ role: 'run', action });
      }

      const body: Record<string, unknown> = {
        name,
        subjects,
        roleBindings,
      };

      try {
        const rbac = await client.post<{ id: string; name: string }>('/api/v1/rbac', body);
        log(`rbac '${rbac.name}' created (id: ${rbac.id})`);
      } catch (err) {
        // --force upsert: find existing binding by name and PUT an update.
        if (err instanceof ApiError && err.status === 409 && opts.force) {
          const existing = (await client.get<Array<{ id: string; name: string }>>('/api/v1/rbac')).find((r) => r.name === name);
          if (!existing) throw err;
          const { name: _n, ...updateBody } = body;
          await client.put(`/api/v1/rbac/${existing.id}`, updateBody);
          log(`rbac '${name}' updated (id: ${existing.id})`);
        } else {
          throw err;
        }
      }
    });

  return cmd;
}
|
|
||||||
@@ -1,33 +0,0 @@
|
|||||||
import { Command } from 'commander';
|
|
||||||
import type { ApiClient } from '../api-client.js';
|
|
||||||
import { resolveResource, resolveNameOrId } from './shared.js';
|
|
||||||
|
|
||||||
/** Dependencies for the `delete` command (injected for testability). */
export interface DeleteCommandDeps {
  // Authenticated API client used for name resolution and the DELETE call.
  client: ApiClient;
  // Output sink; production wiring passes console.log.
  log: (...args: unknown[]) => void;
}
|
|
||||||
|
|
||||||
export function createDeleteCommand(deps: DeleteCommandDeps): Command {
|
|
||||||
const { client, log } = deps;
|
|
||||||
|
|
||||||
return new Command('delete')
|
|
||||||
.description('Delete a resource (server, instance, secret, project, user, group, rbac)')
|
|
||||||
.argument('<resource>', 'resource type')
|
|
||||||
.argument('<id>', 'resource ID or name')
|
|
||||||
.action(async (resourceArg: string, idOrName: string) => {
|
|
||||||
const resource = resolveResource(resourceArg);
|
|
||||||
|
|
||||||
// Resolve name → ID for any resource type
|
|
||||||
let id: string;
|
|
||||||
try {
|
|
||||||
id = await resolveNameOrId(client, resource, idOrName);
|
|
||||||
} catch {
|
|
||||||
id = idOrName; // Fall through with original
|
|
||||||
}
|
|
||||||
|
|
||||||
await client.delete(`/api/v1/${resource}/${id}`);
|
|
||||||
|
|
||||||
const singular = resource.replace(/s$/, '');
|
|
||||||
log(`${singular} '${idOrName}' deleted.`);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
@@ -1,618 +0,0 @@
|
|||||||
import { Command } from 'commander';
|
|
||||||
import { formatJson, formatYaml } from '../formatters/output.js';
|
|
||||||
import { resolveResource, resolveNameOrId } from './shared.js';
|
|
||||||
import type { ApiClient } from '../api-client.js';
|
|
||||||
|
|
||||||
/** Dependencies for the `describe` command (injected for testability). */
export interface DescribeCommandDeps {
  // Authenticated API client.
  client: ApiClient;
  // Fetch a single resource of the given type by id.
  fetchResource: (resource: string, id: string) => Promise<unknown>;
  // Optional extra fetch by id — presumably runtime inspection data; confirm
  // semantics against the caller that wires this in.
  fetchInspect?: (id: string) => Promise<unknown>;
  // Output sink; production wiring passes console.log.
  log: (...args: string[]) => void;
}
|
|
||||||
|
|
||||||
function pad(label: string, width = 18): string {
|
|
||||||
return label.padEnd(width);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
 * Render a server resource as sectioned human-readable text:
 * core identity fields, then optional Command / Environment / Health Check
 * sections, then a Metadata footer. Optional fields are printed only when
 * truthy on the fetched object.
 */
function formatServerDetail(server: Record<string, unknown>): string {
  const lines: string[] = [];
  lines.push(`=== Server: ${server.name} ===`);
  lines.push(`${pad('Name:')}${server.name}`);
  lines.push(`${pad('Transport:')}${server.transport ?? '-'}`);
  lines.push(`${pad('Replicas:')}${server.replicas ?? 1}`);
  if (server.dockerImage) lines.push(`${pad('Docker Image:')}${server.dockerImage}`);
  if (server.packageName) lines.push(`${pad('Package:')}${server.packageName}`);
  if (server.externalUrl) lines.push(`${pad('External URL:')}${server.externalUrl}`);
  if (server.repositoryUrl) lines.push(`${pad('Repository:')}${server.repositoryUrl}`);
  if (server.containerPort) lines.push(`${pad('Container Port:')}${server.containerPort}`);
  if (server.description) lines.push(`${pad('Description:')}${server.description}`);

  const command = server.command as string[] | null;
  if (command && command.length > 0) {
    lines.push('');
    lines.push('Command:');
    lines.push(`  ${command.join(' ')}`);
  }

  // Environment entries carry either an inline value or a secret reference.
  const env = server.env as Array<{ name: string; value?: string; valueFrom?: { secretRef: { name: string; key: string } } }> | undefined;
  if (env && env.length > 0) {
    lines.push('');
    lines.push('Environment:');
    // Column width sized to the longest variable name (minimum 6) plus gutter.
    const nameW = Math.max(6, ...env.map((e) => e.name.length)) + 2;
    lines.push(`  ${'NAME'.padEnd(nameW)}SOURCE`);
    for (const e of env) {
      if (e.value !== undefined) {
        lines.push(`  ${e.name.padEnd(nameW)}${e.value}`);
      } else if (e.valueFrom?.secretRef) {
        const ref = e.valueFrom.secretRef;
        lines.push(`  ${e.name.padEnd(nameW)}secret:${ref.name}/${ref.key}`);
      }
      // NOTE(review): an entry with neither value nor secretRef is silently
      // skipped — presumably the API never returns that shape; confirm.
    }
  }

  const hc = server.healthCheck as { tool: string; arguments?: Record<string, unknown>; intervalSeconds?: number; timeoutSeconds?: number; failureThreshold?: number } | null;
  if (hc) {
    lines.push('');
    lines.push('Health Check:');
    lines.push(`  ${pad('Tool:', 22)}${hc.tool}`);
    if (hc.arguments && Object.keys(hc.arguments).length > 0) {
      lines.push(`  ${pad('Arguments:', 22)}${JSON.stringify(hc.arguments)}`);
    }
    // Display defaults (60s / 10s / 3) applied when the server omits them.
    lines.push(`  ${pad('Interval:', 22)}${hc.intervalSeconds ?? 60}s`);
    lines.push(`  ${pad('Timeout:', 22)}${hc.timeoutSeconds ?? 10}s`);
    lines.push(`  ${pad('Failure Threshold:', 22)}${hc.failureThreshold ?? 3}`);
  }

  lines.push('');
  lines.push('Metadata:');
  lines.push(`  ${pad('ID:', 12)}${server.id}`);
  if (server.createdAt) lines.push(`  ${pad('Created:', 12)}${server.createdAt}`);
  if (server.updatedAt) lines.push(`  ${pad('Updated:', 12)}${server.updatedAt}`);

  return lines.join('\n');
}
|
|
||||||
|
|
||||||
/**
 * Render an instance resource as sectioned text: core status fields, then
 * optional Health / Metadata / Container (docker inspect) / Events sections,
 * then an ID/timestamps footer.
 *
 * @param instance - instance object as returned by the API (may embed `server`).
 * @param inspect  - optional flattened container-inspect data to append.
 */
function formatInstanceDetail(instance: Record<string, unknown>, inspect?: Record<string, unknown>): string {
  const lines: string[] = [];
  const server = instance.server as { name: string } | undefined;
  // Prefer the owning server's name in the banner; fall back to the instance ID.
  lines.push(`=== Instance: ${server?.name ?? instance.id} ===`);
  lines.push(`${pad('Status:')}${instance.status}`);
  lines.push(`${pad('Server:')}${server?.name ?? String(instance.serverId)}`);
  lines.push(`${pad('Container ID:')}${instance.containerId ?? '-'}`);
  lines.push(`${pad('Port:')}${instance.port ?? '-'}`);

  // Health section
  const healthStatus = instance.healthStatus as string | null;
  const lastHealthCheck = instance.lastHealthCheck as string | null;
  if (healthStatus || lastHealthCheck) {
    lines.push('');
    lines.push('Health:');
    lines.push(`  ${pad('Status:', 16)}${healthStatus ?? 'unknown'}`);
    if (lastHealthCheck) lines.push(`  ${pad('Last Check:', 16)}${lastHealthCheck}`);
  }

  const metadata = instance.metadata as Record<string, unknown> | undefined;
  if (metadata && Object.keys(metadata).length > 0) {
    lines.push('');
    lines.push('Metadata:');
    for (const [key, value] of Object.entries(metadata)) {
      lines.push(`  ${pad(key + ':', 16)}${String(value)}`);
    }
  }

  if (inspect) {
    lines.push('');
    lines.push('Container:');
    for (const [key, value] of Object.entries(inspect)) {
      // Nested objects are shown as compact JSON; scalars as aligned pairs.
      if (typeof value === 'object' && value !== null) {
        lines.push(`  ${key}: ${JSON.stringify(value)}`);
      } else {
        lines.push(`  ${pad(key + ':', 16)}${String(value)}`);
      }
    }
  }

  // Events section (k8s-style)
  const events = instance.events as Array<{ timestamp: string; type: string; message: string }> | undefined;
  if (events && events.length > 0) {
    lines.push('');
    lines.push('Events:');
    const tsW = 26;
    const typeW = 10;
    lines.push(`  ${'TIMESTAMP'.padEnd(tsW)}${'TYPE'.padEnd(typeW)}MESSAGE`);
    for (const ev of events) {
      lines.push(`  ${(ev.timestamp ?? '').padEnd(tsW)}${(ev.type ?? '').padEnd(typeW)}${ev.message ?? ''}`);
    }
  }

  lines.push('');
  lines.push(`  ${pad('ID:', 12)}${instance.id}`);
  if (instance.createdAt) lines.push(`  ${pad('Created:', 12)}${instance.createdAt}`);
  if (instance.updatedAt) lines.push(`  ${pad('Updated:', 12)}${instance.updatedAt}`);

  return lines.join('\n');
}
|
|
||||||
|
|
||||||
/**
 * Render a project resource as sectioned text: name/description, then an
 * optional Proxy Config section, an optional attached-Servers list, and a
 * Metadata footer.
 */
function formatProjectDetail(project: Record<string, unknown>): string {
  const lines: string[] = [];
  lines.push(`=== Project: ${project.name} ===`);
  lines.push(`${pad('Name:')}${project.name}`);
  if (project.description) lines.push(`${pad('Description:')}${project.description}`);

  // Proxy config section
  const proxyMode = project.proxyMode as string | undefined;
  const llmProvider = project.llmProvider as string | undefined;
  const llmModel = project.llmModel as string | undefined;
  if (proxyMode || llmProvider || llmModel) {
    lines.push('');
    lines.push('Proxy Config:');
    // Mode defaults to 'direct' in the display when unset.
    lines.push(`  ${pad('Mode:', 18)}${proxyMode ?? 'direct'}`);
    if (llmProvider) lines.push(`  ${pad('LLM Provider:', 18)}${llmProvider}`);
    if (llmModel) lines.push(`  ${pad('LLM Model:', 18)}${llmModel}`);
  }

  // Servers section
  const servers = project.servers as Array<{ server: { name: string } }> | undefined;
  if (servers && servers.length > 0) {
    lines.push('');
    lines.push('Servers:');
    lines.push('  NAME');
    for (const s of servers) {
      lines.push(`  ${s.server.name}`);
    }
  }

  lines.push('');
  lines.push('Metadata:');
  lines.push(`  ${pad('ID:', 12)}${project.id}`);
  if (project.ownerId) lines.push(`  ${pad('Owner:', 12)}${project.ownerId}`);
  if (project.createdAt) lines.push(`  ${pad('Created:', 12)}${project.createdAt}`);
  if (project.updatedAt) lines.push(`  ${pad('Updated:', 12)}${project.updatedAt}`);

  return lines.join('\n');
}
|
|
||||||
|
|
||||||
/**
 * Render a secret resource as sectioned text. Values are masked as `***`
 * unless the caller passed --show-values; a hint line is appended when
 * masking is active.
 *
 * @param secret     - secret object as returned by the API.
 * @param showValues - when true, print raw values instead of masking.
 */
function formatSecretDetail(secret: Record<string, unknown>, showValues: boolean): string {
  const lines: string[] = [];
  lines.push(`=== Secret: ${secret.name} ===`);
  lines.push(`${pad('Name:')}${secret.name}`);

  const data = secret.data as Record<string, string> | undefined;
  if (data && Object.keys(data).length > 0) {
    lines.push('');
    lines.push('Data:');
    // Key column sized to the longest key (minimum 4) plus gutter.
    const keyW = Math.max(4, ...Object.keys(data).map((k) => k.length)) + 2;
    for (const [key, value] of Object.entries(data)) {
      const display = showValues ? value : '***';
      lines.push(`  ${key.padEnd(keyW)}${display}`);
    }
    if (!showValues) {
      lines.push('');
      lines.push('  (use --show-values to reveal)');
    }
  } else {
    lines.push(`${pad('Data:')}(empty)`);
  }

  lines.push('');
  lines.push('Metadata:');
  lines.push(`  ${pad('ID:', 12)}${secret.id}`);
  if (secret.createdAt) lines.push(`  ${pad('Created:', 12)}${secret.createdAt}`);
  if (secret.updatedAt) lines.push(`  ${pad('Updated:', 12)}${secret.updatedAt}`);

  return lines.join('\n');
}
|
|
||||||
|
|
||||||
/**
 * Render a template resource as sectioned text: core fields with display
 * defaults, optional Command / Environment Variables / Health Check
 * sections, a ready-to-copy Usage hint, and a Metadata footer.
 */
function formatTemplateDetail(template: Record<string, unknown>): string {
  const lines: string[] = [];
  lines.push(`=== Template: ${template.name} ===`);
  lines.push(`${pad('Name:')}${template.name}`);
  // Display defaults when the template omits these fields.
  lines.push(`${pad('Version:')}${template.version ?? '1.0.0'}`);
  lines.push(`${pad('Transport:')}${template.transport ?? 'STDIO'}`);
  lines.push(`${pad('Replicas:')}${template.replicas ?? 1}`);
  if (template.dockerImage) lines.push(`${pad('Docker Image:')}${template.dockerImage}`);
  if (template.packageName) lines.push(`${pad('Package:')}${template.packageName}`);
  if (template.externalUrl) lines.push(`${pad('External URL:')}${template.externalUrl}`);
  if (template.repositoryUrl) lines.push(`${pad('Repository:')}${template.repositoryUrl}`);
  if (template.containerPort) lines.push(`${pad('Container Port:')}${template.containerPort}`);
  if (template.description) lines.push(`${pad('Description:')}${template.description}`);

  const command = template.command as string[] | null;
  if (command && command.length > 0) {
    lines.push('');
    lines.push('Command:');
    lines.push(`  ${command.join(' ')}`);
  }

  // Template env entries are declarations (required/description), not values.
  const env = template.env as Array<{ name: string; description?: string; required?: boolean; defaultValue?: string }> | undefined;
  if (env && env.length > 0) {
    lines.push('');
    lines.push('Environment Variables:');
    const nameW = Math.max(6, ...env.map((e) => e.name.length)) + 2;
    lines.push(`  ${'NAME'.padEnd(nameW)}${'REQUIRED'.padEnd(10)}DESCRIPTION`);
    for (const e of env) {
      const req = e.required ? 'yes' : 'no';
      const desc = e.description ?? '';
      lines.push(`  ${e.name.padEnd(nameW)}${req.padEnd(10)}${desc}`);
    }
  }

  const hc = template.healthCheck as { tool: string; arguments?: Record<string, unknown>; intervalSeconds?: number; timeoutSeconds?: number; failureThreshold?: number } | null;
  if (hc) {
    lines.push('');
    lines.push('Health Check:');
    lines.push(`  ${pad('Tool:', 22)}${hc.tool}`);
    if (hc.arguments && Object.keys(hc.arguments).length > 0) {
      lines.push(`  ${pad('Arguments:', 22)}${JSON.stringify(hc.arguments)}`);
    }
    lines.push(`  ${pad('Interval:', 22)}${hc.intervalSeconds ?? 60}s`);
    lines.push(`  ${pad('Timeout:', 22)}${hc.timeoutSeconds ?? 10}s`);
    lines.push(`  ${pad('Failure Threshold:', 22)}${hc.failureThreshold ?? 3}`);
  }

  lines.push('');
  lines.push('Usage:');
  lines.push(`  mcpctl create server my-${template.name} --from-template=${template.name}`);

  lines.push('');
  lines.push('Metadata:');
  lines.push(`  ${pad('ID:', 12)}${template.id}`);
  if (template.createdAt) lines.push(`  ${pad('Created:', 12)}${template.createdAt}`);
  if (template.updatedAt) lines.push(`  ${pad('Updated:', 12)}${template.updatedAt}`);

  return lines.join('\n');
}
|
|
||||||
|
|
||||||
/** One role grant: either a resource binding (role + resource [+ name]) or an operation binding (role + action). */
interface RbacBinding { role: string; resource?: string; action?: string; name?: string }
/** An RBAC definition: the subjects it applies to and the role bindings it grants. */
interface RbacDef { name: string; subjects: Array<{ kind: string; name: string }>; roleBindings: RbacBinding[] }
/** A labeled group of bindings for display (e.g. "Direct (…)", "Inherited (group)"). */
interface PermissionSet { source: string; bindings: RbacBinding[] }
|
|
||||||
|
|
||||||
/**
 * Render labeled permission sections as indented tables. Each section is
 * split into resource bindings (role/resource[/name]) and operation
 * bindings (action/role); empty sections are skipped entirely.
 */
function formatPermissionSections(sections: PermissionSet[]): string[] {
  const lines: string[] = [];
  for (const section of sections) {
    const bindings = section.bindings;
    if (bindings.length === 0) continue;

    // A binding is a resource grant or an operation grant depending on
    // which discriminating field is present.
    const resourceBindings = bindings.filter((b) => 'resource' in b && b.resource !== undefined);
    const operationBindings = bindings.filter((b) => 'action' in b && b.action !== undefined);

    if (resourceBindings.length > 0) {
      lines.push('');
      lines.push(`${section.source} — Resources:`);
      const roleW = Math.max(6, ...resourceBindings.map((b) => b.role.length)) + 2;
      const resW = Math.max(10, ...resourceBindings.map((b) => (b.resource ?? '').length)) + 2;
      // The NAME column only appears when at least one binding names a resource.
      const hasName = resourceBindings.some((b) => b.name);
      if (hasName) {
        lines.push(`  ${'ROLE'.padEnd(roleW)}${'RESOURCE'.padEnd(resW)}NAME`);
      } else {
        lines.push(`  ${'ROLE'.padEnd(roleW)}RESOURCE`);
      }
      for (const b of resourceBindings) {
        if (hasName) {
          lines.push(`  ${b.role.padEnd(roleW)}${(b.resource ?? '').padEnd(resW)}${b.name ?? '*'}`);
        } else {
          lines.push(`  ${b.role.padEnd(roleW)}${b.resource}`);
        }
      }
    }

    if (operationBindings.length > 0) {
      lines.push('');
      lines.push(`${section.source} — Operations:`);
      lines.push(`  ${'ACTION'.padEnd(20)}ROLE`);
      for (const b of operationBindings) {
        lines.push(`  ${(b.action ?? '').padEnd(20)}${b.role}`);
      }
    }
  }
  return lines;
}
|
|
||||||
|
|
||||||
function collectBindingsForSubject(
|
|
||||||
rbacDefs: RbacDef[],
|
|
||||||
kind: string,
|
|
||||||
name: string,
|
|
||||||
): { rbacName: string; bindings: RbacBinding[] }[] {
|
|
||||||
const results: { rbacName: string; bindings: RbacBinding[] }[] = [];
|
|
||||||
for (const def of rbacDefs) {
|
|
||||||
const matched = def.subjects.some((s) => s.kind === kind && s.name === name);
|
|
||||||
if (matched) {
|
|
||||||
results.push({ rbacName: def.name, bindings: def.roleBindings });
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return results;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
 * Render a user resource as sectioned text: identity fields, group
 * membership, and — when RBAC definitions are supplied — an Access section
 * combining permissions granted directly to the user with permissions
 * inherited from each of the user's groups.
 *
 * @param user       - user object as returned by the API.
 * @param rbacDefs   - all RBAC definitions; Access is omitted when undefined.
 * @param userGroups - names of the groups the user belongs to.
 */
function formatUserDetail(
  user: Record<string, unknown>,
  rbacDefs?: RbacDef[],
  userGroups?: string[],
): string {
  const lines: string[] = [];
  lines.push(`=== User: ${user.email} ===`);
  lines.push(`${pad('Email:')}${user.email}`);
  lines.push(`${pad('Name:')}${(user.name as string | null) ?? '-'}`);
  lines.push(`${pad('Provider:')}${(user.provider as string | null) ?? 'local'}`);

  if (userGroups && userGroups.length > 0) {
    lines.push(`${pad('Groups:')}${userGroups.join(', ')}`);
  }

  if (rbacDefs) {
    const email = user.email as string;

    // Direct permissions (User:email subjects)
    const directMatches = collectBindingsForSubject(rbacDefs, 'User', email);
    const directBindings = directMatches.flatMap((m) => m.bindings);
    const directSources = directMatches.map((m) => m.rbacName).join(', ');

    // Inherited permissions (Group:name subjects)
    const inheritedSections: PermissionSet[] = [];
    if (userGroups) {
      for (const groupName of userGroups) {
        const groupMatches = collectBindingsForSubject(rbacDefs, 'Group', groupName);
        const groupBindings = groupMatches.flatMap((m) => m.bindings);
        if (groupBindings.length > 0) {
          inheritedSections.push({ source: `Inherited (${groupName})`, bindings: groupBindings });
        }
      }
    }

    // Direct grants are listed first, then one section per group.
    const sections: PermissionSet[] = [];
    if (directBindings.length > 0) {
      sections.push({ source: `Direct (${directSources})`, bindings: directBindings });
    }
    sections.push(...inheritedSections);

    if (sections.length > 0) {
      lines.push('');
      lines.push('Access:');
      lines.push(...formatPermissionSections(sections));
    } else {
      lines.push('');
      lines.push('Access: (none)');
    }
  }

  lines.push('');
  lines.push('Metadata:');
  lines.push(`  ${pad('ID:', 12)}${user.id}`);
  if (user.createdAt) lines.push(`  ${pad('Created:', 12)}${user.createdAt}`);
  if (user.updatedAt) lines.push(`  ${pad('Updated:', 12)}${user.updatedAt}`);

  return lines.join('\n');
}
|
|
||||||
|
|
||||||
/**
 * Render a group resource as sectioned text: name/description, a Members
 * table (email + date added), and — when RBAC definitions are supplied —
 * an Access section of the permissions granted to the group.
 */
function formatGroupDetail(group: Record<string, unknown>, rbacDefs?: RbacDef[]): string {
  const lines: string[] = [];
  lines.push(`=== Group: ${group.name} ===`);
  lines.push(`${pad('Name:')}${group.name}`);
  if (group.description) lines.push(`${pad('Description:')}${group.description}`);

  const members = group.members as Array<{ user: { email: string }; createdAt?: string }> | undefined;
  if (members && members.length > 0) {
    lines.push('');
    lines.push('Members:');
    // Email column sized to the longest member email (minimum 6) plus gutter.
    const emailW = Math.max(6, ...members.map((m) => m.user.email.length)) + 2;
    lines.push(`  ${'EMAIL'.padEnd(emailW)}ADDED`);
    for (const m of members) {
      const added = (m.createdAt as string | undefined) ?? '-';
      lines.push(`  ${m.user.email.padEnd(emailW)}${added}`);
    }
  }

  if (rbacDefs) {
    const groupName = group.name as string;
    const matches = collectBindingsForSubject(rbacDefs, 'Group', groupName);
    const allBindings = matches.flatMap((m) => m.bindings);
    const sources = matches.map((m) => m.rbacName).join(', ');

    if (allBindings.length > 0) {
      const sections: PermissionSet[] = [{ source: `Granted (${sources})`, bindings: allBindings }];
      lines.push('');
      lines.push('Access:');
      lines.push(...formatPermissionSections(sections));
    } else {
      lines.push('');
      lines.push('Access: (none)');
    }
  }

  lines.push('');
  lines.push('Metadata:');
  lines.push(`  ${pad('ID:', 12)}${group.id}`);
  if (group.createdAt) lines.push(`  ${pad('Created:', 12)}${group.createdAt}`);
  if (group.updatedAt) lines.push(`  ${pad('Updated:', 12)}${group.updatedAt}`);

  return lines.join('\n');
}
|
|
||||||
|
|
||||||
/**
 * Render an RBAC definition as sectioned text: a Subjects table, then the
 * role bindings split into resource grants and operation grants, then a
 * Metadata footer. The table layout mirrors formatPermissionSections.
 */
function formatRbacDetail(rbac: Record<string, unknown>): string {
  const lines: string[] = [];
  lines.push(`=== RBAC: ${rbac.name} ===`);
  lines.push(`${pad('Name:')}${rbac.name}`);

  const subjects = rbac.subjects as Array<{ kind: string; name: string }> | undefined;
  if (subjects && subjects.length > 0) {
    lines.push('');
    lines.push('Subjects:');
    const kindW = Math.max(6, ...subjects.map((s) => s.kind.length)) + 2;
    lines.push(`  ${'KIND'.padEnd(kindW)}NAME`);
    for (const s of subjects) {
      lines.push(`  ${s.kind.padEnd(kindW)}${s.name}`);
    }
  }

  const roleBindings = rbac.roleBindings as Array<{ role: string; resource?: string; action?: string; name?: string }> | undefined;
  if (roleBindings && roleBindings.length > 0) {
    // Separate resource bindings from operation bindings
    const resourceBindings = roleBindings.filter((b) => 'resource' in b && b.resource !== undefined);
    const operationBindings = roleBindings.filter((b) => 'action' in b && b.action !== undefined);

    if (resourceBindings.length > 0) {
      lines.push('');
      lines.push('Resource Bindings:');
      const roleW = Math.max(6, ...resourceBindings.map((b) => b.role.length)) + 2;
      const resW = Math.max(10, ...resourceBindings.map((b) => (b.resource ?? '').length)) + 2;
      // The NAME column only appears when at least one binding names a resource.
      const hasName = resourceBindings.some((b) => b.name);
      if (hasName) {
        lines.push(`  ${'ROLE'.padEnd(roleW)}${'RESOURCE'.padEnd(resW)}NAME`);
      } else {
        lines.push(`  ${'ROLE'.padEnd(roleW)}RESOURCE`);
      }
      for (const b of resourceBindings) {
        if (hasName) {
          lines.push(`  ${b.role.padEnd(roleW)}${(b.resource ?? '').padEnd(resW)}${b.name ?? '*'}`);
        } else {
          lines.push(`  ${b.role.padEnd(roleW)}${b.resource}`);
        }
      }
    }

    if (operationBindings.length > 0) {
      lines.push('');
      lines.push('Operations:');
      lines.push(`  ${'ACTION'.padEnd(20)}ROLE`);
      for (const b of operationBindings) {
        lines.push(`  ${(b.action ?? '').padEnd(20)}${b.role}`);
      }
    }
  }

  lines.push('');
  lines.push('Metadata:');
  lines.push(`  ${pad('ID:', 12)}${rbac.id}`);
  if (rbac.createdAt) lines.push(`  ${pad('Created:', 12)}${rbac.createdAt}`);
  if (rbac.updatedAt) lines.push(`  ${pad('Updated:', 12)}${rbac.updatedAt}`);

  return lines.join('\n');
}
|
|
||||||
|
|
||||||
function formatGenericDetail(obj: Record<string, unknown>): string {
|
|
||||||
const lines: string[] = [];
|
|
||||||
for (const [key, value] of Object.entries(obj)) {
|
|
||||||
if (value === null || value === undefined) {
|
|
||||||
lines.push(`${pad(key + ':')} -`);
|
|
||||||
} else if (Array.isArray(value)) {
|
|
||||||
if (value.length === 0) {
|
|
||||||
lines.push(`${pad(key + ':')} []`);
|
|
||||||
} else {
|
|
||||||
lines.push(`${key}:`);
|
|
||||||
for (const item of value) {
|
|
||||||
lines.push(` - ${typeof item === 'object' ? JSON.stringify(item) : String(item)}`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else if (typeof value === 'object') {
|
|
||||||
lines.push(`${key}:`);
|
|
||||||
for (const [k, v] of Object.entries(value as Record<string, unknown>)) {
|
|
||||||
lines.push(` ${pad(k + ':')}${String(v)}`);
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
lines.push(`${pad(key + ':')}${String(value)}`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return lines.join('\n');
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
 * Build the `describe` command: resolve a resource name/ID, fetch it, and
 * print a type-specific detail view (or JSON/YAML with -o).
 *
 * Name resolution is best-effort: for instances, a server name falls back
 * to that server's first RUNNING instance; for other resources a failed
 * lookup falls through with the original argument so the fetch can surface
 * the real error. Users and groups additionally fetch RBAC definitions to
 * show effective permissions.
 */
export function createDescribeCommand(deps: DescribeCommandDeps): Command {
  return new Command('describe')
    .description('Show detailed information about a resource')
    .argument('<resource>', 'resource type (server, project, instance)')
    .argument('<id>', 'resource ID or name')
    .option('-o, --output <format>', 'output format (detail, json, yaml)', 'detail')
    .option('--show-values', 'Show secret values (default: masked)')
    .action(async (resourceArg: string, idOrName: string, opts: { output: string; showValues?: boolean }) => {
      const resource = resolveResource(resourceArg);

      // Resolve name → ID
      let id: string;
      if (resource === 'instances') {
        // Instances: accept instance ID or server name (resolve to first running instance)
        try {
          id = await resolveNameOrId(deps.client, resource, idOrName);
        } catch {
          // Not an instance ID — try as server name
          const servers = await deps.client.get<Array<{ id: string; name: string }>>('/api/v1/servers');
          const server = servers.find((s) => s.name === idOrName || s.id === idOrName);
          if (server) {
            const instances = await deps.client.get<Array<{ id: string; status: string }>>(`/api/v1/instances?serverId=${server.id}`);
            // Prefer a RUNNING instance; otherwise take whichever exists first.
            const running = instances.find((i) => i.status === 'RUNNING') ?? instances[0];
            if (running) {
              id = running.id;
            } else {
              throw new Error(`No instances found for server '${idOrName}'`);
            }
          } else {
            id = idOrName;
          }
        }
      } else {
        try {
          id = await resolveNameOrId(deps.client, resource, idOrName);
        } catch {
          // Let the subsequent fetch report the not-found error.
          id = idOrName;
        }
      }

      const item = await deps.fetchResource(resource, id) as Record<string, unknown>;

      // Enrich instances with container inspect data
      let inspect: Record<string, unknown> | undefined;
      if (resource === 'instances' && deps.fetchInspect && item.containerId) {
        try {
          inspect = await deps.fetchInspect(id) as Record<string, unknown>;
          item.containerInspect = inspect;
        } catch {
          // Container may not be available
        }
      }

      if (opts.output === 'json') {
        deps.log(formatJson(item));
      } else if (opts.output === 'yaml') {
        deps.log(formatYaml(item));
      } else {
        // Visually clean sectioned output
        switch (resource) {
          case 'servers':
            deps.log(formatServerDetail(item));
            break;
          case 'instances':
            deps.log(formatInstanceDetail(item, inspect));
            break;
          case 'secrets':
            deps.log(formatSecretDetail(item, opts.showValues === true));
            break;
          case 'templates':
            deps.log(formatTemplateDetail(item));
            break;
          case 'projects':
            deps.log(formatProjectDetail(item));
            break;
          case 'users': {
            // Fetch RBAC definitions and groups to show permissions
            // (both best-effort: failures degrade to empty lists).
            const [rbacDefsForUser, allGroupsForUser] = await Promise.all([
              deps.client.get<RbacDef[]>('/api/v1/rbac').catch(() => [] as RbacDef[]),
              deps.client.get<Array<{ name: string; members?: Array<{ user: { email: string } }> }>>('/api/v1/groups').catch(() => []),
            ]);
            const userEmail = item.email as string;
            const userGroupNames = allGroupsForUser
              .filter((g) => g.members?.some((m) => m.user.email === userEmail))
              .map((g) => g.name);
            deps.log(formatUserDetail(item, rbacDefsForUser, userGroupNames));
            break;
          }
          case 'groups': {
            const rbacDefsForGroup = await deps.client.get<RbacDef[]>('/api/v1/rbac').catch(() => [] as RbacDef[]);
            deps.log(formatGroupDetail(item, rbacDefsForGroup));
            break;
          }
          case 'rbac':
            deps.log(formatRbacDetail(item));
            break;
          default:
            deps.log(formatGenericDetail(item));
        }
      }
    });
}
|
|
||||||
@@ -1,114 +0,0 @@
|
|||||||
import { execSync } from 'node:child_process';
import { writeFileSync, readFileSync, unlinkSync, mkdtempSync, rmSync } from 'node:fs';
import { tmpdir } from 'node:os';
import { join } from 'node:path';

import { Command } from 'commander';
import yaml from 'js-yaml';

import type { ApiClient } from '../api-client.js';
import { resolveResource, resolveNameOrId, stripInternalFields } from './shared.js';
|
|
||||||
|
|
||||||
/** Injected dependencies for the `edit` command (swappable in tests). */
export interface EditCommandDeps {
  /** HTTP client for the control-plane API. */
  client: ApiClient;
  /** Output sink (stdout in production). */
  log: (...args: unknown[]) => void;
  /** Override for testing — return editor binary name. */
  getEditor?: () => string;
  /** Override for testing — simulate opening the editor. */
  openEditor?: (filePath: string, editor: string) => void;
}
|
|
||||||
|
|
||||||
function getEditor(deps: EditCommandDeps): string {
|
|
||||||
if (deps.getEditor) return deps.getEditor();
|
|
||||||
return process.env.VISUAL ?? process.env.EDITOR ?? 'vi';
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
 * Open `filePath` in `editor`, blocking until the editor exits. Tests
 * inject `deps.openEditor` to avoid spawning a real process.
 *
 * NOTE(review): the command line is built by string interpolation and run
 * through a shell, so an editor value or file path containing quotes or
 * shell metacharacters would be interpreted by the shell. Paths here come
 * from mkdtemp + the user's own CLI argument, but consider
 * spawnSync/execFileSync if that assumption changes — confirm.
 */
function openEditor(filePath: string, editor: string, deps: EditCommandDeps): void {
  if (deps.openEditor) {
    deps.openEditor(filePath, editor);
    return;
  }
  execSync(`${editor} "${filePath}"`, { stdio: 'inherit' });
}
|
|
||||||
|
|
||||||
export function createEditCommand(deps: EditCommandDeps): Command {
|
|
||||||
const { client, log } = deps;
|
|
||||||
|
|
||||||
return new Command('edit')
|
|
||||||
.description('Edit a resource in your default editor (server, project)')
|
|
||||||
.argument('<resource>', 'Resource type (server, project)')
|
|
||||||
.argument('<name-or-id>', 'Resource name or ID')
|
|
||||||
.action(async (resourceArg: string, nameOrId: string) => {
|
|
||||||
const resource = resolveResource(resourceArg);
|
|
||||||
|
|
||||||
// Instances are immutable
|
|
||||||
if (resource === 'instances') {
|
|
||||||
log('Error: instances are immutable and cannot be edited.');
|
|
||||||
log('To change an instance, update the server definition and let reconciliation handle it.');
|
|
||||||
process.exitCode = 1;
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const validResources = ['servers', 'secrets', 'projects', 'groups', 'rbac'];
|
|
||||||
if (!validResources.includes(resource)) {
|
|
||||||
log(`Error: unknown resource type '${resourceArg}'`);
|
|
||||||
process.exitCode = 1;
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Resolve name → ID
|
|
||||||
const id = await resolveNameOrId(client, resource, nameOrId);
|
|
||||||
|
|
||||||
// Fetch current state
|
|
||||||
const current = await client.get<Record<string, unknown>>(`/api/v1/${resource}/${id}`);
|
|
||||||
|
|
||||||
// Strip read-only fields for editor
|
|
||||||
const editable = stripInternalFields(current);
|
|
||||||
|
|
||||||
// Serialize to YAML
|
|
||||||
const singular = resource.replace(/s$/, '');
|
|
||||||
const header = `# Editing ${singular}: ${nameOrId}\n# Save and close to apply changes. Clear the file to cancel.\n`;
|
|
||||||
const originalYaml = yaml.dump(editable, { lineWidth: 120, noRefs: true });
|
|
||||||
const content = header + originalYaml;
|
|
||||||
|
|
||||||
// Write to temp file
|
|
||||||
const tmpDir = mkdtempSync(join(tmpdir(), 'mcpctl-edit-'));
|
|
||||||
const tmpFile = join(tmpDir, `${singular}-${nameOrId}.yaml`);
|
|
||||||
writeFileSync(tmpFile, content, 'utf-8');
|
|
||||||
|
|
||||||
try {
|
|
||||||
// Open editor
|
|
||||||
const editor = getEditor(deps);
|
|
||||||
openEditor(tmpFile, editor, deps);
|
|
||||||
|
|
||||||
// Read back
|
|
||||||
const modified = readFileSync(tmpFile, 'utf-8');
|
|
||||||
|
|
||||||
// Strip comments for comparison
|
|
||||||
const modifiedClean = modified
|
|
||||||
.split('\n')
|
|
||||||
.filter((line) => !line.startsWith('#'))
|
|
||||||
.join('\n')
|
|
||||||
.trim();
|
|
||||||
|
|
||||||
if (!modifiedClean) {
|
|
||||||
log('Edit cancelled (empty file).');
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (modifiedClean === originalYaml.trim()) {
|
|
||||||
log(`${singular} '${nameOrId}' unchanged.`);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Parse and apply
|
|
||||||
const updates = yaml.load(modifiedClean) as Record<string, unknown>;
|
|
||||||
await client.put(`/api/v1/${resource}/${id}`, updates);
|
|
||||||
log(`${singular} '${nameOrId}' updated.`);
|
|
||||||
} finally {
|
|
||||||
try {
|
|
||||||
unlinkSync(tmpFile);
|
|
||||||
} catch {
|
|
||||||
// Ignore cleanup errors
|
|
||||||
}
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}
|
|
||||||
@@ -1,204 +0,0 @@
|
|||||||
import { Command } from 'commander';
|
|
||||||
import { formatTable } from '../formatters/table.js';
|
|
||||||
import { formatJson, formatYaml } from '../formatters/output.js';
|
|
||||||
import type { Column } from '../formatters/table.js';
|
|
||||||
import { resolveResource, stripInternalFields } from './shared.js';
|
|
||||||
|
|
||||||
export interface GetCommandDeps {
|
|
||||||
fetchResource: (resource: string, id?: string) => Promise<unknown[]>;
|
|
||||||
log: (...args: string[]) => void;
|
|
||||||
}
|
|
||||||
|
|
||||||
interface ServerRow {
|
|
||||||
id: string;
|
|
||||||
name: string;
|
|
||||||
transport: string;
|
|
||||||
packageName: string | null;
|
|
||||||
dockerImage: string | null;
|
|
||||||
}
|
|
||||||
|
|
||||||
interface ProjectRow {
|
|
||||||
id: string;
|
|
||||||
name: string;
|
|
||||||
description: string;
|
|
||||||
proxyMode: string;
|
|
||||||
ownerId: string;
|
|
||||||
servers?: Array<{ server: { name: string } }>;
|
|
||||||
}
|
|
||||||
|
|
||||||
interface SecretRow {
|
|
||||||
id: string;
|
|
||||||
name: string;
|
|
||||||
data: Record<string, string>;
|
|
||||||
}
|
|
||||||
|
|
||||||
interface TemplateRow {
|
|
||||||
id: string;
|
|
||||||
name: string;
|
|
||||||
version: string;
|
|
||||||
transport: string;
|
|
||||||
packageName: string | null;
|
|
||||||
description: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
interface InstanceRow {
|
|
||||||
id: string;
|
|
||||||
serverId: string;
|
|
||||||
server?: { name: string };
|
|
||||||
status: string;
|
|
||||||
containerId: string | null;
|
|
||||||
port: number | null;
|
|
||||||
healthStatus: string | null;
|
|
||||||
}
|
|
||||||
|
|
||||||
const serverColumns: Column<ServerRow>[] = [
|
|
||||||
{ header: 'NAME', key: 'name' },
|
|
||||||
{ header: 'TRANSPORT', key: 'transport', width: 16 },
|
|
||||||
{ header: 'PACKAGE', key: (r) => r.packageName ?? '-' },
|
|
||||||
{ header: 'IMAGE', key: (r) => r.dockerImage ?? '-' },
|
|
||||||
{ header: 'ID', key: 'id' },
|
|
||||||
];
|
|
||||||
|
|
||||||
interface UserRow {
|
|
||||||
id: string;
|
|
||||||
email: string;
|
|
||||||
name: string | null;
|
|
||||||
provider: string | null;
|
|
||||||
}
|
|
||||||
|
|
||||||
interface GroupRow {
|
|
||||||
id: string;
|
|
||||||
name: string;
|
|
||||||
description: string;
|
|
||||||
members?: Array<{ user: { email: string } }>;
|
|
||||||
}
|
|
||||||
|
|
||||||
interface RbacRow {
|
|
||||||
id: string;
|
|
||||||
name: string;
|
|
||||||
subjects: Array<{ kind: string; name: string }>;
|
|
||||||
roleBindings: Array<{ role: string; resource?: string; action?: string; name?: string }>;
|
|
||||||
}
|
|
||||||
|
|
||||||
const projectColumns: Column<ProjectRow>[] = [
|
|
||||||
{ header: 'NAME', key: 'name' },
|
|
||||||
{ header: 'MODE', key: (r) => r.proxyMode ?? 'direct', width: 10 },
|
|
||||||
{ header: 'SERVERS', key: (r) => r.servers ? String(r.servers.length) : '0', width: 8 },
|
|
||||||
{ header: 'DESCRIPTION', key: 'description', width: 30 },
|
|
||||||
{ header: 'ID', key: 'id' },
|
|
||||||
];
|
|
||||||
|
|
||||||
const userColumns: Column<UserRow>[] = [
|
|
||||||
{ header: 'EMAIL', key: 'email' },
|
|
||||||
{ header: 'NAME', key: (r) => r.name ?? '-' },
|
|
||||||
{ header: 'PROVIDER', key: (r) => r.provider ?? 'local', width: 10 },
|
|
||||||
{ header: 'ID', key: 'id' },
|
|
||||||
];
|
|
||||||
|
|
||||||
const groupColumns: Column<GroupRow>[] = [
|
|
||||||
{ header: 'NAME', key: 'name' },
|
|
||||||
{ header: 'MEMBERS', key: (r) => r.members ? String(r.members.length) : '0', width: 8 },
|
|
||||||
{ header: 'DESCRIPTION', key: 'description', width: 40 },
|
|
||||||
{ header: 'ID', key: 'id' },
|
|
||||||
];
|
|
||||||
|
|
||||||
const rbacColumns: Column<RbacRow>[] = [
|
|
||||||
{ header: 'NAME', key: 'name' },
|
|
||||||
{ header: 'SUBJECTS', key: (r) => r.subjects.map((s) => `${s.kind}:${s.name}`).join(', '), width: 30 },
|
|
||||||
{ header: 'BINDINGS', key: (r) => r.roleBindings.map((b) => {
|
|
||||||
if ('action' in b && b.action !== undefined) return `run>${b.action}`;
|
|
||||||
if ('resource' in b && b.resource !== undefined) {
|
|
||||||
const base = `${b.role}:${b.resource}`;
|
|
||||||
return b.name ? `${base}:${b.name}` : base;
|
|
||||||
}
|
|
||||||
return b.role;
|
|
||||||
}).join(', '), width: 40 },
|
|
||||||
{ header: 'ID', key: 'id' },
|
|
||||||
];
|
|
||||||
|
|
||||||
const secretColumns: Column<SecretRow>[] = [
|
|
||||||
{ header: 'NAME', key: 'name' },
|
|
||||||
{ header: 'KEYS', key: (r) => Object.keys(r.data).join(', ') || '-', width: 40 },
|
|
||||||
{ header: 'ID', key: 'id' },
|
|
||||||
];
|
|
||||||
|
|
||||||
const templateColumns: Column<TemplateRow>[] = [
|
|
||||||
{ header: 'NAME', key: 'name' },
|
|
||||||
{ header: 'VERSION', key: 'version', width: 10 },
|
|
||||||
{ header: 'TRANSPORT', key: 'transport', width: 16 },
|
|
||||||
{ header: 'PACKAGE', key: (r) => r.packageName ?? '-' },
|
|
||||||
{ header: 'DESCRIPTION', key: 'description', width: 50 },
|
|
||||||
];
|
|
||||||
|
|
||||||
const instanceColumns: Column<InstanceRow>[] = [
|
|
||||||
{ header: 'NAME', key: (r) => r.server?.name ?? '-', width: 20 },
|
|
||||||
{ header: 'STATUS', key: 'status', width: 10 },
|
|
||||||
{ header: 'HEALTH', key: (r) => r.healthStatus ?? '-', width: 10 },
|
|
||||||
{ header: 'PORT', key: (r) => r.port != null ? String(r.port) : '-', width: 6 },
|
|
||||||
{ header: 'CONTAINER', key: (r) => r.containerId ? r.containerId.slice(0, 12) : '-', width: 14 },
|
|
||||||
{ header: 'ID', key: 'id' },
|
|
||||||
];
|
|
||||||
|
|
||||||
function getColumnsForResource(resource: string): Column<Record<string, unknown>>[] {
|
|
||||||
switch (resource) {
|
|
||||||
case 'servers':
|
|
||||||
return serverColumns as unknown as Column<Record<string, unknown>>[];
|
|
||||||
case 'projects':
|
|
||||||
return projectColumns as unknown as Column<Record<string, unknown>>[];
|
|
||||||
case 'secrets':
|
|
||||||
return secretColumns as unknown as Column<Record<string, unknown>>[];
|
|
||||||
case 'templates':
|
|
||||||
return templateColumns as unknown as Column<Record<string, unknown>>[];
|
|
||||||
case 'instances':
|
|
||||||
return instanceColumns as unknown as Column<Record<string, unknown>>[];
|
|
||||||
case 'users':
|
|
||||||
return userColumns as unknown as Column<Record<string, unknown>>[];
|
|
||||||
case 'groups':
|
|
||||||
return groupColumns as unknown as Column<Record<string, unknown>>[];
|
|
||||||
case 'rbac':
|
|
||||||
return rbacColumns as unknown as Column<Record<string, unknown>>[];
|
|
||||||
default:
|
|
||||||
return [
|
|
||||||
{ header: 'ID', key: 'id' as keyof Record<string, unknown> },
|
|
||||||
{ header: 'NAME', key: 'name' as keyof Record<string, unknown> },
|
|
||||||
];
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Transform API response items into apply-compatible format.
|
|
||||||
* Strips internal fields and wraps in the resource key.
|
|
||||||
*/
|
|
||||||
function toApplyFormat(resource: string, items: unknown[]): Record<string, unknown[]> {
|
|
||||||
const cleaned = items.map((item) => {
|
|
||||||
return stripInternalFields(item as Record<string, unknown>);
|
|
||||||
});
|
|
||||||
return { [resource]: cleaned };
|
|
||||||
}
|
|
||||||
|
|
||||||
export function createGetCommand(deps: GetCommandDeps): Command {
|
|
||||||
return new Command('get')
|
|
||||||
.description('List resources (servers, projects, instances)')
|
|
||||||
.argument('<resource>', 'resource type (servers, projects, instances)')
|
|
||||||
.argument('[id]', 'specific resource ID or name')
|
|
||||||
.option('-o, --output <format>', 'output format (table, json, yaml)', 'table')
|
|
||||||
.action(async (resourceArg: string, id: string | undefined, opts: { output: string }) => {
|
|
||||||
const resource = resolveResource(resourceArg);
|
|
||||||
const items = await deps.fetchResource(resource, id);
|
|
||||||
|
|
||||||
if (opts.output === 'json') {
|
|
||||||
// Apply-compatible JSON wrapped in resource key
|
|
||||||
deps.log(formatJson(toApplyFormat(resource, items)));
|
|
||||||
} else if (opts.output === 'yaml') {
|
|
||||||
// Apply-compatible YAML wrapped in resource key
|
|
||||||
deps.log(formatYaml(toApplyFormat(resource, items)));
|
|
||||||
} else {
|
|
||||||
if (items.length === 0) {
|
|
||||||
deps.log(`No ${resource} found.`);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
const columns = getColumnsForResource(resource);
|
|
||||||
deps.log(formatTable(items as Record<string, unknown>[], columns));
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}
|
|
||||||
@@ -1,98 +0,0 @@
|
|||||||
import { Command } from 'commander';
|
|
||||||
import type { ApiClient } from '../api-client.js';
|
|
||||||
|
|
||||||
export interface LogsCommandDeps {
|
|
||||||
client: ApiClient;
|
|
||||||
log: (...args: unknown[]) => void;
|
|
||||||
}
|
|
||||||
|
|
||||||
interface InstanceInfo {
|
|
||||||
id: string;
|
|
||||||
status: string;
|
|
||||||
containerId: string | null;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Resolve a name/ID to an instance ID.
|
|
||||||
* Accepts: instance ID, server name, or server ID.
|
|
||||||
* For servers with multiple replicas, picks by --instance index or first RUNNING.
|
|
||||||
*/
|
|
||||||
async function resolveInstance(
|
|
||||||
client: ApiClient,
|
|
||||||
nameOrId: string,
|
|
||||||
instanceIndex?: number,
|
|
||||||
): Promise<{ instanceId: string; serverName?: string; replicaInfo?: string }> {
|
|
||||||
// Try as instance ID first
|
|
||||||
try {
|
|
||||||
await client.get(`/api/v1/instances/${nameOrId}`);
|
|
||||||
return { instanceId: nameOrId };
|
|
||||||
} catch {
|
|
||||||
// Not a valid instance ID
|
|
||||||
}
|
|
||||||
|
|
||||||
// Try as server name/ID → find its instances
|
|
||||||
const servers = await client.get<Array<{ id: string; name: string }>>('/api/v1/servers');
|
|
||||||
const server = servers.find((s) => s.name === nameOrId || s.id === nameOrId);
|
|
||||||
if (!server) {
|
|
||||||
throw new Error(`Instance or server '${nameOrId}' not found`);
|
|
||||||
}
|
|
||||||
|
|
||||||
const instances = await client.get<InstanceInfo[]>(`/api/v1/instances?serverId=${server.id}`);
|
|
||||||
if (instances.length === 0) {
|
|
||||||
throw new Error(`No instances found for server '${server.name}'`);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Select by index or pick first running
|
|
||||||
let selected: InstanceInfo | undefined;
|
|
||||||
if (instanceIndex !== undefined) {
|
|
||||||
if (instanceIndex < 0 || instanceIndex >= instances.length) {
|
|
||||||
throw new Error(`Instance index ${instanceIndex} out of range (server '${server.name}' has ${instances.length} instance${instances.length > 1 ? 's' : ''})`);
|
|
||||||
}
|
|
||||||
selected = instances[instanceIndex];
|
|
||||||
} else {
|
|
||||||
selected = instances.find((i) => i.status === 'RUNNING') ?? instances[0];
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!selected) {
|
|
||||||
throw new Error(`No instances found for server '${server.name}'`);
|
|
||||||
}
|
|
||||||
|
|
||||||
const result: { instanceId: string; serverName?: string; replicaInfo?: string } = {
|
|
||||||
instanceId: selected.id,
|
|
||||||
serverName: server.name,
|
|
||||||
};
|
|
||||||
if (instances.length > 1) {
|
|
||||||
result.replicaInfo = `instance ${instances.indexOf(selected) + 1}/${instances.length}`;
|
|
||||||
}
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
|
|
||||||
export function createLogsCommand(deps: LogsCommandDeps): Command {
|
|
||||||
const { client, log } = deps;
|
|
||||||
|
|
||||||
return new Command('logs')
|
|
||||||
.description('Get logs from an MCP server instance')
|
|
||||||
.argument('<name>', 'Server name, server ID, or instance ID')
|
|
||||||
.option('-t, --tail <lines>', 'Number of lines to show')
|
|
||||||
.option('-i, --instance <index>', 'Instance/replica index (0-based, for servers with multiple replicas)')
|
|
||||||
.action(async (nameOrId: string, opts: { tail?: string; instance?: string }) => {
|
|
||||||
const instanceIndex = opts.instance !== undefined ? parseInt(opts.instance, 10) : undefined;
|
|
||||||
const { instanceId, serverName, replicaInfo } = await resolveInstance(client, nameOrId, instanceIndex);
|
|
||||||
|
|
||||||
if (replicaInfo) {
|
|
||||||
process.stderr.write(`Showing logs for ${serverName} (${replicaInfo})\n`);
|
|
||||||
}
|
|
||||||
|
|
||||||
let url = `/api/v1/instances/${instanceId}/logs`;
|
|
||||||
if (opts.tail) {
|
|
||||||
url += `?tail=${opts.tail}`;
|
|
||||||
}
|
|
||||||
const logs = await client.get<{ stdout: string; stderr: string }>(url);
|
|
||||||
if (logs.stdout) {
|
|
||||||
log(logs.stdout);
|
|
||||||
}
|
|
||||||
if (logs.stderr) {
|
|
||||||
process.stderr.write(logs.stderr);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}
|
|
||||||
@@ -1,47 +0,0 @@
|
|||||||
import { Command } from 'commander';
|
|
||||||
import type { ApiClient } from '../api-client.js';
|
|
||||||
import { resolveNameOrId } from './shared.js';
|
|
||||||
|
|
||||||
export interface ProjectOpsDeps {
|
|
||||||
client: ApiClient;
|
|
||||||
log: (...args: string[]) => void;
|
|
||||||
getProject: () => string | undefined;
|
|
||||||
}
|
|
||||||
|
|
||||||
function requireProject(deps: ProjectOpsDeps): string {
|
|
||||||
const project = deps.getProject();
|
|
||||||
if (!project) {
|
|
||||||
deps.log('Error: --project <name> is required for this command.');
|
|
||||||
process.exitCode = 1;
|
|
||||||
throw new Error('--project required');
|
|
||||||
}
|
|
||||||
return project;
|
|
||||||
}
|
|
||||||
|
|
||||||
export function createAttachServerCommand(deps: ProjectOpsDeps): Command {
|
|
||||||
const { client, log } = deps;
|
|
||||||
|
|
||||||
return new Command('attach-server')
|
|
||||||
.description('Attach a server to a project (requires --project)')
|
|
||||||
.argument('<server-name>', 'Server name to attach')
|
|
||||||
.action(async (serverName: string) => {
|
|
||||||
const projectName = requireProject(deps);
|
|
||||||
const projectId = await resolveNameOrId(client, 'projects', projectName);
|
|
||||||
await client.post(`/api/v1/projects/${projectId}/servers`, { server: serverName });
|
|
||||||
log(`server '${serverName}' attached to project '${projectName}'`);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
export function createDetachServerCommand(deps: ProjectOpsDeps): Command {
|
|
||||||
const { client, log } = deps;
|
|
||||||
|
|
||||||
return new Command('detach-server')
|
|
||||||
.description('Detach a server from a project (requires --project)')
|
|
||||||
.argument('<server-name>', 'Server name to detach')
|
|
||||||
.action(async (serverName: string) => {
|
|
||||||
const projectName = requireProject(deps);
|
|
||||||
const projectId = await resolveNameOrId(client, 'projects', projectName);
|
|
||||||
await client.delete(`/api/v1/projects/${projectId}/servers/${serverName}`);
|
|
||||||
log(`server '${serverName}' detached from project '${projectName}'`);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
@@ -1,63 +0,0 @@
|
|||||||
import type { ApiClient } from '../api-client.js';
|
|
||||||
|
|
||||||
export const RESOURCE_ALIASES: Record<string, string> = {
|
|
||||||
server: 'servers',
|
|
||||||
srv: 'servers',
|
|
||||||
project: 'projects',
|
|
||||||
proj: 'projects',
|
|
||||||
instance: 'instances',
|
|
||||||
inst: 'instances',
|
|
||||||
secret: 'secrets',
|
|
||||||
sec: 'secrets',
|
|
||||||
template: 'templates',
|
|
||||||
tpl: 'templates',
|
|
||||||
user: 'users',
|
|
||||||
group: 'groups',
|
|
||||||
rbac: 'rbac',
|
|
||||||
'rbac-definition': 'rbac',
|
|
||||||
'rbac-binding': 'rbac',
|
|
||||||
};
|
|
||||||
|
|
||||||
export function resolveResource(name: string): string {
|
|
||||||
const lower = name.toLowerCase();
|
|
||||||
return RESOURCE_ALIASES[lower] ?? lower;
|
|
||||||
}
|
|
||||||
|
|
||||||
/** Resolve a name-or-ID to an ID. CUIDs pass through; names are looked up. */
|
|
||||||
export async function resolveNameOrId(
|
|
||||||
client: ApiClient,
|
|
||||||
resource: string,
|
|
||||||
nameOrId: string,
|
|
||||||
): Promise<string> {
|
|
||||||
// CUIDs start with 'c' followed by 24+ alphanumeric chars
|
|
||||||
if (/^c[a-z0-9]{24}/.test(nameOrId)) {
|
|
||||||
return nameOrId;
|
|
||||||
}
|
|
||||||
// Users resolve by email, not name
|
|
||||||
if (resource === 'users') {
|
|
||||||
const items = await client.get<Array<{ id: string; email: string }>>(`/api/v1/${resource}`);
|
|
||||||
const match = items.find((item) => item.email === nameOrId);
|
|
||||||
if (match) return match.id;
|
|
||||||
throw new Error(`user '${nameOrId}' not found`);
|
|
||||||
}
|
|
||||||
const items = await client.get<Array<Record<string, unknown>>>(`/api/v1/${resource}`);
|
|
||||||
const match = items.find((item) => {
|
|
||||||
// Instances use server.name, other resources use name directly
|
|
||||||
if (resource === 'instances') {
|
|
||||||
const server = item.server as { name?: string } | undefined;
|
|
||||||
return server?.name === nameOrId;
|
|
||||||
}
|
|
||||||
return item.name === nameOrId;
|
|
||||||
});
|
|
||||||
if (match) return match.id as string;
|
|
||||||
throw new Error(`${resource.replace(/s$/, '')} '${nameOrId}' not found`);
|
|
||||||
}
|
|
||||||
|
|
||||||
/** Strip internal/read-only fields from an API response to make it apply-compatible. */
|
|
||||||
export function stripInternalFields(obj: Record<string, unknown>): Record<string, unknown> {
|
|
||||||
const result = { ...obj };
|
|
||||||
for (const key of ['id', 'createdAt', 'updatedAt', 'version', 'ownerId']) {
|
|
||||||
delete result[key];
|
|
||||||
}
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
@@ -2,19 +2,16 @@ import { Command } from 'commander';
|
|||||||
import http from 'node:http';
|
import http from 'node:http';
|
||||||
import { loadConfig } from '../config/index.js';
|
import { loadConfig } from '../config/index.js';
|
||||||
import type { ConfigLoaderDeps } from '../config/index.js';
|
import type { ConfigLoaderDeps } from '../config/index.js';
|
||||||
import { loadCredentials } from '../auth/index.js';
|
|
||||||
import type { CredentialsDeps } from '../auth/index.js';
|
|
||||||
import { formatJson, formatYaml } from '../formatters/index.js';
|
import { formatJson, formatYaml } from '../formatters/index.js';
|
||||||
import { APP_VERSION } from '@mcpctl/shared';
|
import { APP_VERSION } from '@mcpctl/shared';
|
||||||
|
|
||||||
export interface StatusCommandDeps {
|
export interface StatusCommandDeps {
|
||||||
configDeps: Partial<ConfigLoaderDeps>;
|
configDeps: Partial<ConfigLoaderDeps>;
|
||||||
credentialsDeps: Partial<CredentialsDeps>;
|
|
||||||
log: (...args: string[]) => void;
|
log: (...args: string[]) => void;
|
||||||
checkHealth: (url: string) => Promise<boolean>;
|
checkDaemon: (url: string) => Promise<boolean>;
|
||||||
}
|
}
|
||||||
|
|
||||||
function defaultCheckHealth(url: string): Promise<boolean> {
|
function defaultCheckDaemon(url: string): Promise<boolean> {
|
||||||
return new Promise((resolve) => {
|
return new Promise((resolve) => {
|
||||||
const req = http.get(`${url}/health`, { timeout: 3000 }, (res) => {
|
const req = http.get(`${url}/health`, { timeout: 3000 }, (res) => {
|
||||||
resolve(res.statusCode !== undefined && res.statusCode >= 200 && res.statusCode < 400);
|
resolve(res.statusCode !== undefined && res.statusCode >= 200 && res.statusCode < 400);
|
||||||
@@ -30,33 +27,24 @@ function defaultCheckHealth(url: string): Promise<boolean> {
|
|||||||
|
|
||||||
const defaultDeps: StatusCommandDeps = {
|
const defaultDeps: StatusCommandDeps = {
|
||||||
configDeps: {},
|
configDeps: {},
|
||||||
credentialsDeps: {},
|
|
||||||
log: (...args) => console.log(...args),
|
log: (...args) => console.log(...args),
|
||||||
checkHealth: defaultCheckHealth,
|
checkDaemon: defaultCheckDaemon,
|
||||||
};
|
};
|
||||||
|
|
||||||
export function createStatusCommand(deps?: Partial<StatusCommandDeps>): Command {
|
export function createStatusCommand(deps?: Partial<StatusCommandDeps>): Command {
|
||||||
const { configDeps, credentialsDeps, log, checkHealth } = { ...defaultDeps, ...deps };
|
const { configDeps, log, checkDaemon } = { ...defaultDeps, ...deps };
|
||||||
|
|
||||||
return new Command('status')
|
return new Command('status')
|
||||||
.description('Show mcpctl status and connectivity')
|
.description('Show mcpctl status and connectivity')
|
||||||
.option('-o, --output <format>', 'output format (table, json, yaml)', 'table')
|
.option('-o, --output <format>', 'output format (table, json, yaml)', 'table')
|
||||||
.action(async (opts: { output: string }) => {
|
.action(async (opts: { output: string }) => {
|
||||||
const config = loadConfig(configDeps);
|
const config = loadConfig(configDeps);
|
||||||
const creds = loadCredentials(credentialsDeps);
|
const daemonReachable = await checkDaemon(config.daemonUrl);
|
||||||
|
|
||||||
const [mcplocalReachable, mcpdReachable] = await Promise.all([
|
|
||||||
checkHealth(config.mcplocalUrl),
|
|
||||||
checkHealth(config.mcpdUrl),
|
|
||||||
]);
|
|
||||||
|
|
||||||
const status = {
|
const status = {
|
||||||
version: APP_VERSION,
|
version: APP_VERSION,
|
||||||
mcplocalUrl: config.mcplocalUrl,
|
daemonUrl: config.daemonUrl,
|
||||||
mcplocalReachable,
|
daemonReachable,
|
||||||
mcpdUrl: config.mcpdUrl,
|
|
||||||
mcpdReachable,
|
|
||||||
auth: creds ? { user: creds.user } : null,
|
|
||||||
registries: config.registries,
|
registries: config.registries,
|
||||||
outputFormat: config.outputFormat,
|
outputFormat: config.outputFormat,
|
||||||
};
|
};
|
||||||
@@ -67,9 +55,7 @@ export function createStatusCommand(deps?: Partial<StatusCommandDeps>): Command
|
|||||||
log(formatYaml(status));
|
log(formatYaml(status));
|
||||||
} else {
|
} else {
|
||||||
log(`mcpctl v${status.version}`);
|
log(`mcpctl v${status.version}`);
|
||||||
log(`mcplocal: ${status.mcplocalUrl} (${mcplocalReachable ? 'connected' : 'unreachable'})`);
|
log(`Daemon: ${status.daemonUrl} (${daemonReachable ? 'connected' : 'unreachable'})`);
|
||||||
log(`mcpd: ${status.mcpdUrl} (${mcpdReachable ? 'connected' : 'unreachable'})`);
|
|
||||||
log(`Auth: ${creds ? `logged in as ${creds.user}` : 'not logged in'}`);
|
|
||||||
log(`Registries: ${status.registries.join(', ')}`);
|
log(`Registries: ${status.registries.join(', ')}`);
|
||||||
log(`Output: ${status.outputFormat}`);
|
log(`Output: ${status.outputFormat}`);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,12 +1,8 @@
|
|||||||
import { z } from 'zod';
|
import { z } from 'zod';
|
||||||
|
|
||||||
export const McpctlConfigSchema = z.object({
|
export const McpctlConfigSchema = z.object({
|
||||||
/** mcplocal daemon endpoint (local LLM pre-processing proxy) */
|
/** mcpd daemon endpoint */
|
||||||
mcplocalUrl: z.string().default('http://localhost:3200'),
|
daemonUrl: z.string().default('http://localhost:3000'),
|
||||||
/** mcpd daemon endpoint (remote instance manager) */
|
|
||||||
mcpdUrl: z.string().default('http://localhost:3100'),
|
|
||||||
/** @deprecated Use mcplocalUrl instead. Kept for backward compatibility. */
|
|
||||||
daemonUrl: z.string().optional(),
|
|
||||||
/** Active registries for search */
|
/** Active registries for search */
|
||||||
registries: z.array(z.enum(['official', 'glama', 'smithery'])).default(['official', 'glama', 'smithery']),
|
registries: z.array(z.enum(['official', 'glama', 'smithery'])).default(['official', 'glama', 'smithery']),
|
||||||
/** Cache TTL in milliseconds */
|
/** Cache TTL in milliseconds */
|
||||||
@@ -19,13 +15,6 @@ export const McpctlConfigSchema = z.object({
|
|||||||
outputFormat: z.enum(['table', 'json', 'yaml']).default('table'),
|
outputFormat: z.enum(['table', 'json', 'yaml']).default('table'),
|
||||||
/** Smithery API key */
|
/** Smithery API key */
|
||||||
smitheryApiKey: z.string().optional(),
|
smitheryApiKey: z.string().optional(),
|
||||||
}).transform((cfg) => {
|
|
||||||
// Backward compatibility: if old daemonUrl is set but mcplocalUrl wasn't explicitly changed,
|
|
||||||
// use daemonUrl as mcplocalUrl
|
|
||||||
if (cfg.daemonUrl && cfg.mcplocalUrl === 'http://localhost:3200') {
|
|
||||||
return { ...cfg, mcplocalUrl: cfg.daemonUrl };
|
|
||||||
}
|
|
||||||
return cfg;
|
|
||||||
});
|
});
|
||||||
|
|
||||||
export type McpctlConfig = z.infer<typeof McpctlConfigSchema>;
|
export type McpctlConfig = z.infer<typeof McpctlConfigSchema>;
|
||||||
|
|||||||
@@ -3,138 +3,17 @@ import { Command } from 'commander';
|
|||||||
import { APP_NAME, APP_VERSION } from '@mcpctl/shared';
|
import { APP_NAME, APP_VERSION } from '@mcpctl/shared';
|
||||||
import { createConfigCommand } from './commands/config.js';
|
import { createConfigCommand } from './commands/config.js';
|
||||||
import { createStatusCommand } from './commands/status.js';
|
import { createStatusCommand } from './commands/status.js';
|
||||||
import { createGetCommand } from './commands/get.js';
|
|
||||||
import { createDescribeCommand } from './commands/describe.js';
|
|
||||||
import { createDeleteCommand } from './commands/delete.js';
|
|
||||||
import { createLogsCommand } from './commands/logs.js';
|
|
||||||
import { createApplyCommand } from './commands/apply.js';
|
|
||||||
import { createCreateCommand } from './commands/create.js';
|
|
||||||
import { createEditCommand } from './commands/edit.js';
|
|
||||||
import { createBackupCommand, createRestoreCommand } from './commands/backup.js';
|
|
||||||
import { createLoginCommand, createLogoutCommand } from './commands/auth.js';
|
|
||||||
import { createAttachServerCommand, createDetachServerCommand } from './commands/project-ops.js';
|
|
||||||
import { ApiClient, ApiError } from './api-client.js';
|
|
||||||
import { loadConfig } from './config/index.js';
|
|
||||||
import { loadCredentials } from './auth/index.js';
|
|
||||||
import { resolveNameOrId } from './commands/shared.js';
|
|
||||||
|
|
||||||
export function createProgram(): Command {
|
export function createProgram(): Command {
|
||||||
const program = new Command()
|
const program = new Command()
|
||||||
.name(APP_NAME)
|
.name(APP_NAME)
|
||||||
.description('Manage MCP servers like kubectl manages containers')
|
.description('Manage MCP servers like kubectl manages containers')
|
||||||
.version(APP_VERSION, '-v, --version')
|
.version(APP_VERSION, '-v, --version')
|
||||||
.enablePositionalOptions()
|
.option('-o, --output <format>', 'output format (table, json, yaml)', 'table')
|
||||||
.option('--daemon-url <url>', 'mcplocal daemon URL')
|
.option('--daemon-url <url>', 'mcpd daemon URL');
|
||||||
.option('--direct', 'bypass mcplocal and connect directly to mcpd')
|
|
||||||
.option('--project <name>', 'Target project for project commands');
|
|
||||||
|
|
||||||
|
program.addCommand(createConfigCommand());
|
||||||
program.addCommand(createStatusCommand());
|
program.addCommand(createStatusCommand());
|
||||||
program.addCommand(createLoginCommand());
|
|
||||||
program.addCommand(createLogoutCommand());
|
|
||||||
|
|
||||||
// Resolve target URL: --direct goes to mcpd, default goes to mcplocal
|
|
||||||
const config = loadConfig();
|
|
||||||
const creds = loadCredentials();
|
|
||||||
const opts = program.opts();
|
|
||||||
let baseUrl: string;
|
|
||||||
if (opts.daemonUrl) {
|
|
||||||
baseUrl = opts.daemonUrl as string;
|
|
||||||
} else if (opts.direct) {
|
|
||||||
baseUrl = config.mcpdUrl;
|
|
||||||
} else {
|
|
||||||
baseUrl = config.mcplocalUrl;
|
|
||||||
}
|
|
||||||
|
|
||||||
const client = new ApiClient({ baseUrl, token: creds?.token ?? undefined });
|
|
||||||
|
|
||||||
program.addCommand(createConfigCommand(undefined, {
|
|
||||||
client,
|
|
||||||
credentialsDeps: {},
|
|
||||||
log: (...args) => console.log(...args),
|
|
||||||
}));
|
|
||||||
|
|
||||||
const fetchResource = async (resource: string, nameOrId?: string): Promise<unknown[]> => {
|
|
||||||
if (nameOrId) {
|
|
||||||
// Glob pattern — use query param filtering
|
|
||||||
if (nameOrId.includes('*')) {
|
|
||||||
return client.get<unknown[]>(`/api/v1/${resource}?name=${encodeURIComponent(nameOrId)}`);
|
|
||||||
}
|
|
||||||
let id: string;
|
|
||||||
try {
|
|
||||||
id = await resolveNameOrId(client, resource, nameOrId);
|
|
||||||
} catch {
|
|
||||||
id = nameOrId;
|
|
||||||
}
|
|
||||||
const item = await client.get(`/api/v1/${resource}/${id}`);
|
|
||||||
return [item];
|
|
||||||
}
|
|
||||||
return client.get<unknown[]>(`/api/v1/${resource}`);
|
|
||||||
};
|
|
||||||
|
|
||||||
const fetchSingleResource = async (resource: string, nameOrId: string): Promise<unknown> => {
|
|
||||||
let id: string;
|
|
||||||
try {
|
|
||||||
id = await resolveNameOrId(client, resource, nameOrId);
|
|
||||||
} catch {
|
|
||||||
id = nameOrId;
|
|
||||||
}
|
|
||||||
return client.get(`/api/v1/${resource}/${id}`);
|
|
||||||
};
|
|
||||||
|
|
||||||
program.addCommand(createGetCommand({
|
|
||||||
fetchResource,
|
|
||||||
log: (...args) => console.log(...args),
|
|
||||||
}));
|
|
||||||
|
|
||||||
program.addCommand(createDescribeCommand({
|
|
||||||
client,
|
|
||||||
fetchResource: fetchSingleResource,
|
|
||||||
fetchInspect: async (id: string) => client.get(`/api/v1/instances/${id}/inspect`),
|
|
||||||
log: (...args) => console.log(...args),
|
|
||||||
}));
|
|
||||||
|
|
||||||
program.addCommand(createDeleteCommand({
|
|
||||||
client,
|
|
||||||
log: (...args) => console.log(...args),
|
|
||||||
}));
|
|
||||||
|
|
||||||
program.addCommand(createLogsCommand({
|
|
||||||
client,
|
|
||||||
log: (...args) => console.log(...args),
|
|
||||||
}));
|
|
||||||
|
|
||||||
program.addCommand(createCreateCommand({
|
|
||||||
client,
|
|
||||||
log: (...args) => console.log(...args),
|
|
||||||
}));
|
|
||||||
|
|
||||||
program.addCommand(createEditCommand({
|
|
||||||
client,
|
|
||||||
log: (...args) => console.log(...args),
|
|
||||||
}));
|
|
||||||
|
|
||||||
program.addCommand(createApplyCommand({
|
|
||||||
client,
|
|
||||||
log: (...args) => console.log(...args),
|
|
||||||
}));
|
|
||||||
|
|
||||||
program.addCommand(createBackupCommand({
|
|
||||||
client,
|
|
||||||
log: (...args) => console.log(...args),
|
|
||||||
}));
|
|
||||||
|
|
||||||
program.addCommand(createRestoreCommand({
|
|
||||||
client,
|
|
||||||
log: (...args) => console.log(...args),
|
|
||||||
}));
|
|
||||||
|
|
||||||
const projectOpsDeps = {
|
|
||||||
client,
|
|
||||||
log: (...args: string[]) => console.log(...args),
|
|
||||||
getProject: () => program.opts().project as string | undefined,
|
|
||||||
};
|
|
||||||
program.addCommand(createAttachServerCommand(projectOpsDeps));
|
|
||||||
program.addCommand(createDetachServerCommand(projectOpsDeps));
|
|
||||||
|
|
||||||
return program;
|
return program;
|
||||||
}
|
}
|
||||||
@@ -146,35 +25,5 @@ const isDirectRun =
|
|||||||
import.meta.url === `file://${process.argv[1]}`;
|
import.meta.url === `file://${process.argv[1]}`;
|
||||||
|
|
||||||
if (isDirectRun) {
|
if (isDirectRun) {
|
||||||
createProgram().parseAsync(process.argv).catch((err: unknown) => {
|
createProgram().parseAsync(process.argv);
|
||||||
if (err instanceof ApiError) {
|
|
||||||
if (err.status === 401) {
|
|
||||||
console.error("Error: you need to log in. Run 'mcpctl login' to authenticate.");
|
|
||||||
} else if (err.status === 403) {
|
|
||||||
console.error('Error: permission denied. You do not have access to this resource.');
|
|
||||||
} else {
|
|
||||||
let msg: string;
|
|
||||||
try {
|
|
||||||
const parsed = JSON.parse(err.body) as { error?: string; message?: string; details?: unknown };
|
|
||||||
msg = parsed.error ?? parsed.message ?? err.body;
|
|
||||||
if (parsed.details && Array.isArray(parsed.details)) {
|
|
||||||
const issues = parsed.details as Array<{ message?: string; path?: string[] }>;
|
|
||||||
const detail = issues.map((i) => {
|
|
||||||
const path = i.path?.join('.') ?? '';
|
|
||||||
return path ? `${path}: ${i.message}` : (i.message ?? '');
|
|
||||||
}).filter(Boolean).join('; ');
|
|
||||||
if (detail) msg += `: ${detail}`;
|
|
||||||
}
|
|
||||||
} catch {
|
|
||||||
msg = err.body;
|
|
||||||
}
|
|
||||||
console.error(`Error: ${msg}`);
|
|
||||||
}
|
|
||||||
} else if (err instanceof Error) {
|
|
||||||
console.error(`Error: ${err.message}`);
|
|
||||||
} else {
|
|
||||||
console.error(`Error: ${String(err)}`);
|
|
||||||
}
|
|
||||||
process.exit(1);
|
|
||||||
});
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,105 +0,0 @@
|
|||||||
import type { RegistryServer, SearchOptions, RegistryClientConfig, RegistryName } from './types.js';
|
|
||||||
import { RegistrySource } from './base.js';
|
|
||||||
import { OfficialRegistrySource } from './sources/official.js';
|
|
||||||
import { GlamaRegistrySource } from './sources/glama.js';
|
|
||||||
import { SmitheryRegistrySource } from './sources/smithery.js';
|
|
||||||
import { RegistryCache } from './cache.js';
|
|
||||||
import { deduplicateResults } from './dedup.js';
|
|
||||||
import { rankResults } from './ranking.js';
|
|
||||||
|
|
||||||
export class RegistryClient {
|
|
||||||
private sources: Map<RegistryName, RegistrySource>;
|
|
||||||
private cache: RegistryCache;
|
|
||||||
private enabledRegistries: RegistryName[];
|
|
||||||
private metrics = {
|
|
||||||
queryLatencies: new Map<string, number[]>(),
|
|
||||||
errorCounts: new Map<string, number>(),
|
|
||||||
};
|
|
||||||
|
|
||||||
constructor(config: RegistryClientConfig = {}) {
|
|
||||||
this.enabledRegistries = config.registries ?? ['official', 'glama', 'smithery'];
|
|
||||||
this.cache = new RegistryCache(config.cacheTTLMs);
|
|
||||||
|
|
||||||
this.sources = new Map<RegistryName, RegistrySource>([
|
|
||||||
['official', new OfficialRegistrySource()],
|
|
||||||
['glama', new GlamaRegistrySource()],
|
|
||||||
['smithery', new SmitheryRegistrySource()],
|
|
||||||
]);
|
|
||||||
}
|
|
||||||
|
|
||||||
async search(options: SearchOptions): Promise<RegistryServer[]> {
|
|
||||||
// Check cache
|
|
||||||
const cached = this.cache.get(options.query, options);
|
|
||||||
if (cached !== null) {
|
|
||||||
return cached;
|
|
||||||
}
|
|
||||||
|
|
||||||
const registries = options.registries ?? this.enabledRegistries;
|
|
||||||
const limit = options.limit ?? 20;
|
|
||||||
|
|
||||||
// Query all enabled registries in parallel
|
|
||||||
const promises = registries
|
|
||||||
.map((name) => this.sources.get(name))
|
|
||||||
.filter((source): source is RegistrySource => source !== undefined)
|
|
||||||
.map(async (source) => {
|
|
||||||
const start = Date.now();
|
|
||||||
try {
|
|
||||||
const results = await source.search(options.query, limit);
|
|
||||||
this.recordLatency(source.name, Date.now() - start);
|
|
||||||
return results;
|
|
||||||
} catch (error) {
|
|
||||||
this.recordError(source.name);
|
|
||||||
// Graceful degradation: log and continue
|
|
||||||
return [];
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
const settled = await Promise.all(promises);
|
|
||||||
let combined = settled.flat();
|
|
||||||
|
|
||||||
// Apply filters
|
|
||||||
if (options.verified === true) {
|
|
||||||
combined = combined.filter((s) => s.verified);
|
|
||||||
}
|
|
||||||
if (options.transport !== undefined) {
|
|
||||||
combined = combined.filter((s) => s.transport === options.transport);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Deduplicate, rank, and limit
|
|
||||||
const deduped = deduplicateResults(combined);
|
|
||||||
const ranked = rankResults(deduped, options.query);
|
|
||||||
const results = ranked.slice(0, limit);
|
|
||||||
|
|
||||||
// Cache results
|
|
||||||
this.cache.set(options.query, options, results);
|
|
||||||
|
|
||||||
return results;
|
|
||||||
}
|
|
||||||
|
|
||||||
getCacheMetrics(): { hits: number; misses: number; ratio: number } {
|
|
||||||
return this.cache.getHitRatio();
|
|
||||||
}
|
|
||||||
|
|
||||||
getQueryLatencies(): Map<string, number[]> {
|
|
||||||
return new Map(this.metrics.queryLatencies);
|
|
||||||
}
|
|
||||||
|
|
||||||
getErrorCounts(): Map<string, number> {
|
|
||||||
return new Map(this.metrics.errorCounts);
|
|
||||||
}
|
|
||||||
|
|
||||||
clearCache(): void {
|
|
||||||
this.cache.clear();
|
|
||||||
}
|
|
||||||
|
|
||||||
private recordLatency(source: string, ms: number): void {
|
|
||||||
const existing = this.metrics.queryLatencies.get(source) ?? [];
|
|
||||||
existing.push(ms);
|
|
||||||
this.metrics.queryLatencies.set(source, existing);
|
|
||||||
}
|
|
||||||
|
|
||||||
private recordError(source: string): void {
|
|
||||||
const count = this.metrics.errorCounts.get(source) ?? 0;
|
|
||||||
this.metrics.errorCounts.set(source, count + 1);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,17 +0,0 @@
|
|||||||
export { RegistryClient } from './client.js';
|
|
||||||
export { RegistryCache } from './cache.js';
|
|
||||||
export { RegistrySource } from './base.js';
|
|
||||||
export { deduplicateResults } from './dedup.js';
|
|
||||||
export { rankResults } from './ranking.js';
|
|
||||||
export { withRetry } from './retry.js';
|
|
||||||
export { OfficialRegistrySource } from './sources/official.js';
|
|
||||||
export { GlamaRegistrySource } from './sources/glama.js';
|
|
||||||
export { SmitheryRegistrySource } from './sources/smithery.js';
|
|
||||||
export type {
|
|
||||||
RegistryServer,
|
|
||||||
SearchOptions,
|
|
||||||
RegistryClientConfig,
|
|
||||||
RegistryName,
|
|
||||||
EnvVar,
|
|
||||||
} from './types.js';
|
|
||||||
export { sanitizeString } from './types.js';
|
|
||||||
@@ -1,63 +0,0 @@
|
|||||||
import type { RegistryServer } from './types.js';
|
|
||||||
|
|
||||||
const WEIGHT_RELEVANCE = 0.4;
|
|
||||||
const WEIGHT_POPULARITY = 0.3;
|
|
||||||
const WEIGHT_VERIFIED = 0.2;
|
|
||||||
const WEIGHT_RECENCY = 0.1;
|
|
||||||
|
|
||||||
function textRelevance(server: RegistryServer, query: string): number {
|
|
||||||
const q = query.toLowerCase();
|
|
||||||
const name = server.name.toLowerCase();
|
|
||||||
const desc = server.description.toLowerCase();
|
|
||||||
|
|
||||||
// Exact name match
|
|
||||||
if (name === q) return 1.0;
|
|
||||||
// Name starts with query
|
|
||||||
if (name.startsWith(q)) return 0.9;
|
|
||||||
// Name contains query
|
|
||||||
if (name.includes(q)) return 0.7;
|
|
||||||
// Description contains query
|
|
||||||
if (desc.includes(q)) return 0.4;
|
|
||||||
|
|
||||||
// Word-level matching
|
|
||||||
const queryWords = q.split(/\s+/);
|
|
||||||
const matchCount = queryWords.filter(
|
|
||||||
(w) => name.includes(w) || desc.includes(w),
|
|
||||||
).length;
|
|
||||||
return queryWords.length > 0 ? (matchCount / queryWords.length) * 0.3 : 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
function popularityScore(server: RegistryServer): number {
|
|
||||||
// Normalize to 0-1 range; use log scale since popularity can vary hugely
|
|
||||||
if (server.popularityScore <= 0) return 0;
|
|
||||||
// Log scale: log10(1) = 0, log10(10000) ≈ 4 → normalize to 0-1 with cap at 100k
|
|
||||||
return Math.min(Math.log10(server.popularityScore + 1) / 5, 1.0);
|
|
||||||
}
|
|
||||||
|
|
||||||
function verifiedScore(server: RegistryServer): number {
|
|
||||||
return server.verified ? 1.0 : 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
function recencyScore(server: RegistryServer): number {
|
|
||||||
if (server.lastUpdated === undefined) return 0.5; // Unknown = middle score
|
|
||||||
const ageMs = Date.now() - server.lastUpdated.getTime();
|
|
||||||
const ageDays = ageMs / (1000 * 60 * 60 * 24);
|
|
||||||
// Less than 30 days = 1.0, decays to 0 at 365 days
|
|
||||||
return Math.max(0, 1 - ageDays / 365);
|
|
||||||
}
|
|
||||||
|
|
||||||
function computeScore(server: RegistryServer, query: string): number {
|
|
||||||
return (
|
|
||||||
WEIGHT_RELEVANCE * textRelevance(server, query) +
|
|
||||||
WEIGHT_POPULARITY * popularityScore(server) +
|
|
||||||
WEIGHT_VERIFIED * verifiedScore(server) +
|
|
||||||
WEIGHT_RECENCY * recencyScore(server)
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
export function rankResults(
|
|
||||||
results: RegistryServer[],
|
|
||||||
query: string,
|
|
||||||
): RegistryServer[] {
|
|
||||||
return [...results].sort((a, b) => computeScore(b, query) - computeScore(a, query));
|
|
||||||
}
|
|
||||||
@@ -1,16 +0,0 @@
|
|||||||
export async function withRetry<T>(
|
|
||||||
fn: () => Promise<T>,
|
|
||||||
maxRetries = 3,
|
|
||||||
baseDelay = 1000,
|
|
||||||
): Promise<T> {
|
|
||||||
for (let attempt = 0; attempt < maxRetries; attempt++) {
|
|
||||||
try {
|
|
||||||
return await fn();
|
|
||||||
} catch (error) {
|
|
||||||
if (attempt === maxRetries - 1) throw error;
|
|
||||||
const delay = baseDelay * Math.pow(2, attempt) + Math.random() * 1000;
|
|
||||||
await new Promise((r) => setTimeout(r, delay));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
throw new Error('Unreachable');
|
|
||||||
}
|
|
||||||
@@ -1,92 +0,0 @@
|
|||||||
import { RegistrySource } from '../base.js';
|
|
||||||
import {
|
|
||||||
GlamaRegistryResponseSchema,
|
|
||||||
sanitizeString,
|
|
||||||
type GlamaServerEntry,
|
|
||||||
type RegistryServer,
|
|
||||||
} from '../types.js';
|
|
||||||
import { withRetry } from '../retry.js';
|
|
||||||
|
|
||||||
const BASE_URL = 'https://glama.ai/api/mcp/v1/servers';
|
|
||||||
|
|
||||||
export class GlamaRegistrySource extends RegistrySource {
|
|
||||||
readonly name = 'glama' as const;
|
|
||||||
|
|
||||||
async search(query: string, limit: number): Promise<RegistryServer[]> {
|
|
||||||
const results: RegistryServer[] = [];
|
|
||||||
let cursor: string | null | undefined;
|
|
||||||
|
|
||||||
while (results.length < limit) {
|
|
||||||
const url = new URL(BASE_URL);
|
|
||||||
url.searchParams.set('query', query);
|
|
||||||
if (cursor !== undefined && cursor !== null) {
|
|
||||||
url.searchParams.set('after', cursor);
|
|
||||||
}
|
|
||||||
|
|
||||||
const response = await withRetry(() => fetch(url.toString()));
|
|
||||||
if (!response.ok) {
|
|
||||||
throw new Error(`Glama registry returned ${String(response.status)}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
const raw: unknown = await response.json();
|
|
||||||
const parsed = GlamaRegistryResponseSchema.parse(raw);
|
|
||||||
|
|
||||||
for (const entry of parsed.servers) {
|
|
||||||
results.push(this.normalizeResult(entry));
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!parsed.pageInfo.hasNextPage || parsed.servers.length === 0) break;
|
|
||||||
cursor = parsed.pageInfo.endCursor;
|
|
||||||
}
|
|
||||||
|
|
||||||
return results.slice(0, limit);
|
|
||||||
}
|
|
||||||
|
|
||||||
protected normalizeResult(raw: unknown): RegistryServer {
|
|
||||||
const entry = raw as GlamaServerEntry;
|
|
||||||
|
|
||||||
// Extract env vars from JSON Schema
|
|
||||||
const props = entry.environmentVariablesJsonSchema?.properties ?? {};
|
|
||||||
const envTemplate = Object.entries(props).map(([name, schemaProp]) => {
|
|
||||||
const envVar: import('../types.js').EnvVar = {
|
|
||||||
name,
|
|
||||||
description: sanitizeString(schemaProp.description ?? ''),
|
|
||||||
isSecret: name.toLowerCase().includes('token') ||
|
|
||||||
name.toLowerCase().includes('secret') ||
|
|
||||||
name.toLowerCase().includes('password') ||
|
|
||||||
name.toLowerCase().includes('key'),
|
|
||||||
};
|
|
||||||
if (schemaProp.default !== undefined) {
|
|
||||||
envVar.defaultValue = schemaProp.default;
|
|
||||||
}
|
|
||||||
return envVar;
|
|
||||||
});
|
|
||||||
|
|
||||||
// Determine transport from attributes
|
|
||||||
const attrs = entry.attributes;
|
|
||||||
let transport: RegistryServer['transport'] = 'stdio';
|
|
||||||
if (attrs.includes('hosting:remote-capable') || attrs.includes('hosting:hybrid')) {
|
|
||||||
transport = 'sse';
|
|
||||||
}
|
|
||||||
|
|
||||||
const packages: RegistryServer['packages'] = {};
|
|
||||||
if (entry.slug !== '') {
|
|
||||||
packages.npm = entry.slug;
|
|
||||||
}
|
|
||||||
|
|
||||||
const result: RegistryServer = {
|
|
||||||
name: sanitizeString(entry.name),
|
|
||||||
description: sanitizeString(entry.description),
|
|
||||||
packages,
|
|
||||||
envTemplate,
|
|
||||||
transport,
|
|
||||||
popularityScore: 0, // Glama has no popularity metrics in list
|
|
||||||
verified: attrs.includes('author:official'),
|
|
||||||
sourceRegistry: 'glama',
|
|
||||||
};
|
|
||||||
if (entry.repository?.url !== undefined) {
|
|
||||||
result.repositoryUrl = entry.repository.url;
|
|
||||||
}
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,106 +0,0 @@
|
|||||||
import { RegistrySource } from '../base.js';
|
|
||||||
import {
|
|
||||||
OfficialRegistryResponseSchema,
|
|
||||||
sanitizeString,
|
|
||||||
type OfficialServerEntry,
|
|
||||||
type RegistryServer,
|
|
||||||
} from '../types.js';
|
|
||||||
import { withRetry } from '../retry.js';
|
|
||||||
|
|
||||||
const BASE_URL = 'https://registry.modelcontextprotocol.io/v0/servers';
|
|
||||||
|
|
||||||
export class OfficialRegistrySource extends RegistrySource {
|
|
||||||
readonly name = 'official' as const;
|
|
||||||
|
|
||||||
async search(query: string, limit: number): Promise<RegistryServer[]> {
|
|
||||||
const results: RegistryServer[] = [];
|
|
||||||
let cursor: string | null | undefined;
|
|
||||||
|
|
||||||
while (results.length < limit) {
|
|
||||||
const url = new URL(BASE_URL);
|
|
||||||
url.searchParams.set('search', query);
|
|
||||||
url.searchParams.set('limit', String(Math.min(limit - results.length, 100)));
|
|
||||||
if (cursor !== undefined && cursor !== null) {
|
|
||||||
url.searchParams.set('cursor', cursor);
|
|
||||||
}
|
|
||||||
|
|
||||||
const response = await withRetry(() => fetch(url.toString()));
|
|
||||||
if (!response.ok) {
|
|
||||||
throw new Error(`Official registry returned ${String(response.status)}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
const raw: unknown = await response.json();
|
|
||||||
const parsed = OfficialRegistryResponseSchema.parse(raw);
|
|
||||||
|
|
||||||
for (const entry of parsed.servers) {
|
|
||||||
results.push(this.normalizeResult(entry));
|
|
||||||
}
|
|
||||||
|
|
||||||
cursor = parsed.metadata?.nextCursor;
|
|
||||||
if (cursor === null || cursor === undefined || parsed.servers.length === 0) break;
|
|
||||||
}
|
|
||||||
|
|
||||||
return results.slice(0, limit);
|
|
||||||
}
|
|
||||||
|
|
||||||
protected normalizeResult(raw: unknown): RegistryServer {
|
|
||||||
const entry = raw as OfficialServerEntry;
|
|
||||||
const server = entry.server;
|
|
||||||
|
|
||||||
// Extract env vars from packages
|
|
||||||
const envTemplate = server.packages.flatMap((pkg: { environmentVariables: Array<{ name: string; description?: string; isSecret?: boolean }> }) =>
|
|
||||||
pkg.environmentVariables.map((ev: { name: string; description?: string; isSecret?: boolean }) => ({
|
|
||||||
name: ev.name,
|
|
||||||
description: sanitizeString(ev.description ?? ''),
|
|
||||||
isSecret: ev.isSecret ?? false,
|
|
||||||
})),
|
|
||||||
);
|
|
||||||
|
|
||||||
// Determine transport from packages or remotes
|
|
||||||
let transport: RegistryServer['transport'] = 'stdio';
|
|
||||||
if (server.packages.length > 0) {
|
|
||||||
const pkgTransport = server.packages[0]?.transport?.type;
|
|
||||||
if (pkgTransport === 'stdio') transport = 'stdio';
|
|
||||||
}
|
|
||||||
if (server.remotes.length > 0) {
|
|
||||||
const remoteType = server.remotes[0]?.type;
|
|
||||||
if (remoteType === 'sse') transport = 'sse';
|
|
||||||
else if (remoteType === 'streamable-http') transport = 'streamable-http';
|
|
||||||
}
|
|
||||||
|
|
||||||
// Extract npm package identifier
|
|
||||||
const npmPkg = server.packages.find((p: { registryType: string }) => p.registryType === 'npm');
|
|
||||||
const dockerPkg = server.packages.find((p: { registryType: string }) => p.registryType === 'oci');
|
|
||||||
|
|
||||||
// Extract dates from _meta
|
|
||||||
const meta = entry._meta as Record<string, Record<string, unknown>> | undefined;
|
|
||||||
const officialMeta = meta?.['io.modelcontextprotocol.registry/official'];
|
|
||||||
const updatedAt = officialMeta?.['updatedAt'];
|
|
||||||
|
|
||||||
const packages: RegistryServer['packages'] = {};
|
|
||||||
if (npmPkg !== undefined) {
|
|
||||||
packages.npm = npmPkg.identifier;
|
|
||||||
}
|
|
||||||
if (dockerPkg !== undefined) {
|
|
||||||
packages.docker = dockerPkg.identifier;
|
|
||||||
}
|
|
||||||
|
|
||||||
const result: RegistryServer = {
|
|
||||||
name: sanitizeString(server.title ?? server.name),
|
|
||||||
description: sanitizeString(server.description),
|
|
||||||
packages,
|
|
||||||
envTemplate,
|
|
||||||
transport,
|
|
||||||
popularityScore: 0, // Official registry has no popularity data
|
|
||||||
verified: false, // Official registry has no verified badges
|
|
||||||
sourceRegistry: 'official',
|
|
||||||
};
|
|
||||||
if (server.repository?.url !== undefined) {
|
|
||||||
result.repositoryUrl = server.repository.url;
|
|
||||||
}
|
|
||||||
if (typeof updatedAt === 'string') {
|
|
||||||
result.lastUpdated = new Date(updatedAt);
|
|
||||||
}
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,62 +0,0 @@
|
|||||||
import { RegistrySource } from '../base.js';
|
|
||||||
import {
|
|
||||||
SmitheryRegistryResponseSchema,
|
|
||||||
sanitizeString,
|
|
||||||
type SmitheryServerEntry,
|
|
||||||
type RegistryServer,
|
|
||||||
} from '../types.js';
|
|
||||||
import { withRetry } from '../retry.js';
|
|
||||||
|
|
||||||
const BASE_URL = 'https://registry.smithery.ai/servers';
|
|
||||||
|
|
||||||
export class SmitheryRegistrySource extends RegistrySource {
|
|
||||||
readonly name = 'smithery' as const;
|
|
||||||
|
|
||||||
async search(query: string, limit: number): Promise<RegistryServer[]> {
|
|
||||||
const results: RegistryServer[] = [];
|
|
||||||
let page = 1;
|
|
||||||
|
|
||||||
while (results.length < limit) {
|
|
||||||
const url = new URL(BASE_URL);
|
|
||||||
url.searchParams.set('q', query);
|
|
||||||
url.searchParams.set('pageSize', String(Math.min(limit - results.length, 50)));
|
|
||||||
url.searchParams.set('page', String(page));
|
|
||||||
|
|
||||||
const response = await withRetry(() => fetch(url.toString()));
|
|
||||||
if (!response.ok) {
|
|
||||||
throw new Error(`Smithery registry returned ${String(response.status)}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
const raw: unknown = await response.json();
|
|
||||||
const parsed = SmitheryRegistryResponseSchema.parse(raw);
|
|
||||||
|
|
||||||
for (const entry of parsed.servers) {
|
|
||||||
results.push(this.normalizeResult(entry));
|
|
||||||
}
|
|
||||||
|
|
||||||
if (page >= parsed.pagination.totalPages || parsed.servers.length === 0) break;
|
|
||||||
page++;
|
|
||||||
}
|
|
||||||
|
|
||||||
return results.slice(0, limit);
|
|
||||||
}
|
|
||||||
|
|
||||||
protected normalizeResult(raw: unknown): RegistryServer {
|
|
||||||
const entry = raw as SmitheryServerEntry;
|
|
||||||
|
|
||||||
const result: RegistryServer = {
|
|
||||||
name: sanitizeString(entry.displayName !== '' ? entry.displayName : entry.qualifiedName),
|
|
||||||
description: sanitizeString(entry.description),
|
|
||||||
packages: {},
|
|
||||||
envTemplate: [], // Smithery doesn't include env vars in list view
|
|
||||||
transport: entry.remote ? 'sse' : 'stdio',
|
|
||||||
popularityScore: entry.useCount,
|
|
||||||
verified: entry.verified,
|
|
||||||
sourceRegistry: 'smithery',
|
|
||||||
};
|
|
||||||
if (entry.createdAt !== undefined) {
|
|
||||||
result.lastUpdated = new Date(entry.createdAt);
|
|
||||||
}
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -173,7 +173,7 @@ export type SmitheryServerEntry = z.infer<typeof SmitheryServerSchema>;
|
|||||||
|
|
||||||
// ── Security utilities ──
|
// ── Security utilities ──
|
||||||
|
|
||||||
const ANSI_ESCAPE_RE = /\x1b\[[0-9;]*[a-zA-Z]|[\x00-\x08\x0B\x0C\x0E-\x1A\x1C-\x1F]|\x1b/g;
|
const ANSI_ESCAPE_RE = /\x1b\[[0-9;]*[a-zA-Z]|[\x00-\x08\x0B\x0C\x0E-\x1F]/g;
|
||||||
|
|
||||||
export function sanitizeString(text: string): string {
|
export function sanitizeString(text: string): string {
|
||||||
return text.replace(ANSI_ESCAPE_RE, '');
|
return text.replace(ANSI_ESCAPE_RE, '');
|
||||||
|
|||||||
@@ -1,100 +0,0 @@
|
|||||||
import { describe, it, expect, beforeAll, afterAll } from 'vitest';
|
|
||||||
import http from 'node:http';
|
|
||||||
import { ApiClient, ApiError } from '../src/api-client.js';
|
|
||||||
|
|
||||||
let server: http.Server;
|
|
||||||
let port: number;
|
|
||||||
|
|
||||||
beforeAll(async () => {
|
|
||||||
server = http.createServer((req, res) => {
|
|
||||||
if (req.url === '/api/v1/servers' && req.method === 'GET') {
|
|
||||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
|
||||||
res.end(JSON.stringify([{ id: 'srv-1', name: 'slack' }]));
|
|
||||||
} else if (req.url === '/api/v1/servers/srv-1' && req.method === 'GET') {
|
|
||||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
|
||||||
res.end(JSON.stringify({ id: 'srv-1', name: 'slack', transport: 'STDIO' }));
|
|
||||||
} else if (req.url === '/api/v1/servers' && req.method === 'POST') {
|
|
||||||
const chunks: Buffer[] = [];
|
|
||||||
req.on('data', (c: Buffer) => chunks.push(c));
|
|
||||||
req.on('end', () => {
|
|
||||||
const body = JSON.parse(Buffer.concat(chunks).toString());
|
|
||||||
res.writeHead(201, { 'Content-Type': 'application/json' });
|
|
||||||
res.end(JSON.stringify({ id: 'srv-new', ...body }));
|
|
||||||
});
|
|
||||||
} else if (req.url === '/api/v1/missing' && req.method === 'GET') {
|
|
||||||
res.writeHead(404, { 'Content-Type': 'application/json' });
|
|
||||||
res.end(JSON.stringify({ error: 'Not found' }));
|
|
||||||
} else {
|
|
||||||
res.writeHead(404);
|
|
||||||
res.end();
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
await new Promise<void>((resolve) => {
|
|
||||||
server.listen(0, () => {
|
|
||||||
const addr = server.address();
|
|
||||||
if (addr && typeof addr === 'object') {
|
|
||||||
port = addr.port;
|
|
||||||
}
|
|
||||||
resolve();
|
|
||||||
});
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
afterAll(() => {
|
|
||||||
server.close();
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('ApiClient', () => {
|
|
||||||
it('performs GET request for list', async () => {
|
|
||||||
const client = new ApiClient({ baseUrl: `http://localhost:${port}` });
|
|
||||||
const result = await client.get<Array<{ id: string; name: string }>>('/api/v1/servers');
|
|
||||||
expect(result).toEqual([{ id: 'srv-1', name: 'slack' }]);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('performs GET request for single item', async () => {
|
|
||||||
const client = new ApiClient({ baseUrl: `http://localhost:${port}` });
|
|
||||||
const result = await client.get<{ id: string; name: string }>('/api/v1/servers/srv-1');
|
|
||||||
expect(result.name).toBe('slack');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('performs POST request', async () => {
|
|
||||||
const client = new ApiClient({ baseUrl: `http://localhost:${port}` });
|
|
||||||
const result = await client.post<{ id: string; name: string }>('/api/v1/servers', { name: 'github' });
|
|
||||||
expect(result.id).toBe('srv-new');
|
|
||||||
expect(result.name).toBe('github');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('throws ApiError on 404', async () => {
|
|
||||||
const client = new ApiClient({ baseUrl: `http://localhost:${port}` });
|
|
||||||
await expect(client.get('/api/v1/missing')).rejects.toThrow(ApiError);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('throws on connection error', async () => {
|
|
||||||
const client = new ApiClient({ baseUrl: 'http://localhost:1' });
|
|
||||||
await expect(client.get('/anything')).rejects.toThrow();
|
|
||||||
});
|
|
||||||
|
|
||||||
it('sends Authorization header when token provided', async () => {
|
|
||||||
// We need a separate server to check the header
|
|
||||||
let receivedAuth = '';
|
|
||||||
const authServer = http.createServer((req, res) => {
|
|
||||||
receivedAuth = req.headers['authorization'] ?? '';
|
|
||||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
|
||||||
res.end(JSON.stringify({ ok: true }));
|
|
||||||
});
|
|
||||||
const authPort = await new Promise<number>((resolve) => {
|
|
||||||
authServer.listen(0, () => {
|
|
||||||
const addr = authServer.address();
|
|
||||||
if (addr && typeof addr === 'object') resolve(addr.port);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
try {
|
|
||||||
const client = new ApiClient({ baseUrl: `http://localhost:${authPort}`, token: 'my-token' });
|
|
||||||
await client.get('/test');
|
|
||||||
expect(receivedAuth).toBe('Bearer my-token');
|
|
||||||
} finally {
|
|
||||||
authServer.close();
|
|
||||||
}
|
|
||||||
});
|
|
||||||
});
|
|
||||||
@@ -1,59 +0,0 @@
|
|||||||
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
|
||||||
import { mkdtempSync, rmSync, statSync, existsSync } from 'node:fs';
|
|
||||||
import { join } from 'node:path';
|
|
||||||
import { tmpdir } from 'node:os';
|
|
||||||
import { saveCredentials, loadCredentials, deleteCredentials } from '../../src/auth/index.js';
|
|
||||||
|
|
||||||
let tempDir: string;
|
|
||||||
|
|
||||||
beforeEach(() => {
|
|
||||||
tempDir = mkdtempSync(join(tmpdir(), 'mcpctl-auth-test-'));
|
|
||||||
});
|
|
||||||
|
|
||||||
afterEach(() => {
|
|
||||||
rmSync(tempDir, { recursive: true, force: true });
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('saveCredentials', () => {
|
|
||||||
it('saves credentials file', () => {
|
|
||||||
saveCredentials({ token: 'tok123', mcpdUrl: 'http://x:3100', user: 'alice@test.com' }, { configDir: tempDir });
|
|
||||||
expect(existsSync(join(tempDir, 'credentials'))).toBe(true);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('sets 0600 permissions', () => {
|
|
||||||
saveCredentials({ token: 'tok123', mcpdUrl: 'http://x:3100', user: 'alice@test.com' }, { configDir: tempDir });
|
|
||||||
const stat = statSync(join(tempDir, 'credentials'));
|
|
||||||
expect(stat.mode & 0o777).toBe(0o600);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('creates config dir if missing', () => {
|
|
||||||
const nested = join(tempDir, 'sub', 'dir');
|
|
||||||
saveCredentials({ token: 'tok', mcpdUrl: 'http://x:3100', user: 'bob' }, { configDir: nested });
|
|
||||||
expect(existsSync(join(nested, 'credentials'))).toBe(true);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('loadCredentials', () => {
|
|
||||||
it('returns null when no credentials file', () => {
|
|
||||||
expect(loadCredentials({ configDir: tempDir })).toBeNull();
|
|
||||||
});
|
|
||||||
|
|
||||||
it('round-trips credentials', () => {
|
|
||||||
const creds = { token: 'tok456', mcpdUrl: 'http://remote:3100', user: 'charlie@test.com', expiresAt: '2099-01-01' };
|
|
||||||
saveCredentials(creds, { configDir: tempDir });
|
|
||||||
const loaded = loadCredentials({ configDir: tempDir });
|
|
||||||
expect(loaded).toEqual(creds);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('deleteCredentials', () => {
|
|
||||||
it('returns false when no credentials file', () => {
|
|
||||||
expect(deleteCredentials({ configDir: tempDir })).toBe(false);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('deletes credentials file', () => {
|
|
||||||
saveCredentials({ token: 'tok', mcpdUrl: 'http://x:3100', user: 'u' }, { configDir: tempDir });
|
|
||||||
expect(deleteCredentials({ configDir: tempDir })).toBe(true);
|
|
||||||
expect(existsSync(join(tempDir, 'credentials'))).toBe(false);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
@@ -24,10 +24,9 @@ describe('createProgram', () => {
|
|||||||
expect(status).toBeDefined();
|
expect(status).toBeDefined();
|
||||||
});
|
});
|
||||||
|
|
||||||
it('subcommands have output option', () => {
|
it('has output option', () => {
|
||||||
const program = createProgram();
|
const program = createProgram();
|
||||||
const get = program.commands.find((c) => c.name() === 'get');
|
const opt = program.options.find((o) => o.long === '--output');
|
||||||
const opt = get?.options.find((o) => o.long === '--output');
|
|
||||||
expect(opt).toBeDefined();
|
expect(opt).toBeDefined();
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|||||||
@@ -1,505 +0,0 @@
|
|||||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
|
||||||
import { writeFileSync, mkdtempSync, rmSync } from 'node:fs';
|
|
||||||
import { join } from 'node:path';
|
|
||||||
import { tmpdir } from 'node:os';
|
|
||||||
import { createApplyCommand } from '../../src/commands/apply.js';
|
|
||||||
import type { ApiClient } from '../../src/api-client.js';
|
|
||||||
|
|
||||||
function mockClient(): ApiClient {
|
|
||||||
return {
|
|
||||||
get: vi.fn(async () => []),
|
|
||||||
post: vi.fn(async () => ({ id: 'new-id', name: 'test' })),
|
|
||||||
put: vi.fn(async () => ({ id: 'existing-id', name: 'test' })),
|
|
||||||
delete: vi.fn(async () => {}),
|
|
||||||
} as unknown as ApiClient;
|
|
||||||
}
|
|
||||||
|
|
||||||
describe('apply command', () => {
|
|
||||||
let client: ReturnType<typeof mockClient>;
|
|
||||||
let output: string[];
|
|
||||||
let tmpDir: string;
|
|
||||||
const log = (...args: unknown[]) => output.push(args.map(String).join(' '));
|
|
||||||
|
|
||||||
beforeEach(() => {
|
|
||||||
client = mockClient();
|
|
||||||
output = [];
|
|
||||||
tmpDir = mkdtempSync(join(tmpdir(), 'mcpctl-test-'));
|
|
||||||
});
|
|
||||||
|
|
||||||
it('applies servers from YAML file', async () => {
|
|
||||||
const configPath = join(tmpDir, 'config.yaml');
|
|
||||||
writeFileSync(configPath, `
|
|
||||||
servers:
|
|
||||||
- name: slack
|
|
||||||
description: Slack MCP server
|
|
||||||
transport: STDIO
|
|
||||||
packageName: "@anthropic/slack-mcp"
|
|
||||||
`);
|
|
||||||
|
|
||||||
const cmd = createApplyCommand({ client, log });
|
|
||||||
await cmd.parseAsync([configPath], { from: 'user' });
|
|
||||||
|
|
||||||
expect(client.post).toHaveBeenCalledWith('/api/v1/servers', expect.objectContaining({ name: 'slack' }));
|
|
||||||
expect(output.join('\n')).toContain('Created server: slack');
|
|
||||||
|
|
||||||
rmSync(tmpDir, { recursive: true, force: true });
|
|
||||||
});
|
|
||||||
|
|
||||||
it('applies servers from JSON file', async () => {
|
|
||||||
const configPath = join(tmpDir, 'config.json');
|
|
||||||
writeFileSync(configPath, JSON.stringify({
|
|
||||||
servers: [{ name: 'github', transport: 'STDIO' }],
|
|
||||||
}));
|
|
||||||
|
|
||||||
const cmd = createApplyCommand({ client, log });
|
|
||||||
await cmd.parseAsync([configPath], { from: 'user' });
|
|
||||||
|
|
||||||
expect(client.post).toHaveBeenCalledWith('/api/v1/servers', expect.objectContaining({ name: 'github' }));
|
|
||||||
expect(output.join('\n')).toContain('Created server: github');
|
|
||||||
|
|
||||||
rmSync(tmpDir, { recursive: true, force: true });
|
|
||||||
});
|
|
||||||
|
|
||||||
it('updates existing servers', async () => {
|
|
||||||
vi.mocked(client.get).mockResolvedValue([{ id: 'srv-1', name: 'slack' }]);
|
|
||||||
|
|
||||||
const configPath = join(tmpDir, 'config.yaml');
|
|
||||||
writeFileSync(configPath, `
|
|
||||||
servers:
|
|
||||||
- name: slack
|
|
||||||
description: Updated description
|
|
||||||
transport: STDIO
|
|
||||||
`);
|
|
||||||
|
|
||||||
const cmd = createApplyCommand({ client, log });
|
|
||||||
await cmd.parseAsync([configPath], { from: 'user' });
|
|
||||||
|
|
||||||
expect(client.put).toHaveBeenCalledWith('/api/v1/servers/srv-1', expect.objectContaining({ name: 'slack' }));
|
|
||||||
expect(output.join('\n')).toContain('Updated server: slack');
|
|
||||||
|
|
||||||
rmSync(tmpDir, { recursive: true, force: true });
|
|
||||||
});
|
|
||||||
|
|
||||||
it('supports dry-run mode', async () => {
|
|
||||||
const configPath = join(tmpDir, 'config.yaml');
|
|
||||||
writeFileSync(configPath, `
|
|
||||||
servers:
|
|
||||||
- name: test
|
|
||||||
transport: STDIO
|
|
||||||
`);
|
|
||||||
|
|
||||||
const cmd = createApplyCommand({ client, log });
|
|
||||||
await cmd.parseAsync([configPath, '--dry-run'], { from: 'user' });
|
|
||||||
|
|
||||||
expect(client.post).not.toHaveBeenCalled();
|
|
||||||
expect(output.join('\n')).toContain('Dry run');
|
|
||||||
expect(output.join('\n')).toContain('1 server(s)');
|
|
||||||
|
|
||||||
rmSync(tmpDir, { recursive: true, force: true });
|
|
||||||
});
|
|
||||||
|
|
||||||
it('applies secrets', async () => {
|
|
||||||
const configPath = join(tmpDir, 'config.yaml');
|
|
||||||
writeFileSync(configPath, `
|
|
||||||
secrets:
|
|
||||||
- name: ha-creds
|
|
||||||
data:
|
|
||||||
TOKEN: abc123
|
|
||||||
URL: https://ha.local
|
|
||||||
`);
|
|
||||||
|
|
||||||
const cmd = createApplyCommand({ client, log });
|
|
||||||
await cmd.parseAsync([configPath], { from: 'user' });
|
|
||||||
|
|
||||||
expect(client.post).toHaveBeenCalledWith('/api/v1/secrets', expect.objectContaining({
|
|
||||||
name: 'ha-creds',
|
|
||||||
data: { TOKEN: 'abc123', URL: 'https://ha.local' },
|
|
||||||
}));
|
|
||||||
expect(output.join('\n')).toContain('Created secret: ha-creds');
|
|
||||||
|
|
||||||
rmSync(tmpDir, { recursive: true, force: true });
|
|
||||||
});
|
|
||||||
|
|
||||||
it('updates existing secrets', async () => {
|
|
||||||
vi.mocked(client.get).mockImplementation(async (url: string) => {
|
|
||||||
if (url === '/api/v1/secrets') return [{ id: 'sec-1', name: 'ha-creds' }];
|
|
||||||
return [];
|
|
||||||
});
|
|
||||||
|
|
||||||
const configPath = join(tmpDir, 'config.yaml');
|
|
||||||
writeFileSync(configPath, `
|
|
||||||
secrets:
|
|
||||||
- name: ha-creds
|
|
||||||
data:
|
|
||||||
TOKEN: new-token
|
|
||||||
`);
|
|
||||||
|
|
||||||
const cmd = createApplyCommand({ client, log });
|
|
||||||
await cmd.parseAsync([configPath], { from: 'user' });
|
|
||||||
|
|
||||||
expect(client.put).toHaveBeenCalledWith('/api/v1/secrets/sec-1', { data: { TOKEN: 'new-token' } });
|
|
||||||
expect(output.join('\n')).toContain('Updated secret: ha-creds');
|
|
||||||
|
|
||||||
rmSync(tmpDir, { recursive: true, force: true });
|
|
||||||
});
|
|
||||||
|
|
||||||
it('applies projects', async () => {
|
|
||||||
const configPath = join(tmpDir, 'config.yaml');
|
|
||||||
writeFileSync(configPath, `
|
|
||||||
projects:
|
|
||||||
- name: my-project
|
|
||||||
description: A test project
|
|
||||||
`);
|
|
||||||
|
|
||||||
const cmd = createApplyCommand({ client, log });
|
|
||||||
await cmd.parseAsync([configPath], { from: 'user' });
|
|
||||||
|
|
||||||
expect(client.post).toHaveBeenCalledWith('/api/v1/projects', expect.objectContaining({ name: 'my-project' }));
|
|
||||||
expect(output.join('\n')).toContain('Created project: my-project');
|
|
||||||
|
|
||||||
rmSync(tmpDir, { recursive: true, force: true });
|
|
||||||
});
|
|
||||||
|
|
||||||
it('applies users (no role field)', async () => {
|
|
||||||
const configPath = join(tmpDir, 'config.yaml');
|
|
||||||
writeFileSync(configPath, `
|
|
||||||
users:
|
|
||||||
- email: alice@test.com
|
|
||||||
password: password123
|
|
||||||
name: Alice
|
|
||||||
`);
|
|
||||||
|
|
||||||
const cmd = createApplyCommand({ client, log });
|
|
||||||
await cmd.parseAsync([configPath], { from: 'user' });
|
|
||||||
|
|
||||||
const callBody = vi.mocked(client.post).mock.calls[0]![1] as Record<string, unknown>;
|
|
||||||
expect(callBody).toEqual(expect.objectContaining({
|
|
||||||
email: 'alice@test.com',
|
|
||||||
password: 'password123',
|
|
||||||
name: 'Alice',
|
|
||||||
}));
|
|
||||||
expect(callBody).not.toHaveProperty('role');
|
|
||||||
expect(output.join('\n')).toContain('Created user: alice@test.com');
|
|
||||||
|
|
||||||
rmSync(tmpDir, { recursive: true, force: true });
|
|
||||||
});
|
|
||||||
|
|
||||||
it('updates existing users matched by email', async () => {
|
|
||||||
vi.mocked(client.get).mockImplementation(async (url: string) => {
|
|
||||||
if (url === '/api/v1/users') return [{ id: 'usr-1', email: 'alice@test.com' }];
|
|
||||||
return [];
|
|
||||||
});
|
|
||||||
|
|
||||||
const configPath = join(tmpDir, 'config.yaml');
|
|
||||||
writeFileSync(configPath, `
|
|
||||||
users:
|
|
||||||
- email: alice@test.com
|
|
||||||
password: newpassword
|
|
||||||
name: Alice Updated
|
|
||||||
`);
|
|
||||||
|
|
||||||
const cmd = createApplyCommand({ client, log });
|
|
||||||
await cmd.parseAsync([configPath], { from: 'user' });
|
|
||||||
|
|
||||||
expect(client.put).toHaveBeenCalledWith('/api/v1/users/usr-1', expect.objectContaining({
|
|
||||||
email: 'alice@test.com',
|
|
||||||
name: 'Alice Updated',
|
|
||||||
}));
|
|
||||||
expect(output.join('\n')).toContain('Updated user: alice@test.com');
|
|
||||||
|
|
||||||
rmSync(tmpDir, { recursive: true, force: true });
|
|
||||||
});
|
|
||||||
|
|
||||||
it('applies groups', async () => {
|
|
||||||
const configPath = join(tmpDir, 'config.yaml');
|
|
||||||
writeFileSync(configPath, `
|
|
||||||
groups:
|
|
||||||
- name: dev-team
|
|
||||||
description: Development team
|
|
||||||
members:
|
|
||||||
- alice@test.com
|
|
||||||
- bob@test.com
|
|
||||||
`);
|
|
||||||
|
|
||||||
const cmd = createApplyCommand({ client, log });
|
|
||||||
await cmd.parseAsync([configPath], { from: 'user' });
|
|
||||||
|
|
||||||
expect(client.post).toHaveBeenCalledWith('/api/v1/groups', expect.objectContaining({
|
|
||||||
name: 'dev-team',
|
|
||||||
description: 'Development team',
|
|
||||||
members: ['alice@test.com', 'bob@test.com'],
|
|
||||||
}));
|
|
||||||
expect(output.join('\n')).toContain('Created group: dev-team');
|
|
||||||
|
|
||||||
rmSync(tmpDir, { recursive: true, force: true });
|
|
||||||
});
|
|
||||||
|
|
||||||
it('updates existing groups', async () => {
|
|
||||||
vi.mocked(client.get).mockImplementation(async (url: string) => {
|
|
||||||
if (url === '/api/v1/groups') return [{ id: 'grp-1', name: 'dev-team' }];
|
|
||||||
return [];
|
|
||||||
});
|
|
||||||
|
|
||||||
const configPath = join(tmpDir, 'config.yaml');
|
|
||||||
writeFileSync(configPath, `
|
|
||||||
groups:
|
|
||||||
- name: dev-team
|
|
||||||
description: Updated devs
|
|
||||||
members:
|
|
||||||
- new@test.com
|
|
||||||
`);
|
|
||||||
|
|
||||||
const cmd = createApplyCommand({ client, log });
|
|
||||||
await cmd.parseAsync([configPath], { from: 'user' });
|
|
||||||
|
|
||||||
expect(client.put).toHaveBeenCalledWith('/api/v1/groups/grp-1', expect.objectContaining({
|
|
||||||
name: 'dev-team',
|
|
||||||
description: 'Updated devs',
|
|
||||||
}));
|
|
||||||
expect(output.join('\n')).toContain('Updated group: dev-team');
|
|
||||||
|
|
||||||
rmSync(tmpDir, { recursive: true, force: true });
|
|
||||||
});
|
|
||||||
|
|
||||||
it('applies rbacBindings', async () => {
|
|
||||||
const configPath = join(tmpDir, 'config.yaml');
|
|
||||||
writeFileSync(configPath, `
|
|
||||||
rbac:
|
|
||||||
- name: developers
|
|
||||||
subjects:
|
|
||||||
- kind: User
|
|
||||||
name: alice@test.com
|
|
||||||
- kind: Group
|
|
||||||
name: dev-team
|
|
||||||
roleBindings:
|
|
||||||
- role: edit
|
|
||||||
resource: servers
|
|
||||||
- role: view
|
|
||||||
resource: instances
|
|
||||||
`);
|
|
||||||
|
|
||||||
const cmd = createApplyCommand({ client, log });
|
|
||||||
await cmd.parseAsync([configPath], { from: 'user' });
|
|
||||||
|
|
||||||
expect(client.post).toHaveBeenCalledWith('/api/v1/rbac', expect.objectContaining({
|
|
||||||
name: 'developers',
|
|
||||||
subjects: [
|
|
||||||
{ kind: 'User', name: 'alice@test.com' },
|
|
||||||
{ kind: 'Group', name: 'dev-team' },
|
|
||||||
],
|
|
||||||
roleBindings: [
|
|
||||||
{ role: 'edit', resource: 'servers' },
|
|
||||||
{ role: 'view', resource: 'instances' },
|
|
||||||
],
|
|
||||||
}));
|
|
||||||
expect(output.join('\n')).toContain('Created rbacBinding: developers');
|
|
||||||
|
|
||||||
rmSync(tmpDir, { recursive: true, force: true });
|
|
||||||
});
|
|
||||||
|
|
||||||
it('updates existing rbacBindings', async () => {
|
|
||||||
vi.mocked(client.get).mockImplementation(async (url: string) => {
|
|
||||||
if (url === '/api/v1/rbac') return [{ id: 'rbac-1', name: 'developers' }];
|
|
||||||
return [];
|
|
||||||
});
|
|
||||||
|
|
||||||
const configPath = join(tmpDir, 'config.yaml');
|
|
||||||
writeFileSync(configPath, `
|
|
||||||
rbacBindings:
|
|
||||||
- name: developers
|
|
||||||
subjects:
|
|
||||||
- kind: User
|
|
||||||
name: new@test.com
|
|
||||||
roleBindings:
|
|
||||||
- role: edit
|
|
||||||
resource: "*"
|
|
||||||
`);
|
|
||||||
|
|
||||||
const cmd = createApplyCommand({ client, log });
|
|
||||||
await cmd.parseAsync([configPath], { from: 'user' });
|
|
||||||
|
|
||||||
expect(client.put).toHaveBeenCalledWith('/api/v1/rbac/rbac-1', expect.objectContaining({
|
|
||||||
name: 'developers',
|
|
||||||
}));
|
|
||||||
expect(output.join('\n')).toContain('Updated rbacBinding: developers');
|
|
||||||
|
|
||||||
rmSync(tmpDir, { recursive: true, force: true });
|
|
||||||
});
|
|
||||||
|
|
||||||
it('applies projects with servers', async () => {
|
|
||||||
const configPath = join(tmpDir, 'config.yaml');
|
|
||||||
writeFileSync(configPath, `
|
|
||||||
projects:
|
|
||||||
- name: smart-home
|
|
||||||
description: Home automation
|
|
||||||
proxyMode: filtered
|
|
||||||
llmProvider: gemini-cli
|
|
||||||
llmModel: gemini-2.0-flash
|
|
||||||
servers:
|
|
||||||
- my-grafana
|
|
||||||
- my-ha
|
|
||||||
`);
|
|
||||||
|
|
||||||
const cmd = createApplyCommand({ client, log });
|
|
||||||
await cmd.parseAsync([configPath], { from: 'user' });
|
|
||||||
|
|
||||||
expect(client.post).toHaveBeenCalledWith('/api/v1/projects', expect.objectContaining({
|
|
||||||
name: 'smart-home',
|
|
||||||
proxyMode: 'filtered',
|
|
||||||
llmProvider: 'gemini-cli',
|
|
||||||
llmModel: 'gemini-2.0-flash',
|
|
||||||
servers: ['my-grafana', 'my-ha'],
|
|
||||||
}));
|
|
||||||
expect(output.join('\n')).toContain('Created project: smart-home');
|
|
||||||
|
|
||||||
rmSync(tmpDir, { recursive: true, force: true });
|
|
||||||
});
|
|
||||||
|
|
||||||
it('dry-run shows all new resource types', async () => {
|
|
||||||
const configPath = join(tmpDir, 'config.yaml');
|
|
||||||
writeFileSync(configPath, `
|
|
||||||
secrets:
|
|
||||||
- name: creds
|
|
||||||
data:
|
|
||||||
TOKEN: abc
|
|
||||||
users:
|
|
||||||
- email: alice@test.com
|
|
||||||
password: password123
|
|
||||||
groups:
|
|
||||||
- name: dev-team
|
|
||||||
members: []
|
|
||||||
projects:
|
|
||||||
- name: my-proj
|
|
||||||
description: A project
|
|
||||||
rbacBindings:
|
|
||||||
- name: admins
|
|
||||||
subjects:
|
|
||||||
- kind: User
|
|
||||||
name: admin@test.com
|
|
||||||
roleBindings:
|
|
||||||
- role: edit
|
|
||||||
resource: "*"
|
|
||||||
`);
|
|
||||||
|
|
||||||
const cmd = createApplyCommand({ client, log });
|
|
||||||
await cmd.parseAsync([configPath, '--dry-run'], { from: 'user' });
|
|
||||||
|
|
||||||
expect(client.post).not.toHaveBeenCalled();
|
|
||||||
const text = output.join('\n');
|
|
||||||
expect(text).toContain('Dry run');
|
|
||||||
expect(text).toContain('1 secret(s)');
|
|
||||||
expect(text).toContain('1 user(s)');
|
|
||||||
expect(text).toContain('1 group(s)');
|
|
||||||
expect(text).toContain('1 project(s)');
|
|
||||||
expect(text).toContain('1 rbacBinding(s)');
|
|
||||||
|
|
||||||
rmSync(tmpDir, { recursive: true, force: true });
|
|
||||||
});
|
|
||||||
|
|
||||||
it('applies resources in correct order', async () => {
|
|
||||||
const callOrder: string[] = [];
|
|
||||||
vi.mocked(client.post).mockImplementation(async (url: string) => {
|
|
||||||
callOrder.push(url);
|
|
||||||
return { id: 'new-id', name: 'test' };
|
|
||||||
});
|
|
||||||
|
|
||||||
const configPath = join(tmpDir, 'config.yaml');
|
|
||||||
writeFileSync(configPath, `
|
|
||||||
rbacBindings:
|
|
||||||
- name: admins
|
|
||||||
subjects:
|
|
||||||
- kind: User
|
|
||||||
name: admin@test.com
|
|
||||||
roleBindings:
|
|
||||||
- role: edit
|
|
||||||
resource: "*"
|
|
||||||
users:
|
|
||||||
- email: admin@test.com
|
|
||||||
password: password123
|
|
||||||
secrets:
|
|
||||||
- name: creds
|
|
||||||
data:
|
|
||||||
KEY: val
|
|
||||||
groups:
|
|
||||||
- name: dev-team
|
|
||||||
servers:
|
|
||||||
- name: my-server
|
|
||||||
transport: STDIO
|
|
||||||
projects:
|
|
||||||
- name: my-proj
|
|
||||||
`);
|
|
||||||
|
|
||||||
const cmd = createApplyCommand({ client, log });
|
|
||||||
await cmd.parseAsync([configPath], { from: 'user' });
|
|
||||||
|
|
||||||
// Apply order: secrets → servers → users → groups → projects → templates → rbacBindings
|
|
||||||
expect(callOrder[0]).toBe('/api/v1/secrets');
|
|
||||||
expect(callOrder[1]).toBe('/api/v1/servers');
|
|
||||||
expect(callOrder[2]).toBe('/api/v1/users');
|
|
||||||
expect(callOrder[3]).toBe('/api/v1/groups');
|
|
||||||
expect(callOrder[4]).toBe('/api/v1/projects');
|
|
||||||
expect(callOrder[5]).toBe('/api/v1/rbac');
|
|
||||||
|
|
||||||
rmSync(tmpDir, { recursive: true, force: true });
|
|
||||||
});
|
|
||||||
|
|
||||||
it('applies rbac with operation bindings', async () => {
|
|
||||||
const configPath = join(tmpDir, 'config.yaml');
|
|
||||||
writeFileSync(configPath, `
|
|
||||||
rbac:
|
|
||||||
- name: ops-team
|
|
||||||
subjects:
|
|
||||||
- kind: Group
|
|
||||||
name: ops
|
|
||||||
roleBindings:
|
|
||||||
- role: edit
|
|
||||||
resource: servers
|
|
||||||
- role: run
|
|
||||||
action: backup
|
|
||||||
- role: run
|
|
||||||
action: logs
|
|
||||||
`);
|
|
||||||
|
|
||||||
const cmd = createApplyCommand({ client, log });
|
|
||||||
await cmd.parseAsync([configPath], { from: 'user' });
|
|
||||||
|
|
||||||
expect(client.post).toHaveBeenCalledWith('/api/v1/rbac', expect.objectContaining({
|
|
||||||
name: 'ops-team',
|
|
||||||
roleBindings: [
|
|
||||||
{ role: 'edit', resource: 'servers' },
|
|
||||||
{ role: 'run', action: 'backup' },
|
|
||||||
{ role: 'run', action: 'logs' },
|
|
||||||
],
|
|
||||||
}));
|
|
||||||
expect(output.join('\n')).toContain('Created rbacBinding: ops-team');
|
|
||||||
|
|
||||||
rmSync(tmpDir, { recursive: true, force: true });
|
|
||||||
});
|
|
||||||
|
|
||||||
it('applies rbac with name-scoped resource binding', async () => {
|
|
||||||
const configPath = join(tmpDir, 'config.yaml');
|
|
||||||
writeFileSync(configPath, `
|
|
||||||
rbac:
|
|
||||||
- name: ha-viewer
|
|
||||||
subjects:
|
|
||||||
- kind: User
|
|
||||||
name: alice@test.com
|
|
||||||
roleBindings:
|
|
||||||
- role: view
|
|
||||||
resource: servers
|
|
||||||
name: my-ha
|
|
||||||
`);
|
|
||||||
|
|
||||||
const cmd = createApplyCommand({ client, log });
|
|
||||||
await cmd.parseAsync([configPath], { from: 'user' });
|
|
||||||
|
|
||||||
expect(client.post).toHaveBeenCalledWith('/api/v1/rbac', expect.objectContaining({
|
|
||||||
name: 'ha-viewer',
|
|
||||||
roleBindings: [
|
|
||||||
{ role: 'view', resource: 'servers', name: 'my-ha' },
|
|
||||||
],
|
|
||||||
}));
|
|
||||||
|
|
||||||
rmSync(tmpDir, { recursive: true, force: true });
|
|
||||||
});
|
|
||||||
});
|
|
||||||
@@ -1,216 +0,0 @@
|
|||||||
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
|
||||||
import { mkdtempSync, rmSync } from 'node:fs';
|
|
||||||
import { join } from 'node:path';
|
|
||||||
import { tmpdir } from 'node:os';
|
|
||||||
import { createLoginCommand, createLogoutCommand } from '../../src/commands/auth.js';
|
|
||||||
import { saveCredentials, loadCredentials } from '../../src/auth/index.js';
|
|
||||||
import { saveConfig, DEFAULT_CONFIG } from '../../src/config/index.js';
|
|
||||||
|
|
||||||
let tempDir: string;
|
|
||||||
let output: string[];
|
|
||||||
|
|
||||||
function log(...args: string[]) {
|
|
||||||
output.push(args.join(' '));
|
|
||||||
}
|
|
||||||
|
|
||||||
beforeEach(() => {
|
|
||||||
tempDir = mkdtempSync(join(tmpdir(), 'mcpctl-auth-cmd-test-'));
|
|
||||||
output = [];
|
|
||||||
});
|
|
||||||
|
|
||||||
afterEach(() => {
|
|
||||||
rmSync(tempDir, { recursive: true, force: true });
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('login command', () => {
|
|
||||||
it('stores credentials on successful login', async () => {
|
|
||||||
const cmd = createLoginCommand({
|
|
||||||
configDeps: { configDir: tempDir },
|
|
||||||
credentialsDeps: { configDir: tempDir },
|
|
||||||
prompt: {
|
|
||||||
input: async () => 'alice@test.com',
|
|
||||||
password: async () => 'secret123',
|
|
||||||
},
|
|
||||||
log,
|
|
||||||
loginRequest: async (_url, email, _password) => ({
|
|
||||||
token: 'session-token-123',
|
|
||||||
user: { email },
|
|
||||||
}),
|
|
||||||
logoutRequest: async () => {},
|
|
||||||
statusRequest: async () => ({ hasUsers: true }),
|
|
||||||
bootstrapRequest: async () => ({ token: '', user: { email: '' } }),
|
|
||||||
});
|
|
||||||
await cmd.parseAsync([], { from: 'user' });
|
|
||||||
expect(output[0]).toContain('Logged in as alice@test.com');
|
|
||||||
|
|
||||||
const creds = loadCredentials({ configDir: tempDir });
|
|
||||||
expect(creds).not.toBeNull();
|
|
||||||
expect(creds!.token).toBe('session-token-123');
|
|
||||||
expect(creds!.user).toBe('alice@test.com');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('shows error on failed login', async () => {
|
|
||||||
const cmd = createLoginCommand({
|
|
||||||
configDeps: { configDir: tempDir },
|
|
||||||
credentialsDeps: { configDir: tempDir },
|
|
||||||
prompt: {
|
|
||||||
input: async () => 'alice@test.com',
|
|
||||||
password: async () => 'wrong',
|
|
||||||
},
|
|
||||||
log,
|
|
||||||
loginRequest: async () => { throw new Error('Invalid credentials'); },
|
|
||||||
logoutRequest: async () => {},
|
|
||||||
statusRequest: async () => ({ hasUsers: true }),
|
|
||||||
bootstrapRequest: async () => ({ token: '', user: { email: '' } }),
|
|
||||||
});
|
|
||||||
await cmd.parseAsync([], { from: 'user' });
|
|
||||||
expect(output[0]).toContain('Login failed');
|
|
||||||
expect(output[0]).toContain('Invalid credentials');
|
|
||||||
|
|
||||||
const creds = loadCredentials({ configDir: tempDir });
|
|
||||||
expect(creds).toBeNull();
|
|
||||||
});
|
|
||||||
|
|
||||||
it('uses mcpdUrl from config', async () => {
|
|
||||||
saveConfig({ ...DEFAULT_CONFIG, mcpdUrl: 'http://custom:3100' }, { configDir: tempDir });
|
|
||||||
let capturedUrl = '';
|
|
||||||
const cmd = createLoginCommand({
|
|
||||||
configDeps: { configDir: tempDir },
|
|
||||||
credentialsDeps: { configDir: tempDir },
|
|
||||||
prompt: {
|
|
||||||
input: async () => 'user@test.com',
|
|
||||||
password: async () => 'pass',
|
|
||||||
},
|
|
||||||
log,
|
|
||||||
loginRequest: async (url, email) => {
|
|
||||||
capturedUrl = url;
|
|
||||||
return { token: 'tok', user: { email } };
|
|
||||||
},
|
|
||||||
logoutRequest: async () => {},
|
|
||||||
statusRequest: async () => ({ hasUsers: true }),
|
|
||||||
bootstrapRequest: async () => ({ token: '', user: { email: '' } }),
|
|
||||||
});
|
|
||||||
await cmd.parseAsync([], { from: 'user' });
|
|
||||||
expect(capturedUrl).toBe('http://custom:3100');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('allows --mcpd-url flag override', async () => {
|
|
||||||
let capturedUrl = '';
|
|
||||||
const cmd = createLoginCommand({
|
|
||||||
configDeps: { configDir: tempDir },
|
|
||||||
credentialsDeps: { configDir: tempDir },
|
|
||||||
prompt: {
|
|
||||||
input: async () => 'user@test.com',
|
|
||||||
password: async () => 'pass',
|
|
||||||
},
|
|
||||||
log,
|
|
||||||
loginRequest: async (url, email) => {
|
|
||||||
capturedUrl = url;
|
|
||||||
return { token: 'tok', user: { email } };
|
|
||||||
},
|
|
||||||
logoutRequest: async () => {},
|
|
||||||
statusRequest: async () => ({ hasUsers: true }),
|
|
||||||
bootstrapRequest: async () => ({ token: '', user: { email: '' } }),
|
|
||||||
});
|
|
||||||
await cmd.parseAsync(['--mcpd-url', 'http://override:3100'], { from: 'user' });
|
|
||||||
expect(capturedUrl).toBe('http://override:3100');
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('login bootstrap flow', () => {
|
|
||||||
it('bootstraps first admin when no users exist', async () => {
|
|
||||||
let bootstrapCalled = false;
|
|
||||||
const cmd = createLoginCommand({
|
|
||||||
configDeps: { configDir: tempDir },
|
|
||||||
credentialsDeps: { configDir: tempDir },
|
|
||||||
prompt: {
|
|
||||||
input: async (msg) => {
|
|
||||||
if (msg.includes('Name')) return 'Admin User';
|
|
||||||
return 'admin@test.com';
|
|
||||||
},
|
|
||||||
password: async () => 'admin-pass',
|
|
||||||
},
|
|
||||||
log,
|
|
||||||
loginRequest: async () => ({ token: '', user: { email: '' } }),
|
|
||||||
logoutRequest: async () => {},
|
|
||||||
statusRequest: async () => ({ hasUsers: false }),
|
|
||||||
bootstrapRequest: async (_url, email, _password) => {
|
|
||||||
bootstrapCalled = true;
|
|
||||||
return { token: 'admin-token', user: { email } };
|
|
||||||
},
|
|
||||||
});
|
|
||||||
await cmd.parseAsync([], { from: 'user' });
|
|
||||||
|
|
||||||
expect(bootstrapCalled).toBe(true);
|
|
||||||
expect(output.join('\n')).toContain('No users configured');
|
|
||||||
expect(output.join('\n')).toContain('admin@test.com');
|
|
||||||
expect(output.join('\n')).toContain('admin');
|
|
||||||
|
|
||||||
const creds = loadCredentials({ configDir: tempDir });
|
|
||||||
expect(creds).not.toBeNull();
|
|
||||||
expect(creds!.token).toBe('admin-token');
|
|
||||||
expect(creds!.user).toBe('admin@test.com');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('falls back to normal login when users exist', async () => {
|
|
||||||
let loginCalled = false;
|
|
||||||
const cmd = createLoginCommand({
|
|
||||||
configDeps: { configDir: tempDir },
|
|
||||||
credentialsDeps: { configDir: tempDir },
|
|
||||||
prompt: {
|
|
||||||
input: async () => 'alice@test.com',
|
|
||||||
password: async () => 'secret',
|
|
||||||
},
|
|
||||||
log,
|
|
||||||
loginRequest: async (_url, email) => {
|
|
||||||
loginCalled = true;
|
|
||||||
return { token: 'session-tok', user: { email } };
|
|
||||||
},
|
|
||||||
logoutRequest: async () => {},
|
|
||||||
statusRequest: async () => ({ hasUsers: true }),
|
|
||||||
bootstrapRequest: async () => { throw new Error('Should not be called'); },
|
|
||||||
});
|
|
||||||
await cmd.parseAsync([], { from: 'user' });
|
|
||||||
|
|
||||||
expect(loginCalled).toBe(true);
|
|
||||||
expect(output.join('\n')).not.toContain('No users configured');
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('logout command', () => {
|
|
||||||
it('removes credentials on logout', async () => {
|
|
||||||
saveCredentials({ token: 'tok', mcpdUrl: 'http://x:3100', user: 'alice' }, { configDir: tempDir });
|
|
||||||
let logoutCalled = false;
|
|
||||||
const cmd = createLogoutCommand({
|
|
||||||
configDeps: { configDir: tempDir },
|
|
||||||
credentialsDeps: { configDir: tempDir },
|
|
||||||
prompt: { input: async () => '', password: async () => '' },
|
|
||||||
log,
|
|
||||||
loginRequest: async () => ({ token: '', user: { email: '' } }),
|
|
||||||
logoutRequest: async () => { logoutCalled = true; },
|
|
||||||
statusRequest: async () => ({ hasUsers: true }),
|
|
||||||
bootstrapRequest: async () => ({ token: '', user: { email: '' } }),
|
|
||||||
});
|
|
||||||
await cmd.parseAsync([], { from: 'user' });
|
|
||||||
expect(output[0]).toContain('Logged out successfully');
|
|
||||||
expect(logoutCalled).toBe(true);
|
|
||||||
|
|
||||||
const creds = loadCredentials({ configDir: tempDir });
|
|
||||||
expect(creds).toBeNull();
|
|
||||||
});
|
|
||||||
|
|
||||||
it('shows not logged in when no credentials', async () => {
|
|
||||||
const cmd = createLogoutCommand({
|
|
||||||
configDeps: { configDir: tempDir },
|
|
||||||
credentialsDeps: { configDir: tempDir },
|
|
||||||
prompt: { input: async () => '', password: async () => '' },
|
|
||||||
log,
|
|
||||||
loginRequest: async () => ({ token: '', user: { email: '' } }),
|
|
||||||
logoutRequest: async () => {},
|
|
||||||
statusRequest: async () => ({ hasUsers: true }),
|
|
||||||
bootstrapRequest: async () => ({ token: '', user: { email: '' } }),
|
|
||||||
});
|
|
||||||
await cmd.parseAsync([], { from: 'user' });
|
|
||||||
expect(output[0]).toContain('Not logged in');
|
|
||||||
});
|
|
||||||
});
|
|
||||||
@@ -1,120 +0,0 @@
|
|||||||
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
|
|
||||||
import fs from 'node:fs';
|
|
||||||
import { createBackupCommand, createRestoreCommand } from '../../src/commands/backup.js';
|
|
||||||
|
|
||||||
const mockClient = {
|
|
||||||
get: vi.fn(),
|
|
||||||
post: vi.fn(),
|
|
||||||
put: vi.fn(),
|
|
||||||
delete: vi.fn(),
|
|
||||||
};
|
|
||||||
|
|
||||||
const log = vi.fn();
|
|
||||||
|
|
||||||
describe('backup command', () => {
|
|
||||||
beforeEach(() => {
|
|
||||||
vi.resetAllMocks();
|
|
||||||
});
|
|
||||||
|
|
||||||
afterEach(() => {
|
|
||||||
// Clean up any created files
|
|
||||||
try { fs.unlinkSync('test-backup.json'); } catch { /* ignore */ }
|
|
||||||
});
|
|
||||||
|
|
||||||
it('creates backup command', () => {
|
|
||||||
const cmd = createBackupCommand({ client: mockClient as never, log });
|
|
||||||
expect(cmd.name()).toBe('backup');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('calls API and writes file', async () => {
|
|
||||||
const bundle = { version: '1', servers: [], profiles: [], projects: [] };
|
|
||||||
mockClient.post.mockResolvedValue(bundle);
|
|
||||||
|
|
||||||
const cmd = createBackupCommand({ client: mockClient as never, log });
|
|
||||||
await cmd.parseAsync(['-o', 'test-backup.json'], { from: 'user' });
|
|
||||||
|
|
||||||
expect(mockClient.post).toHaveBeenCalledWith('/api/v1/backup', {});
|
|
||||||
expect(fs.existsSync('test-backup.json')).toBe(true);
|
|
||||||
expect(log).toHaveBeenCalledWith(expect.stringContaining('test-backup.json'));
|
|
||||||
});
|
|
||||||
|
|
||||||
it('passes password when provided', async () => {
|
|
||||||
mockClient.post.mockResolvedValue({ version: '1', servers: [], profiles: [], projects: [] });
|
|
||||||
|
|
||||||
const cmd = createBackupCommand({ client: mockClient as never, log });
|
|
||||||
await cmd.parseAsync(['-o', 'test-backup.json', '-p', 'secret'], { from: 'user' });
|
|
||||||
|
|
||||||
expect(mockClient.post).toHaveBeenCalledWith('/api/v1/backup', { password: 'secret' });
|
|
||||||
});
|
|
||||||
|
|
||||||
it('passes resource filter', async () => {
|
|
||||||
mockClient.post.mockResolvedValue({ version: '1', servers: [], profiles: [], projects: [] });
|
|
||||||
|
|
||||||
const cmd = createBackupCommand({ client: mockClient as never, log });
|
|
||||||
await cmd.parseAsync(['-o', 'test-backup.json', '-r', 'servers,profiles'], { from: 'user' });
|
|
||||||
|
|
||||||
expect(mockClient.post).toHaveBeenCalledWith('/api/v1/backup', {
|
|
||||||
resources: ['servers', 'profiles'],
|
|
||||||
});
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('restore command', () => {
|
|
||||||
const testFile = 'test-restore-input.json';
|
|
||||||
|
|
||||||
beforeEach(() => {
|
|
||||||
vi.resetAllMocks();
|
|
||||||
fs.writeFileSync(testFile, JSON.stringify({
|
|
||||||
version: '1', servers: [], profiles: [], projects: [],
|
|
||||||
}));
|
|
||||||
});
|
|
||||||
|
|
||||||
afterEach(() => {
|
|
||||||
try { fs.unlinkSync(testFile); } catch { /* ignore */ }
|
|
||||||
});
|
|
||||||
|
|
||||||
it('creates restore command', () => {
|
|
||||||
const cmd = createRestoreCommand({ client: mockClient as never, log });
|
|
||||||
expect(cmd.name()).toBe('restore');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('reads file and calls API', async () => {
|
|
||||||
mockClient.post.mockResolvedValue({
|
|
||||||
serversCreated: 1, serversSkipped: 0,
|
|
||||||
profilesCreated: 0, profilesSkipped: 0,
|
|
||||||
projectsCreated: 0, projectsSkipped: 0,
|
|
||||||
errors: [],
|
|
||||||
});
|
|
||||||
|
|
||||||
const cmd = createRestoreCommand({ client: mockClient as never, log });
|
|
||||||
await cmd.parseAsync(['-i', testFile], { from: 'user' });
|
|
||||||
|
|
||||||
expect(mockClient.post).toHaveBeenCalledWith('/api/v1/restore', expect.objectContaining({
|
|
||||||
bundle: expect.objectContaining({ version: '1' }),
|
|
||||||
conflictStrategy: 'skip',
|
|
||||||
}));
|
|
||||||
expect(log).toHaveBeenCalledWith('Restore complete:');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('reports errors from restore', async () => {
|
|
||||||
mockClient.post.mockResolvedValue({
|
|
||||||
serversCreated: 0, serversSkipped: 0,
|
|
||||||
profilesCreated: 0, profilesSkipped: 0,
|
|
||||||
projectsCreated: 0, projectsSkipped: 0,
|
|
||||||
errors: ['Server "x" already exists'],
|
|
||||||
});
|
|
||||||
|
|
||||||
const cmd = createRestoreCommand({ client: mockClient as never, log });
|
|
||||||
await cmd.parseAsync(['-i', testFile], { from: 'user' });
|
|
||||||
|
|
||||||
expect(log).toHaveBeenCalledWith(expect.stringContaining('Errors'));
|
|
||||||
});
|
|
||||||
|
|
||||||
it('logs error for missing file', async () => {
|
|
||||||
const cmd = createRestoreCommand({ client: mockClient as never, log });
|
|
||||||
await cmd.parseAsync(['-i', 'nonexistent.json'], { from: 'user' });
|
|
||||||
|
|
||||||
expect(log).toHaveBeenCalledWith(expect.stringContaining('not found'));
|
|
||||||
expect(mockClient.post).not.toHaveBeenCalled();
|
|
||||||
});
|
|
||||||
});
|
|
||||||
@@ -1,159 +0,0 @@
|
|||||||
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
|
|
||||||
import { writeFileSync, readFileSync, mkdtempSync, rmSync } from 'node:fs';
|
|
||||||
import { join } from 'node:path';
|
|
||||||
import { tmpdir } from 'node:os';
|
|
||||||
import { createConfigCommand } from '../../src/commands/config.js';
|
|
||||||
import type { ApiClient } from '../../src/api-client.js';
|
|
||||||
import { saveCredentials, loadCredentials } from '../../src/auth/index.js';
|
|
||||||
|
|
||||||
function mockClient(): ApiClient {
|
|
||||||
return {
|
|
||||||
get: vi.fn(async () => ({
|
|
||||||
mcpServers: {
|
|
||||||
'slack--default': { command: 'npx', args: ['-y', '@anthropic/slack-mcp'], env: { WORKSPACE: 'test' } },
|
|
||||||
'github--default': { command: 'npx', args: ['-y', '@anthropic/github-mcp'] },
|
|
||||||
},
|
|
||||||
})),
|
|
||||||
post: vi.fn(async () => ({ token: 'impersonated-tok', user: { email: 'other@test.com' } })),
|
|
||||||
put: vi.fn(async () => ({})),
|
|
||||||
delete: vi.fn(async () => {}),
|
|
||||||
} as unknown as ApiClient;
|
|
||||||
}
|
|
||||||
|
|
||||||
describe('config claude-generate', () => {
|
|
||||||
let client: ReturnType<typeof mockClient>;
|
|
||||||
let output: string[];
|
|
||||||
let tmpDir: string;
|
|
||||||
const log = (...args: string[]) => output.push(args.join(' '));
|
|
||||||
|
|
||||||
beforeEach(() => {
|
|
||||||
client = mockClient();
|
|
||||||
output = [];
|
|
||||||
tmpDir = mkdtempSync(join(tmpdir(), 'mcpctl-config-claude-'));
|
|
||||||
});
|
|
||||||
|
|
||||||
afterEach(() => {
|
|
||||||
rmSync(tmpDir, { recursive: true, force: true });
|
|
||||||
});
|
|
||||||
|
|
||||||
it('generates .mcp.json from project config', async () => {
|
|
||||||
const outPath = join(tmpDir, '.mcp.json');
|
|
||||||
const cmd = createConfigCommand(
|
|
||||||
{ configDeps: { configDir: tmpDir }, log },
|
|
||||||
{ client, credentialsDeps: { configDir: tmpDir }, log },
|
|
||||||
);
|
|
||||||
await cmd.parseAsync(['claude-generate', '--project', 'proj-1', '-o', outPath], { from: 'user' });
|
|
||||||
|
|
||||||
expect(client.get).toHaveBeenCalledWith('/api/v1/projects/proj-1/mcp-config');
|
|
||||||
const written = JSON.parse(readFileSync(outPath, 'utf-8'));
|
|
||||||
expect(written.mcpServers['slack--default']).toBeDefined();
|
|
||||||
expect(output.join('\n')).toContain('2 server(s)');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('prints to stdout with --stdout', async () => {
|
|
||||||
const cmd = createConfigCommand(
|
|
||||||
{ configDeps: { configDir: tmpDir }, log },
|
|
||||||
{ client, credentialsDeps: { configDir: tmpDir }, log },
|
|
||||||
);
|
|
||||||
await cmd.parseAsync(['claude-generate', '--project', 'proj-1', '--stdout'], { from: 'user' });
|
|
||||||
|
|
||||||
expect(output[0]).toContain('mcpServers');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('merges with existing .mcp.json', async () => {
|
|
||||||
const outPath = join(tmpDir, '.mcp.json');
|
|
||||||
writeFileSync(outPath, JSON.stringify({
|
|
||||||
mcpServers: { 'existing--server': { command: 'echo', args: [] } },
|
|
||||||
}));
|
|
||||||
|
|
||||||
const cmd = createConfigCommand(
|
|
||||||
{ configDeps: { configDir: tmpDir }, log },
|
|
||||||
{ client, credentialsDeps: { configDir: tmpDir }, log },
|
|
||||||
);
|
|
||||||
await cmd.parseAsync(['claude-generate', '--project', 'proj-1', '-o', outPath, '--merge'], { from: 'user' });
|
|
||||||
|
|
||||||
const written = JSON.parse(readFileSync(outPath, 'utf-8'));
|
|
||||||
expect(written.mcpServers['existing--server']).toBeDefined();
|
|
||||||
expect(written.mcpServers['slack--default']).toBeDefined();
|
|
||||||
expect(output.join('\n')).toContain('3 server(s)');
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('config impersonate', () => {
|
|
||||||
let client: ReturnType<typeof mockClient>;
|
|
||||||
let output: string[];
|
|
||||||
let tmpDir: string;
|
|
||||||
const log = (...args: string[]) => output.push(args.join(' '));
|
|
||||||
|
|
||||||
beforeEach(() => {
|
|
||||||
client = mockClient();
|
|
||||||
output = [];
|
|
||||||
tmpDir = mkdtempSync(join(tmpdir(), 'mcpctl-config-impersonate-'));
|
|
||||||
});
|
|
||||||
|
|
||||||
afterEach(() => {
|
|
||||||
rmSync(tmpDir, { recursive: true, force: true });
|
|
||||||
});
|
|
||||||
|
|
||||||
it('impersonates a user and saves backup', async () => {
|
|
||||||
saveCredentials({ token: 'admin-tok', mcpdUrl: 'http://localhost:3100', user: 'admin@test.com' }, { configDir: tmpDir });
|
|
||||||
|
|
||||||
const cmd = createConfigCommand(
|
|
||||||
{ configDeps: { configDir: tmpDir }, log },
|
|
||||||
{ client, credentialsDeps: { configDir: tmpDir }, log },
|
|
||||||
);
|
|
||||||
await cmd.parseAsync(['impersonate', 'other@test.com'], { from: 'user' });
|
|
||||||
|
|
||||||
expect(client.post).toHaveBeenCalledWith('/api/v1/auth/impersonate', { email: 'other@test.com' });
|
|
||||||
expect(output.join('\n')).toContain('Impersonating other@test.com');
|
|
||||||
|
|
||||||
const creds = loadCredentials({ configDir: tmpDir });
|
|
||||||
expect(creds!.user).toBe('other@test.com');
|
|
||||||
expect(creds!.token).toBe('impersonated-tok');
|
|
||||||
|
|
||||||
// Backup exists
|
|
||||||
const backup = JSON.parse(readFileSync(join(tmpDir, 'credentials-backup'), 'utf-8'));
|
|
||||||
expect(backup.user).toBe('admin@test.com');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('quits impersonation and restores backup', async () => {
|
|
||||||
// Set up current (impersonated) credentials
|
|
||||||
saveCredentials({ token: 'impersonated-tok', mcpdUrl: 'http://localhost:3100', user: 'other@test.com' }, { configDir: tmpDir });
|
|
||||||
// Set up backup (original) credentials
|
|
||||||
writeFileSync(join(tmpDir, 'credentials-backup'), JSON.stringify({
|
|
||||||
token: 'admin-tok', mcpdUrl: 'http://localhost:3100', user: 'admin@test.com',
|
|
||||||
}));
|
|
||||||
|
|
||||||
const cmd = createConfigCommand(
|
|
||||||
{ configDeps: { configDir: tmpDir }, log },
|
|
||||||
{ client, credentialsDeps: { configDir: tmpDir }, log },
|
|
||||||
);
|
|
||||||
await cmd.parseAsync(['impersonate', '--quit'], { from: 'user' });
|
|
||||||
|
|
||||||
expect(output.join('\n')).toContain('Returned to admin@test.com');
|
|
||||||
|
|
||||||
const creds = loadCredentials({ configDir: tmpDir });
|
|
||||||
expect(creds!.user).toBe('admin@test.com');
|
|
||||||
expect(creds!.token).toBe('admin-tok');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('errors when not logged in', async () => {
|
|
||||||
const cmd = createConfigCommand(
|
|
||||||
{ configDeps: { configDir: tmpDir }, log },
|
|
||||||
{ client, credentialsDeps: { configDir: tmpDir }, log },
|
|
||||||
);
|
|
||||||
await cmd.parseAsync(['impersonate', 'other@test.com'], { from: 'user' });
|
|
||||||
|
|
||||||
expect(output.join('\n')).toContain('Not logged in');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('errors when quitting with no backup', async () => {
|
|
||||||
const cmd = createConfigCommand(
|
|
||||||
{ configDeps: { configDir: tmpDir }, log },
|
|
||||||
{ client, credentialsDeps: { configDir: tmpDir }, log },
|
|
||||||
);
|
|
||||||
await cmd.parseAsync(['impersonate', '--quit'], { from: 'user' });
|
|
||||||
|
|
||||||
expect(output.join('\n')).toContain('No impersonation session to quit');
|
|
||||||
});
|
|
||||||
});
|
|
||||||
@@ -34,38 +34,23 @@ describe('config view', () => {
|
|||||||
await cmd.parseAsync(['view'], { from: 'user' });
|
await cmd.parseAsync(['view'], { from: 'user' });
|
||||||
expect(output).toHaveLength(1);
|
expect(output).toHaveLength(1);
|
||||||
const parsed = JSON.parse(output[0]) as Record<string, unknown>;
|
const parsed = JSON.parse(output[0]) as Record<string, unknown>;
|
||||||
expect(parsed['mcplocalUrl']).toBe('http://localhost:3200');
|
expect(parsed['daemonUrl']).toBe('http://localhost:3000');
|
||||||
expect(parsed['mcpdUrl']).toBe('http://localhost:3100');
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it('outputs config as YAML with --output yaml', async () => {
|
it('outputs config as YAML with --output yaml', async () => {
|
||||||
const cmd = makeCommand();
|
const cmd = makeCommand();
|
||||||
await cmd.parseAsync(['view', '-o', 'yaml'], { from: 'user' });
|
await cmd.parseAsync(['view', '-o', 'yaml'], { from: 'user' });
|
||||||
expect(output[0]).toContain('mcplocalUrl:');
|
expect(output[0]).toContain('daemonUrl:');
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('config set', () => {
|
describe('config set', () => {
|
||||||
it('sets mcplocalUrl', async () => {
|
it('sets a string value', async () => {
|
||||||
const cmd = makeCommand();
|
const cmd = makeCommand();
|
||||||
await cmd.parseAsync(['set', 'mcplocalUrl', 'http://new:9000'], { from: 'user' });
|
await cmd.parseAsync(['set', 'daemonUrl', 'http://new:9000'], { from: 'user' });
|
||||||
expect(output[0]).toContain('mcplocalUrl');
|
expect(output[0]).toContain('daemonUrl');
|
||||||
const config = loadConfig({ configDir: tempDir });
|
const config = loadConfig({ configDir: tempDir });
|
||||||
expect(config.mcplocalUrl).toBe('http://new:9000');
|
expect(config.daemonUrl).toBe('http://new:9000');
|
||||||
});
|
|
||||||
|
|
||||||
it('sets mcpdUrl', async () => {
|
|
||||||
const cmd = makeCommand();
|
|
||||||
await cmd.parseAsync(['set', 'mcpdUrl', 'http://remote:3100'], { from: 'user' });
|
|
||||||
const config = loadConfig({ configDir: tempDir });
|
|
||||||
expect(config.mcpdUrl).toBe('http://remote:3100');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('maps daemonUrl to mcplocalUrl for backward compat', async () => {
|
|
||||||
const cmd = makeCommand();
|
|
||||||
await cmd.parseAsync(['set', 'daemonUrl', 'http://legacy:3000'], { from: 'user' });
|
|
||||||
const config = loadConfig({ configDir: tempDir });
|
|
||||||
expect(config.mcplocalUrl).toBe('http://legacy:3000');
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it('sets cacheTTLMs as integer', async () => {
|
it('sets cacheTTLMs as integer', async () => {
|
||||||
@@ -102,13 +87,13 @@ describe('config path', () => {
|
|||||||
describe('config reset', () => {
|
describe('config reset', () => {
|
||||||
it('resets to defaults', async () => {
|
it('resets to defaults', async () => {
|
||||||
// First set a custom value
|
// First set a custom value
|
||||||
saveConfig({ ...DEFAULT_CONFIG, mcplocalUrl: 'http://custom' }, { configDir: tempDir });
|
saveConfig({ ...DEFAULT_CONFIG, daemonUrl: 'http://custom' }, { configDir: tempDir });
|
||||||
|
|
||||||
const cmd = makeCommand();
|
const cmd = makeCommand();
|
||||||
await cmd.parseAsync(['reset'], { from: 'user' });
|
await cmd.parseAsync(['reset'], { from: 'user' });
|
||||||
expect(output[0]).toContain('reset');
|
expect(output[0]).toContain('reset');
|
||||||
|
|
||||||
const config = loadConfig({ configDir: tempDir });
|
const config = loadConfig({ configDir: tempDir });
|
||||||
expect(config.mcplocalUrl).toBe(DEFAULT_CONFIG.mcplocalUrl);
|
expect(config.daemonUrl).toBe(DEFAULT_CONFIG.daemonUrl);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -1,450 +0,0 @@
|
|||||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
|
||||||
import { createCreateCommand } from '../../src/commands/create.js';
|
|
||||||
import { type ApiClient, ApiError } from '../../src/api-client.js';
|
|
||||||
|
|
||||||
function mockClient(): ApiClient {
|
|
||||||
return {
|
|
||||||
get: vi.fn(async () => []),
|
|
||||||
post: vi.fn(async () => ({ id: 'new-id', name: 'test' })),
|
|
||||||
put: vi.fn(async () => ({})),
|
|
||||||
delete: vi.fn(async () => {}),
|
|
||||||
} as unknown as ApiClient;
|
|
||||||
}
|
|
||||||
|
|
||||||
describe('create command', () => {
|
|
||||||
let client: ReturnType<typeof mockClient>;
|
|
||||||
let output: string[];
|
|
||||||
const log = (...args: unknown[]) => output.push(args.map(String).join(' '));
|
|
||||||
|
|
||||||
beforeEach(() => {
|
|
||||||
client = mockClient();
|
|
||||||
output = [];
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('create server', () => {
|
|
||||||
it('creates a server with minimal flags', async () => {
|
|
||||||
const cmd = createCreateCommand({ client, log });
|
|
||||||
await cmd.parseAsync(['server', 'my-server'], { from: 'user' });
|
|
||||||
expect(client.post).toHaveBeenCalledWith('/api/v1/servers', expect.objectContaining({
|
|
||||||
name: 'my-server',
|
|
||||||
transport: 'STDIO',
|
|
||||||
replicas: 1,
|
|
||||||
}));
|
|
||||||
expect(output.join('\n')).toContain("server 'test' created");
|
|
||||||
});
|
|
||||||
|
|
||||||
it('creates a server with all flags', async () => {
|
|
||||||
const cmd = createCreateCommand({ client, log });
|
|
||||||
await cmd.parseAsync([
|
|
||||||
'server', 'ha-mcp',
|
|
||||||
'-d', 'Home Assistant MCP',
|
|
||||||
'--docker-image', 'ghcr.io/ha-mcp:latest',
|
|
||||||
'--transport', 'STREAMABLE_HTTP',
|
|
||||||
'--external-url', 'http://localhost:8086/mcp',
|
|
||||||
'--container-port', '3000',
|
|
||||||
'--replicas', '2',
|
|
||||||
'--command', 'python',
|
|
||||||
'--command', '-c',
|
|
||||||
'--command', 'print("hello")',
|
|
||||||
'--env', 'API_KEY=secretRef:creds:API_KEY',
|
|
||||||
'--env', 'BASE_URL=http://localhost',
|
|
||||||
], { from: 'user' });
|
|
||||||
|
|
||||||
expect(client.post).toHaveBeenCalledWith('/api/v1/servers', {
|
|
||||||
name: 'ha-mcp',
|
|
||||||
description: 'Home Assistant MCP',
|
|
||||||
dockerImage: 'ghcr.io/ha-mcp:latest',
|
|
||||||
transport: 'STREAMABLE_HTTP',
|
|
||||||
externalUrl: 'http://localhost:8086/mcp',
|
|
||||||
containerPort: 3000,
|
|
||||||
replicas: 2,
|
|
||||||
command: ['python', '-c', 'print("hello")'],
|
|
||||||
env: [
|
|
||||||
{ name: 'API_KEY', valueFrom: { secretRef: { name: 'creds', key: 'API_KEY' } } },
|
|
||||||
{ name: 'BASE_URL', value: 'http://localhost' },
|
|
||||||
],
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
it('defaults transport to STDIO', async () => {
|
|
||||||
const cmd = createCreateCommand({ client, log });
|
|
||||||
await cmd.parseAsync(['server', 'test'], { from: 'user' });
|
|
||||||
expect(client.post).toHaveBeenCalledWith('/api/v1/servers', expect.objectContaining({
|
|
||||||
transport: 'STDIO',
|
|
||||||
}));
|
|
||||||
});
|
|
||||||
|
|
||||||
it('strips null values from template when using --from-template', async () => {
|
|
||||||
vi.mocked(client.get).mockResolvedValueOnce([{
|
|
||||||
id: 'tpl-1',
|
|
||||||
name: 'grafana',
|
|
||||||
version: '1.0.0',
|
|
||||||
description: 'Grafana MCP',
|
|
||||||
packageName: '@leval/mcp-grafana',
|
|
||||||
dockerImage: null,
|
|
||||||
transport: 'STDIO',
|
|
||||||
repositoryUrl: 'https://github.com/test',
|
|
||||||
externalUrl: null,
|
|
||||||
command: null,
|
|
||||||
containerPort: null,
|
|
||||||
replicas: 1,
|
|
||||||
env: [{ name: 'TOKEN', required: true, description: 'A token' }],
|
|
||||||
healthCheck: { tool: 'test', arguments: {} },
|
|
||||||
createdAt: '2025-01-01',
|
|
||||||
updatedAt: '2025-01-01',
|
|
||||||
}] as never);
|
|
||||||
const cmd = createCreateCommand({ client, log });
|
|
||||||
await cmd.parseAsync([
|
|
||||||
'server', 'my-grafana', '--from-template=grafana',
|
|
||||||
'--env', 'TOKEN=secretRef:creds:TOKEN',
|
|
||||||
], { from: 'user' });
|
|
||||||
const call = vi.mocked(client.post).mock.calls[0]![1] as Record<string, unknown>;
|
|
||||||
// null fields from template should NOT be in the body
|
|
||||||
expect(call).not.toHaveProperty('dockerImage');
|
|
||||||
expect(call).not.toHaveProperty('externalUrl');
|
|
||||||
expect(call).not.toHaveProperty('command');
|
|
||||||
expect(call).not.toHaveProperty('containerPort');
|
|
||||||
// non-null fields should be present
|
|
||||||
expect(call.packageName).toBe('@leval/mcp-grafana');
|
|
||||||
expect(call.healthCheck).toEqual({ tool: 'test', arguments: {} });
|
|
||||||
expect(call.templateName).toBe('grafana');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('throws on 409 without --force', async () => {
|
|
||||||
vi.mocked(client.post).mockRejectedValueOnce(new ApiError(409, '{"error":"Server already exists: my-server"}'));
|
|
||||||
const cmd = createCreateCommand({ client, log });
|
|
||||||
await expect(cmd.parseAsync(['server', 'my-server'], { from: 'user' })).rejects.toThrow('API error 409');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('updates existing server on 409 with --force', async () => {
|
|
||||||
vi.mocked(client.post).mockRejectedValueOnce(new ApiError(409, '{"error":"Server already exists"}'));
|
|
||||||
vi.mocked(client.get).mockResolvedValueOnce([{ id: 'srv-1', name: 'my-server' }] as never);
|
|
||||||
const cmd = createCreateCommand({ client, log });
|
|
||||||
await cmd.parseAsync(['server', 'my-server', '--force'], { from: 'user' });
|
|
||||||
expect(client.put).toHaveBeenCalledWith('/api/v1/servers/srv-1', expect.objectContaining({
|
|
||||||
transport: 'STDIO',
|
|
||||||
}));
|
|
||||||
expect(output.join('\n')).toContain("server 'my-server' updated");
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('create secret', () => {
|
|
||||||
it('creates a secret with --data flags', async () => {
|
|
||||||
const cmd = createCreateCommand({ client, log });
|
|
||||||
await cmd.parseAsync([
|
|
||||||
'secret', 'ha-creds',
|
|
||||||
'--data', 'TOKEN=abc123',
|
|
||||||
'--data', 'URL=https://ha.local',
|
|
||||||
], { from: 'user' });
|
|
||||||
expect(client.post).toHaveBeenCalledWith('/api/v1/secrets', {
|
|
||||||
name: 'ha-creds',
|
|
||||||
data: { TOKEN: 'abc123', URL: 'https://ha.local' },
|
|
||||||
});
|
|
||||||
expect(output.join('\n')).toContain("secret 'test' created");
|
|
||||||
});
|
|
||||||
|
|
||||||
it('creates a secret with empty data', async () => {
|
|
||||||
const cmd = createCreateCommand({ client, log });
|
|
||||||
await cmd.parseAsync(['secret', 'empty-secret'], { from: 'user' });
|
|
||||||
expect(client.post).toHaveBeenCalledWith('/api/v1/secrets', {
|
|
||||||
name: 'empty-secret',
|
|
||||||
data: {},
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
it('throws on 409 without --force', async () => {
|
|
||||||
vi.mocked(client.post).mockRejectedValueOnce(new ApiError(409, '{"error":"Secret already exists: my-creds"}'));
|
|
||||||
const cmd = createCreateCommand({ client, log });
|
|
||||||
await expect(cmd.parseAsync(['secret', 'my-creds', '--data', 'KEY=val'], { from: 'user' })).rejects.toThrow('API error 409');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('updates existing secret on 409 with --force', async () => {
|
|
||||||
vi.mocked(client.post).mockRejectedValueOnce(new ApiError(409, '{"error":"Secret already exists"}'));
|
|
||||||
vi.mocked(client.get).mockResolvedValueOnce([{ id: 'sec-1', name: 'my-creds' }] as never);
|
|
||||||
const cmd = createCreateCommand({ client, log });
|
|
||||||
await cmd.parseAsync(['secret', 'my-creds', '--data', 'KEY=val', '--force'], { from: 'user' });
|
|
||||||
expect(client.put).toHaveBeenCalledWith('/api/v1/secrets/sec-1', { data: { KEY: 'val' } });
|
|
||||||
expect(output.join('\n')).toContain("secret 'my-creds' updated");
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('create project', () => {
|
|
||||||
it('creates a project', async () => {
|
|
||||||
const cmd = createCreateCommand({ client, log });
|
|
||||||
await cmd.parseAsync(['project', 'my-project', '-d', 'A test project'], { from: 'user' });
|
|
||||||
expect(client.post).toHaveBeenCalledWith('/api/v1/projects', {
|
|
||||||
name: 'my-project',
|
|
||||||
description: 'A test project',
|
|
||||||
proxyMode: 'direct',
|
|
||||||
});
|
|
||||||
expect(output.join('\n')).toContain("project 'test' created");
|
|
||||||
});
|
|
||||||
|
|
||||||
it('creates a project with no description', async () => {
|
|
||||||
const cmd = createCreateCommand({ client, log });
|
|
||||||
await cmd.parseAsync(['project', 'minimal'], { from: 'user' });
|
|
||||||
expect(client.post).toHaveBeenCalledWith('/api/v1/projects', {
|
|
||||||
name: 'minimal',
|
|
||||||
description: '',
|
|
||||||
proxyMode: 'direct',
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
it('updates existing project on 409 with --force', async () => {
|
|
||||||
vi.mocked(client.post).mockRejectedValueOnce(new ApiError(409, '{"error":"Project already exists"}'));
|
|
||||||
vi.mocked(client.get).mockResolvedValueOnce([{ id: 'proj-1', name: 'my-proj' }] as never);
|
|
||||||
const cmd = createCreateCommand({ client, log });
|
|
||||||
await cmd.parseAsync(['project', 'my-proj', '-d', 'updated', '--force'], { from: 'user' });
|
|
||||||
expect(client.put).toHaveBeenCalledWith('/api/v1/projects/proj-1', { description: 'updated', proxyMode: 'direct' });
|
|
||||||
expect(output.join('\n')).toContain("project 'my-proj' updated");
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('create user', () => {
|
|
||||||
it('creates a user with password and name', async () => {
|
|
||||||
vi.mocked(client.post).mockResolvedValueOnce({ id: 'usr-1', email: 'alice@test.com' });
|
|
||||||
const cmd = createCreateCommand({ client, log });
|
|
||||||
await cmd.parseAsync([
|
|
||||||
'user', 'alice@test.com',
|
|
||||||
'--password', 'secret123',
|
|
||||||
'--name', 'Alice',
|
|
||||||
], { from: 'user' });
|
|
||||||
|
|
||||||
expect(client.post).toHaveBeenCalledWith('/api/v1/users', {
|
|
||||||
email: 'alice@test.com',
|
|
||||||
password: 'secret123',
|
|
||||||
name: 'Alice',
|
|
||||||
});
|
|
||||||
expect(output.join('\n')).toContain("user 'alice@test.com' created");
|
|
||||||
});
|
|
||||||
|
|
||||||
it('does not send role field (RBAC is the auth mechanism)', async () => {
|
|
||||||
vi.mocked(client.post).mockResolvedValueOnce({ id: 'usr-1', email: 'admin@test.com' });
|
|
||||||
const cmd = createCreateCommand({ client, log });
|
|
||||||
await cmd.parseAsync([
|
|
||||||
'user', 'admin@test.com',
|
|
||||||
'--password', 'pass123',
|
|
||||||
], { from: 'user' });
|
|
||||||
|
|
||||||
const callBody = vi.mocked(client.post).mock.calls[0]![1] as Record<string, unknown>;
|
|
||||||
expect(callBody).not.toHaveProperty('role');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('requires --password', async () => {
|
|
||||||
const cmd = createCreateCommand({ client, log });
|
|
||||||
await expect(cmd.parseAsync(['user', 'alice@test.com'], { from: 'user' })).rejects.toThrow('--password is required');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('throws on 409 without --force', async () => {
|
|
||||||
vi.mocked(client.post).mockRejectedValueOnce(new ApiError(409, '{"error":"User already exists"}'));
|
|
||||||
const cmd = createCreateCommand({ client, log });
|
|
||||||
await expect(
|
|
||||||
cmd.parseAsync(['user', 'alice@test.com', '--password', 'pass'], { from: 'user' }),
|
|
||||||
).rejects.toThrow('API error 409');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('updates existing user on 409 with --force', async () => {
|
|
||||||
vi.mocked(client.post).mockRejectedValueOnce(new ApiError(409, '{"error":"User already exists"}'));
|
|
||||||
vi.mocked(client.get).mockResolvedValueOnce([{ id: 'usr-1', email: 'alice@test.com' }] as never);
|
|
||||||
const cmd = createCreateCommand({ client, log });
|
|
||||||
await cmd.parseAsync([
|
|
||||||
'user', 'alice@test.com', '--password', 'newpass', '--name', 'Alice New', '--force',
|
|
||||||
], { from: 'user' });
|
|
||||||
|
|
||||||
expect(client.put).toHaveBeenCalledWith('/api/v1/users/usr-1', {
|
|
||||||
password: 'newpass',
|
|
||||||
name: 'Alice New',
|
|
||||||
});
|
|
||||||
expect(output.join('\n')).toContain("user 'alice@test.com' updated");
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('create group', () => {
|
|
||||||
it('creates a group with members', async () => {
|
|
||||||
vi.mocked(client.post).mockResolvedValueOnce({ id: 'grp-1', name: 'dev-team' });
|
|
||||||
const cmd = createCreateCommand({ client, log });
|
|
||||||
await cmd.parseAsync([
|
|
||||||
'group', 'dev-team',
|
|
||||||
'--description', 'Development team',
|
|
||||||
'--member', 'alice@test.com',
|
|
||||||
'--member', 'bob@test.com',
|
|
||||||
], { from: 'user' });
|
|
||||||
|
|
||||||
expect(client.post).toHaveBeenCalledWith('/api/v1/groups', {
|
|
||||||
name: 'dev-team',
|
|
||||||
description: 'Development team',
|
|
||||||
members: ['alice@test.com', 'bob@test.com'],
|
|
||||||
});
|
|
||||||
expect(output.join('\n')).toContain("group 'dev-team' created");
|
|
||||||
});
|
|
||||||
|
|
||||||
it('creates a group with no members', async () => {
|
|
||||||
vi.mocked(client.post).mockResolvedValueOnce({ id: 'grp-1', name: 'empty-group' });
|
|
||||||
const cmd = createCreateCommand({ client, log });
|
|
||||||
await cmd.parseAsync(['group', 'empty-group'], { from: 'user' });
|
|
||||||
|
|
||||||
expect(client.post).toHaveBeenCalledWith('/api/v1/groups', {
|
|
||||||
name: 'empty-group',
|
|
||||||
members: [],
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
it('throws on 409 without --force', async () => {
|
|
||||||
vi.mocked(client.post).mockRejectedValueOnce(new ApiError(409, '{"error":"Group already exists"}'));
|
|
||||||
const cmd = createCreateCommand({ client, log });
|
|
||||||
await expect(
|
|
||||||
cmd.parseAsync(['group', 'dev-team'], { from: 'user' }),
|
|
||||||
).rejects.toThrow('API error 409');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('updates existing group on 409 with --force', async () => {
|
|
||||||
vi.mocked(client.post).mockRejectedValueOnce(new ApiError(409, '{"error":"Group already exists"}'));
|
|
||||||
vi.mocked(client.get).mockResolvedValueOnce([{ id: 'grp-1', name: 'dev-team' }] as never);
|
|
||||||
const cmd = createCreateCommand({ client, log });
|
|
||||||
await cmd.parseAsync([
|
|
||||||
'group', 'dev-team', '--member', 'new@test.com', '--force',
|
|
||||||
], { from: 'user' });
|
|
||||||
|
|
||||||
expect(client.put).toHaveBeenCalledWith('/api/v1/groups/grp-1', {
|
|
||||||
members: ['new@test.com'],
|
|
||||||
});
|
|
||||||
expect(output.join('\n')).toContain("group 'dev-team' updated");
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('create rbac', () => {
|
|
||||||
it('creates an RBAC definition with subjects and bindings', async () => {
|
|
||||||
vi.mocked(client.post).mockResolvedValueOnce({ id: 'rbac-1', name: 'developers' });
|
|
||||||
const cmd = createCreateCommand({ client, log });
|
|
||||||
await cmd.parseAsync([
|
|
||||||
'rbac', 'developers',
|
|
||||||
'--subject', 'User:alice@test.com',
|
|
||||||
'--subject', 'Group:dev-team',
|
|
||||||
'--binding', 'edit:servers',
|
|
||||||
'--binding', 'view:instances',
|
|
||||||
], { from: 'user' });
|
|
||||||
|
|
||||||
expect(client.post).toHaveBeenCalledWith('/api/v1/rbac', {
|
|
||||||
name: 'developers',
|
|
||||||
subjects: [
|
|
||||||
{ kind: 'User', name: 'alice@test.com' },
|
|
||||||
{ kind: 'Group', name: 'dev-team' },
|
|
||||||
],
|
|
||||||
roleBindings: [
|
|
||||||
{ role: 'edit', resource: 'servers' },
|
|
||||||
{ role: 'view', resource: 'instances' },
|
|
||||||
],
|
|
||||||
});
|
|
||||||
expect(output.join('\n')).toContain("rbac 'developers' created");
|
|
||||||
});
|
|
||||||
|
|
||||||
it('creates an RBAC definition with wildcard resource', async () => {
|
|
||||||
vi.mocked(client.post).mockResolvedValueOnce({ id: 'rbac-1', name: 'admins' });
|
|
||||||
const cmd = createCreateCommand({ client, log });
|
|
||||||
await cmd.parseAsync([
|
|
||||||
'rbac', 'admins',
|
|
||||||
'--subject', 'User:admin@test.com',
|
|
||||||
'--binding', 'edit:*',
|
|
||||||
], { from: 'user' });
|
|
||||||
|
|
||||||
expect(client.post).toHaveBeenCalledWith('/api/v1/rbac', {
|
|
||||||
name: 'admins',
|
|
||||||
subjects: [{ kind: 'User', name: 'admin@test.com' }],
|
|
||||||
roleBindings: [{ role: 'edit', resource: '*' }],
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
it('creates an RBAC definition with empty subjects and bindings', async () => {
|
|
||||||
vi.mocked(client.post).mockResolvedValueOnce({ id: 'rbac-1', name: 'empty' });
|
|
||||||
const cmd = createCreateCommand({ client, log });
|
|
||||||
await cmd.parseAsync(['rbac', 'empty'], { from: 'user' });
|
|
||||||
|
|
||||||
expect(client.post).toHaveBeenCalledWith('/api/v1/rbac', {
|
|
||||||
name: 'empty',
|
|
||||||
subjects: [],
|
|
||||||
roleBindings: [],
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
it('throws on invalid subject format', async () => {
|
|
||||||
const cmd = createCreateCommand({ client, log });
|
|
||||||
await expect(
|
|
||||||
cmd.parseAsync(['rbac', 'bad', '--subject', 'no-colon'], { from: 'user' }),
|
|
||||||
).rejects.toThrow('Invalid subject format');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('throws on invalid binding format', async () => {
|
|
||||||
const cmd = createCreateCommand({ client, log });
|
|
||||||
await expect(
|
|
||||||
cmd.parseAsync(['rbac', 'bad', '--binding', 'no-colon'], { from: 'user' }),
|
|
||||||
).rejects.toThrow('Invalid binding format');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('throws on 409 without --force', async () => {
|
|
||||||
vi.mocked(client.post).mockRejectedValueOnce(new ApiError(409, '{"error":"RBAC already exists"}'));
|
|
||||||
const cmd = createCreateCommand({ client, log });
|
|
||||||
await expect(
|
|
||||||
cmd.parseAsync(['rbac', 'developers', '--subject', 'User:a@b.com', '--binding', 'edit:servers'], { from: 'user' }),
|
|
||||||
).rejects.toThrow('API error 409');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('updates existing RBAC on 409 with --force', async () => {
|
|
||||||
vi.mocked(client.post).mockRejectedValueOnce(new ApiError(409, '{"error":"RBAC already exists"}'));
|
|
||||||
vi.mocked(client.get).mockResolvedValueOnce([{ id: 'rbac-1', name: 'developers' }] as never);
|
|
||||||
const cmd = createCreateCommand({ client, log });
|
|
||||||
await cmd.parseAsync([
|
|
||||||
'rbac', 'developers',
|
|
||||||
'--subject', 'User:new@test.com',
|
|
||||||
'--binding', 'edit:*',
|
|
||||||
'--force',
|
|
||||||
], { from: 'user' });
|
|
||||||
|
|
||||||
expect(client.put).toHaveBeenCalledWith('/api/v1/rbac/rbac-1', {
|
|
||||||
subjects: [{ kind: 'User', name: 'new@test.com' }],
|
|
||||||
roleBindings: [{ role: 'edit', resource: '*' }],
|
|
||||||
});
|
|
||||||
expect(output.join('\n')).toContain("rbac 'developers' updated");
|
|
||||||
});
|
|
||||||
|
|
||||||
it('creates an RBAC definition with operation bindings', async () => {
|
|
||||||
vi.mocked(client.post).mockResolvedValueOnce({ id: 'rbac-1', name: 'ops' });
|
|
||||||
const cmd = createCreateCommand({ client, log });
|
|
||||||
await cmd.parseAsync([
|
|
||||||
'rbac', 'ops',
|
|
||||||
'--subject', 'Group:ops-team',
|
|
||||||
'--binding', 'edit:servers',
|
|
||||||
'--operation', 'logs',
|
|
||||||
'--operation', 'backup',
|
|
||||||
], { from: 'user' });
|
|
||||||
|
|
||||||
expect(client.post).toHaveBeenCalledWith('/api/v1/rbac', {
|
|
||||||
name: 'ops',
|
|
||||||
subjects: [{ kind: 'Group', name: 'ops-team' }],
|
|
||||||
roleBindings: [
|
|
||||||
{ role: 'edit', resource: 'servers' },
|
|
||||||
{ role: 'run', action: 'logs' },
|
|
||||||
{ role: 'run', action: 'backup' },
|
|
||||||
],
|
|
||||||
});
|
|
||||||
expect(output.join('\n')).toContain("rbac 'ops' created");
|
|
||||||
});
|
|
||||||
|
|
||||||
it('creates an RBAC definition with name-scoped binding', async () => {
|
|
||||||
vi.mocked(client.post).mockResolvedValueOnce({ id: 'rbac-1', name: 'ha-viewer' });
|
|
||||||
const cmd = createCreateCommand({ client, log });
|
|
||||||
await cmd.parseAsync([
|
|
||||||
'rbac', 'ha-viewer',
|
|
||||||
'--subject', 'User:alice@test.com',
|
|
||||||
'--binding', 'view:servers:my-ha',
|
|
||||||
], { from: 'user' });
|
|
||||||
|
|
||||||
expect(client.post).toHaveBeenCalledWith('/api/v1/rbac', {
|
|
||||||
name: 'ha-viewer',
|
|
||||||
subjects: [{ kind: 'User', name: 'alice@test.com' }],
|
|
||||||
roleBindings: [
|
|
||||||
{ role: 'view', resource: 'servers', name: 'my-ha' },
|
|
||||||
],
|
|
||||||
});
|
|
||||||
});
|
|
||||||
});
|
|
||||||
});
|
|
||||||
@@ -1,696 +0,0 @@
|
|||||||
import { describe, it, expect, vi } from 'vitest';
|
|
||||||
import { createDescribeCommand } from '../../src/commands/describe.js';
|
|
||||||
import type { DescribeCommandDeps } from '../../src/commands/describe.js';
|
|
||||||
import type { ApiClient } from '../../src/api-client.js';
|
|
||||||
|
|
||||||
function mockClient(): ApiClient {
|
|
||||||
return {
|
|
||||||
get: vi.fn(async () => []),
|
|
||||||
post: vi.fn(async () => ({})),
|
|
||||||
put: vi.fn(async () => ({})),
|
|
||||||
delete: vi.fn(async () => {}),
|
|
||||||
} as unknown as ApiClient;
|
|
||||||
}
|
|
||||||
|
|
||||||
function makeDeps(item: unknown = {}): DescribeCommandDeps & { output: string[] } {
|
|
||||||
const output: string[] = [];
|
|
||||||
return {
|
|
||||||
output,
|
|
||||||
client: mockClient(),
|
|
||||||
fetchResource: vi.fn(async () => item),
|
|
||||||
log: (...args: string[]) => output.push(args.join(' ')),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
describe('describe command', () => {
|
|
||||||
it('shows detailed server info with sections', async () => {
|
|
||||||
const deps = makeDeps({
|
|
||||||
id: 'srv-1',
|
|
||||||
name: 'slack',
|
|
||||||
transport: 'STDIO',
|
|
||||||
packageName: '@slack/mcp',
|
|
||||||
dockerImage: null,
|
|
||||||
env: [],
|
|
||||||
createdAt: '2025-01-01',
|
|
||||||
});
|
|
||||||
const cmd = createDescribeCommand(deps);
|
|
||||||
await cmd.parseAsync(['node', 'test', 'server', 'srv-1']);
|
|
||||||
|
|
||||||
expect(deps.fetchResource).toHaveBeenCalledWith('servers', 'srv-1');
|
|
||||||
const text = deps.output.join('\n');
|
|
||||||
expect(text).toContain('=== Server: slack ===');
|
|
||||||
expect(text).toContain('Name:');
|
|
||||||
expect(text).toContain('slack');
|
|
||||||
expect(text).toContain('Transport:');
|
|
||||||
expect(text).toContain('STDIO');
|
|
||||||
expect(text).toContain('Package:');
|
|
||||||
expect(text).toContain('@slack/mcp');
|
|
||||||
expect(text).toContain('Metadata:');
|
|
||||||
expect(text).toContain('ID:');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('resolves resource aliases', async () => {
|
|
||||||
const deps = makeDeps({ id: 's1' });
|
|
||||||
const cmd = createDescribeCommand(deps);
|
|
||||||
await cmd.parseAsync(['node', 'test', 'sec', 's1']);
|
|
||||||
expect(deps.fetchResource).toHaveBeenCalledWith('secrets', 's1');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('outputs JSON format', async () => {
|
|
||||||
const deps = makeDeps({ id: 'srv-1', name: 'slack' });
|
|
||||||
const cmd = createDescribeCommand(deps);
|
|
||||||
await cmd.parseAsync(['node', 'test', 'server', 'srv-1', '-o', 'json']);
|
|
||||||
|
|
||||||
const parsed = JSON.parse(deps.output[0] ?? '');
|
|
||||||
expect(parsed.name).toBe('slack');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('outputs YAML format', async () => {
|
|
||||||
const deps = makeDeps({ id: 'srv-1', name: 'slack' });
|
|
||||||
const cmd = createDescribeCommand(deps);
|
|
||||||
await cmd.parseAsync(['node', 'test', 'server', 'srv-1', '-o', 'yaml']);
|
|
||||||
expect(deps.output[0]).toContain('name: slack');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('shows project detail', async () => {
|
|
||||||
const deps = makeDeps({
|
|
||||||
id: 'proj-1',
|
|
||||||
name: 'my-project',
|
|
||||||
description: 'A test project',
|
|
||||||
ownerId: 'user-1',
|
|
||||||
createdAt: '2025-01-01',
|
|
||||||
});
|
|
||||||
const cmd = createDescribeCommand(deps);
|
|
||||||
await cmd.parseAsync(['node', 'test', 'project', 'proj-1']);
|
|
||||||
|
|
||||||
const text = deps.output.join('\n');
|
|
||||||
expect(text).toContain('=== Project: my-project ===');
|
|
||||||
expect(text).toContain('A test project');
|
|
||||||
expect(text).toContain('user-1');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('shows secret detail with masked values', async () => {
|
|
||||||
const deps = makeDeps({
|
|
||||||
id: 'sec-1',
|
|
||||||
name: 'ha-creds',
|
|
||||||
data: { TOKEN: 'abc123', URL: 'https://ha.local' },
|
|
||||||
createdAt: '2025-01-01',
|
|
||||||
});
|
|
||||||
const cmd = createDescribeCommand(deps);
|
|
||||||
await cmd.parseAsync(['node', 'test', 'secret', 'sec-1']);
|
|
||||||
|
|
||||||
const text = deps.output.join('\n');
|
|
||||||
expect(text).toContain('=== Secret: ha-creds ===');
|
|
||||||
expect(text).toContain('TOKEN');
|
|
||||||
expect(text).toContain('***');
|
|
||||||
expect(text).not.toContain('abc123');
|
|
||||||
expect(text).toContain('use --show-values to reveal');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('shows secret detail with revealed values when --show-values', async () => {
|
|
||||||
const deps = makeDeps({
|
|
||||||
id: 'sec-1',
|
|
||||||
name: 'ha-creds',
|
|
||||||
data: { TOKEN: 'abc123' },
|
|
||||||
createdAt: '2025-01-01',
|
|
||||||
});
|
|
||||||
const cmd = createDescribeCommand(deps);
|
|
||||||
await cmd.parseAsync(['node', 'test', 'secret', 'sec-1', '--show-values']);
|
|
||||||
|
|
||||||
const text = deps.output.join('\n');
|
|
||||||
expect(text).toContain('abc123');
|
|
||||||
expect(text).not.toContain('***');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('shows instance detail with container info', async () => {
|
|
||||||
const deps = makeDeps({
|
|
||||||
id: 'inst-1',
|
|
||||||
serverId: 'srv-1',
|
|
||||||
status: 'RUNNING',
|
|
||||||
containerId: 'abc123',
|
|
||||||
port: 3000,
|
|
||||||
createdAt: '2025-01-01',
|
|
||||||
});
|
|
||||||
const cmd = createDescribeCommand(deps);
|
|
||||||
await cmd.parseAsync(['node', 'test', 'instance', 'inst-1']);
|
|
||||||
|
|
||||||
const text = deps.output.join('\n');
|
|
||||||
expect(text).toContain('=== Instance: inst-1 ===');
|
|
||||||
expect(text).toContain('RUNNING');
|
|
||||||
expect(text).toContain('abc123');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('resolves server name to instance for describe instance', async () => {
|
|
||||||
const deps = makeDeps({
|
|
||||||
id: 'inst-1',
|
|
||||||
serverId: 'srv-1',
|
|
||||||
server: { name: 'my-grafana' },
|
|
||||||
status: 'RUNNING',
|
|
||||||
containerId: 'abc123',
|
|
||||||
port: 3000,
|
|
||||||
});
|
|
||||||
// resolveNameOrId will throw (not a CUID, name won't match instances)
|
|
||||||
vi.mocked(deps.client.get)
|
|
||||||
.mockResolvedValueOnce([] as never) // instances list (no name match)
|
|
||||||
.mockResolvedValueOnce([{ id: 'srv-1', name: 'my-grafana' }] as never) // servers list
|
|
||||||
.mockResolvedValueOnce([{ id: 'inst-1', status: 'RUNNING' }] as never); // instances for server
|
|
||||||
|
|
||||||
const cmd = createDescribeCommand(deps);
|
|
||||||
await cmd.parseAsync(['node', 'test', 'instance', 'my-grafana']);
|
|
||||||
|
|
||||||
expect(deps.fetchResource).toHaveBeenCalledWith('instances', 'inst-1');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('resolves server name and picks running instance over stopped', async () => {
|
|
||||||
const deps = makeDeps({
|
|
||||||
id: 'inst-2',
|
|
||||||
serverId: 'srv-1',
|
|
||||||
server: { name: 'my-ha' },
|
|
||||||
status: 'RUNNING',
|
|
||||||
containerId: 'def456',
|
|
||||||
});
|
|
||||||
vi.mocked(deps.client.get)
|
|
||||||
.mockResolvedValueOnce([] as never) // instances list
|
|
||||||
.mockResolvedValueOnce([{ id: 'srv-1', name: 'my-ha' }] as never)
|
|
||||||
.mockResolvedValueOnce([
|
|
||||||
{ id: 'inst-1', status: 'ERROR' },
|
|
||||||
{ id: 'inst-2', status: 'RUNNING' },
|
|
||||||
] as never);
|
|
||||||
|
|
||||||
const cmd = createDescribeCommand(deps);
|
|
||||||
await cmd.parseAsync(['node', 'test', 'instance', 'my-ha']);
|
|
||||||
|
|
||||||
expect(deps.fetchResource).toHaveBeenCalledWith('instances', 'inst-2');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('throws when no instances found for server name', async () => {
|
|
||||||
const deps = makeDeps();
|
|
||||||
vi.mocked(deps.client.get)
|
|
||||||
.mockResolvedValueOnce([] as never) // instances list
|
|
||||||
.mockResolvedValueOnce([{ id: 'srv-1', name: 'my-server' }] as never)
|
|
||||||
.mockResolvedValueOnce([] as never); // no instances
|
|
||||||
|
|
||||||
const cmd = createDescribeCommand(deps);
|
|
||||||
await expect(cmd.parseAsync(['node', 'test', 'instance', 'my-server'])).rejects.toThrow(
|
|
||||||
/No instances found/,
|
|
||||||
);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('shows instance with server name in header', async () => {
|
|
||||||
const deps = makeDeps({
|
|
||||||
id: 'inst-1',
|
|
||||||
serverId: 'srv-1',
|
|
||||||
server: { name: 'my-grafana' },
|
|
||||||
status: 'RUNNING',
|
|
||||||
containerId: 'abc123',
|
|
||||||
port: 3000,
|
|
||||||
});
|
|
||||||
const cmd = createDescribeCommand(deps);
|
|
||||||
await cmd.parseAsync(['node', 'test', 'instance', 'inst-1']);
|
|
||||||
|
|
||||||
const text = deps.output.join('\n');
|
|
||||||
expect(text).toContain('=== Instance: my-grafana ===');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('shows instance health and events', async () => {
|
|
||||||
const deps = makeDeps({
|
|
||||||
id: 'inst-1',
|
|
||||||
serverId: 'srv-1',
|
|
||||||
server: { name: 'my-grafana' },
|
|
||||||
status: 'RUNNING',
|
|
||||||
containerId: 'abc123',
|
|
||||||
healthStatus: 'healthy',
|
|
||||||
lastHealthCheck: '2025-01-15T10:30:00Z',
|
|
||||||
events: [
|
|
||||||
{ timestamp: '2025-01-15T10:30:00Z', type: 'Normal', message: 'Health check passed (45ms)' },
|
|
||||||
],
|
|
||||||
});
|
|
||||||
const cmd = createDescribeCommand(deps);
|
|
||||||
await cmd.parseAsync(['node', 'test', 'instance', 'inst-1']);
|
|
||||||
|
|
||||||
const text = deps.output.join('\n');
|
|
||||||
expect(text).toContain('Health:');
|
|
||||||
expect(text).toContain('healthy');
|
|
||||||
expect(text).toContain('Events:');
|
|
||||||
expect(text).toContain('Health check passed');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('shows server healthCheck section', async () => {
|
|
||||||
const deps = makeDeps({
|
|
||||||
id: 'srv-1',
|
|
||||||
name: 'my-grafana',
|
|
||||||
transport: 'STDIO',
|
|
||||||
healthCheck: {
|
|
||||||
tool: 'list_datasources',
|
|
||||||
arguments: {},
|
|
||||||
intervalSeconds: 60,
|
|
||||||
timeoutSeconds: 10,
|
|
||||||
failureThreshold: 3,
|
|
||||||
},
|
|
||||||
});
|
|
||||||
const cmd = createDescribeCommand(deps);
|
|
||||||
await cmd.parseAsync(['node', 'test', 'server', 'srv-1']);
|
|
||||||
|
|
||||||
const text = deps.output.join('\n');
|
|
||||||
expect(text).toContain('Health Check:');
|
|
||||||
expect(text).toContain('list_datasources');
|
|
||||||
expect(text).toContain('60s');
|
|
||||||
expect(text).toContain('Failure Threshold:');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('shows template detail with healthCheck and usage', async () => {
|
|
||||||
const deps = makeDeps({
|
|
||||||
id: 'tpl-1',
|
|
||||||
name: 'grafana',
|
|
||||||
transport: 'STDIO',
|
|
||||||
version: '1.0.0',
|
|
||||||
packageName: '@leval/mcp-grafana',
|
|
||||||
env: [
|
|
||||||
{ name: 'GRAFANA_URL', required: true, description: 'Grafana instance URL' },
|
|
||||||
],
|
|
||||||
healthCheck: {
|
|
||||||
tool: 'list_datasources',
|
|
||||||
arguments: {},
|
|
||||||
intervalSeconds: 60,
|
|
||||||
timeoutSeconds: 10,
|
|
||||||
failureThreshold: 3,
|
|
||||||
},
|
|
||||||
});
|
|
||||||
const cmd = createDescribeCommand(deps);
|
|
||||||
await cmd.parseAsync(['node', 'test', 'template', 'tpl-1']);
|
|
||||||
|
|
||||||
const text = deps.output.join('\n');
|
|
||||||
expect(text).toContain('=== Template: grafana ===');
|
|
||||||
expect(text).toContain('@leval/mcp-grafana');
|
|
||||||
expect(text).toContain('GRAFANA_URL');
|
|
||||||
expect(text).toContain('Health Check:');
|
|
||||||
expect(text).toContain('list_datasources');
|
|
||||||
expect(text).toContain('mcpctl create server my-grafana --from-template=grafana');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('shows user detail (no Role field — RBAC is the auth mechanism)', async () => {
|
|
||||||
const deps = makeDeps({
|
|
||||||
id: 'usr-1',
|
|
||||||
email: 'alice@test.com',
|
|
||||||
name: 'Alice Smith',
|
|
||||||
provider: null,
|
|
||||||
createdAt: '2025-01-01',
|
|
||||||
updatedAt: '2025-01-15',
|
|
||||||
});
|
|
||||||
const cmd = createDescribeCommand(deps);
|
|
||||||
await cmd.parseAsync(['node', 'test', 'user', 'usr-1']);
|
|
||||||
|
|
||||||
expect(deps.fetchResource).toHaveBeenCalledWith('users', 'usr-1');
|
|
||||||
const text = deps.output.join('\n');
|
|
||||||
expect(text).toContain('=== User: alice@test.com ===');
|
|
||||||
expect(text).toContain('Email:');
|
|
||||||
expect(text).toContain('alice@test.com');
|
|
||||||
expect(text).toContain('Name:');
|
|
||||||
expect(text).toContain('Alice Smith');
|
|
||||||
expect(text).not.toContain('Role:');
|
|
||||||
expect(text).toContain('Provider:');
|
|
||||||
expect(text).toContain('local');
|
|
||||||
expect(text).toContain('ID:');
|
|
||||||
expect(text).toContain('usr-1');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('shows user with no name as dash', async () => {
|
|
||||||
const deps = makeDeps({
|
|
||||||
id: 'usr-2',
|
|
||||||
email: 'bob@test.com',
|
|
||||||
name: null,
|
|
||||||
provider: 'oidc',
|
|
||||||
});
|
|
||||||
const cmd = createDescribeCommand(deps);
|
|
||||||
await cmd.parseAsync(['node', 'test', 'user', 'usr-2']);
|
|
||||||
|
|
||||||
const text = deps.output.join('\n');
|
|
||||||
expect(text).toContain('=== User: bob@test.com ===');
|
|
||||||
expect(text).toContain('Name:');
|
|
||||||
expect(text).toContain('-');
|
|
||||||
expect(text).not.toContain('Role:');
|
|
||||||
expect(text).toContain('oidc');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('shows group detail with members', async () => {
|
|
||||||
const deps = makeDeps({
|
|
||||||
id: 'grp-1',
|
|
||||||
name: 'dev-team',
|
|
||||||
description: 'Development team',
|
|
||||||
members: [
|
|
||||||
{ user: { email: 'alice@test.com' }, createdAt: '2025-01-01' },
|
|
||||||
{ user: { email: 'bob@test.com' }, createdAt: '2025-01-02' },
|
|
||||||
],
|
|
||||||
createdAt: '2025-01-01',
|
|
||||||
updatedAt: '2025-01-15',
|
|
||||||
});
|
|
||||||
const cmd = createDescribeCommand(deps);
|
|
||||||
await cmd.parseAsync(['node', 'test', 'group', 'grp-1']);
|
|
||||||
|
|
||||||
expect(deps.fetchResource).toHaveBeenCalledWith('groups', 'grp-1');
|
|
||||||
const text = deps.output.join('\n');
|
|
||||||
expect(text).toContain('=== Group: dev-team ===');
|
|
||||||
expect(text).toContain('Name:');
|
|
||||||
expect(text).toContain('dev-team');
|
|
||||||
expect(text).toContain('Description:');
|
|
||||||
expect(text).toContain('Development team');
|
|
||||||
expect(text).toContain('Members:');
|
|
||||||
expect(text).toContain('EMAIL');
|
|
||||||
expect(text).toContain('ADDED');
|
|
||||||
expect(text).toContain('alice@test.com');
|
|
||||||
expect(text).toContain('bob@test.com');
|
|
||||||
expect(text).toContain('ID:');
|
|
||||||
expect(text).toContain('grp-1');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('shows group detail with no members', async () => {
|
|
||||||
const deps = makeDeps({
|
|
||||||
id: 'grp-2',
|
|
||||||
name: 'empty-group',
|
|
||||||
description: '',
|
|
||||||
members: [],
|
|
||||||
});
|
|
||||||
const cmd = createDescribeCommand(deps);
|
|
||||||
await cmd.parseAsync(['node', 'test', 'group', 'grp-2']);
|
|
||||||
|
|
||||||
const text = deps.output.join('\n');
|
|
||||||
expect(text).toContain('=== Group: empty-group ===');
|
|
||||||
// No Members section when empty
|
|
||||||
expect(text).not.toContain('EMAIL');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('shows RBAC detail with subjects and bindings', async () => {
|
|
||||||
const deps = makeDeps({
|
|
||||||
id: 'rbac-1',
|
|
||||||
name: 'developers',
|
|
||||||
subjects: [
|
|
||||||
{ kind: 'User', name: 'alice@test.com' },
|
|
||||||
{ kind: 'Group', name: 'dev-team' },
|
|
||||||
],
|
|
||||||
roleBindings: [
|
|
||||||
{ role: 'edit', resource: 'servers' },
|
|
||||||
{ role: 'view', resource: 'instances' },
|
|
||||||
{ role: 'view', resource: 'projects' },
|
|
||||||
],
|
|
||||||
createdAt: '2025-01-01',
|
|
||||||
updatedAt: '2025-01-15',
|
|
||||||
});
|
|
||||||
const cmd = createDescribeCommand(deps);
|
|
||||||
await cmd.parseAsync(['node', 'test', 'rbac', 'rbac-1']);
|
|
||||||
|
|
||||||
expect(deps.fetchResource).toHaveBeenCalledWith('rbac', 'rbac-1');
|
|
||||||
const text = deps.output.join('\n');
|
|
||||||
expect(text).toContain('=== RBAC: developers ===');
|
|
||||||
expect(text).toContain('Name:');
|
|
||||||
expect(text).toContain('developers');
|
|
||||||
// Subjects section
|
|
||||||
expect(text).toContain('Subjects:');
|
|
||||||
expect(text).toContain('KIND');
|
|
||||||
expect(text).toContain('NAME');
|
|
||||||
expect(text).toContain('User');
|
|
||||||
expect(text).toContain('alice@test.com');
|
|
||||||
expect(text).toContain('Group');
|
|
||||||
expect(text).toContain('dev-team');
|
|
||||||
// Role Bindings section
|
|
||||||
expect(text).toContain('Resource Bindings:');
|
|
||||||
expect(text).toContain('ROLE');
|
|
||||||
expect(text).toContain('RESOURCE');
|
|
||||||
expect(text).toContain('edit');
|
|
||||||
expect(text).toContain('servers');
|
|
||||||
expect(text).toContain('view');
|
|
||||||
expect(text).toContain('instances');
|
|
||||||
expect(text).toContain('projects');
|
|
||||||
expect(text).toContain('ID:');
|
|
||||||
expect(text).toContain('rbac-1');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('shows RBAC detail with wildcard resource', async () => {
|
|
||||||
const deps = makeDeps({
|
|
||||||
id: 'rbac-2',
|
|
||||||
name: 'admins',
|
|
||||||
subjects: [{ kind: 'User', name: 'admin@test.com' }],
|
|
||||||
roleBindings: [{ role: 'edit', resource: '*' }],
|
|
||||||
});
|
|
||||||
const cmd = createDescribeCommand(deps);
|
|
||||||
await cmd.parseAsync(['node', 'test', 'rbac', 'rbac-2']);
|
|
||||||
|
|
||||||
const text = deps.output.join('\n');
|
|
||||||
expect(text).toContain('=== RBAC: admins ===');
|
|
||||||
expect(text).toContain('edit');
|
|
||||||
expect(text).toContain('*');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('shows RBAC detail with empty subjects and bindings', async () => {
|
|
||||||
const deps = makeDeps({
|
|
||||||
id: 'rbac-3',
|
|
||||||
name: 'empty-rbac',
|
|
||||||
subjects: [],
|
|
||||||
roleBindings: [],
|
|
||||||
});
|
|
||||||
const cmd = createDescribeCommand(deps);
|
|
||||||
await cmd.parseAsync(['node', 'test', 'rbac', 'rbac-3']);
|
|
||||||
|
|
||||||
const text = deps.output.join('\n');
|
|
||||||
expect(text).toContain('=== RBAC: empty-rbac ===');
|
|
||||||
// No Subjects or Role Bindings sections when empty
|
|
||||||
expect(text).not.toContain('KIND');
|
|
||||||
expect(text).not.toContain('ROLE');
|
|
||||||
expect(text).not.toContain('RESOURCE');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('shows RBAC detail with mixed resource and operation bindings', async () => {
|
|
||||||
const deps = makeDeps({
|
|
||||||
id: 'rbac-1',
|
|
||||||
name: 'admin-access',
|
|
||||||
subjects: [{ kind: 'Group', name: 'admin' }],
|
|
||||||
roleBindings: [
|
|
||||||
{ role: 'edit', resource: '*' },
|
|
||||||
{ role: 'run', resource: 'projects' },
|
|
||||||
{ role: 'run', action: 'logs' },
|
|
||||||
{ role: 'run', action: 'backup' },
|
|
||||||
],
|
|
||||||
createdAt: '2025-01-01',
|
|
||||||
});
|
|
||||||
const cmd = createDescribeCommand(deps);
|
|
||||||
await cmd.parseAsync(['node', 'test', 'rbac', 'rbac-1']);
|
|
||||||
|
|
||||||
const text = deps.output.join('\n');
|
|
||||||
expect(text).toContain('Resource Bindings:');
|
|
||||||
expect(text).toContain('edit');
|
|
||||||
expect(text).toContain('*');
|
|
||||||
expect(text).toContain('run');
|
|
||||||
expect(text).toContain('projects');
|
|
||||||
expect(text).toContain('Operations:');
|
|
||||||
expect(text).toContain('ACTION');
|
|
||||||
expect(text).toContain('logs');
|
|
||||||
expect(text).toContain('backup');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('shows RBAC detail with name-scoped resource binding', async () => {
|
|
||||||
const deps = makeDeps({
|
|
||||||
id: 'rbac-1',
|
|
||||||
name: 'ha-viewer',
|
|
||||||
subjects: [{ kind: 'User', name: 'alice@test.com' }],
|
|
||||||
roleBindings: [
|
|
||||||
{ role: 'view', resource: 'servers', name: 'my-ha' },
|
|
||||||
{ role: 'edit', resource: 'secrets' },
|
|
||||||
],
|
|
||||||
});
|
|
||||||
const cmd = createDescribeCommand(deps);
|
|
||||||
await cmd.parseAsync(['node', 'test', 'rbac', 'rbac-1']);
|
|
||||||
|
|
||||||
const text = deps.output.join('\n');
|
|
||||||
expect(text).toContain('Resource Bindings:');
|
|
||||||
expect(text).toContain('NAME');
|
|
||||||
expect(text).toContain('my-ha');
|
|
||||||
expect(text).toContain('view');
|
|
||||||
expect(text).toContain('servers');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('shows user with direct RBAC permissions', async () => {
|
|
||||||
const deps = makeDeps({
|
|
||||||
id: 'usr-1',
|
|
||||||
email: 'alice@test.com',
|
|
||||||
name: 'Alice',
|
|
||||||
provider: null,
|
|
||||||
});
|
|
||||||
vi.mocked(deps.client.get)
|
|
||||||
.mockResolvedValueOnce([] as never) // users list (resolveNameOrId)
|
|
||||||
.mockResolvedValueOnce([ // RBAC defs
|
|
||||||
{
|
|
||||||
name: 'dev-access',
|
|
||||||
subjects: [{ kind: 'User', name: 'alice@test.com' }],
|
|
||||||
roleBindings: [
|
|
||||||
{ role: 'edit', resource: 'servers' },
|
|
||||||
{ role: 'run', action: 'logs' },
|
|
||||||
],
|
|
||||||
},
|
|
||||||
] as never)
|
|
||||||
.mockResolvedValueOnce([] as never); // groups
|
|
||||||
|
|
||||||
const cmd = createDescribeCommand(deps);
|
|
||||||
await cmd.parseAsync(['node', 'test', 'user', 'usr-1']);
|
|
||||||
|
|
||||||
const text = deps.output.join('\n');
|
|
||||||
expect(text).toContain('=== User: alice@test.com ===');
|
|
||||||
expect(text).toContain('Access:');
|
|
||||||
expect(text).toContain('Direct (dev-access)');
|
|
||||||
expect(text).toContain('Resources:');
|
|
||||||
expect(text).toContain('edit');
|
|
||||||
expect(text).toContain('servers');
|
|
||||||
expect(text).toContain('Operations:');
|
|
||||||
expect(text).toContain('logs');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('shows user with inherited group permissions', async () => {
|
|
||||||
const deps = makeDeps({
|
|
||||||
id: 'usr-1',
|
|
||||||
email: 'bob@test.com',
|
|
||||||
name: 'Bob',
|
|
||||||
provider: null,
|
|
||||||
});
|
|
||||||
vi.mocked(deps.client.get)
|
|
||||||
.mockResolvedValueOnce([] as never) // users list
|
|
||||||
.mockResolvedValueOnce([ // RBAC defs
|
|
||||||
{
|
|
||||||
name: 'team-perms',
|
|
||||||
subjects: [{ kind: 'Group', name: 'dev-team' }],
|
|
||||||
roleBindings: [
|
|
||||||
{ role: 'view', resource: '*' },
|
|
||||||
{ role: 'run', action: 'backup' },
|
|
||||||
],
|
|
||||||
},
|
|
||||||
] as never)
|
|
||||||
.mockResolvedValueOnce([ // groups
|
|
||||||
{ name: 'dev-team', members: [{ user: { email: 'bob@test.com' } }] },
|
|
||||||
] as never);
|
|
||||||
|
|
||||||
const cmd = createDescribeCommand(deps);
|
|
||||||
await cmd.parseAsync(['node', 'test', 'user', 'usr-1']);
|
|
||||||
|
|
||||||
const text = deps.output.join('\n');
|
|
||||||
expect(text).toContain('Groups:');
|
|
||||||
expect(text).toContain('dev-team');
|
|
||||||
expect(text).toContain('Access:');
|
|
||||||
expect(text).toContain('Inherited (dev-team)');
|
|
||||||
expect(text).toContain('view');
|
|
||||||
expect(text).toContain('*');
|
|
||||||
expect(text).toContain('backup');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('shows user with no permissions', async () => {
|
|
||||||
const deps = makeDeps({
|
|
||||||
id: 'usr-1',
|
|
||||||
email: 'nobody@test.com',
|
|
||||||
name: null,
|
|
||||||
provider: null,
|
|
||||||
});
|
|
||||||
vi.mocked(deps.client.get)
|
|
||||||
.mockResolvedValueOnce([] as never)
|
|
||||||
.mockResolvedValueOnce([] as never)
|
|
||||||
.mockResolvedValueOnce([] as never);
|
|
||||||
|
|
||||||
const cmd = createDescribeCommand(deps);
|
|
||||||
await cmd.parseAsync(['node', 'test', 'user', 'usr-1']);
|
|
||||||
|
|
||||||
const text = deps.output.join('\n');
|
|
||||||
expect(text).toContain('Access: (none)');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('shows group with RBAC permissions', async () => {
|
|
||||||
const deps = makeDeps({
|
|
||||||
id: 'grp-1',
|
|
||||||
name: 'admin',
|
|
||||||
description: 'Admin group',
|
|
||||||
members: [{ user: { email: 'alice@test.com' } }],
|
|
||||||
});
|
|
||||||
vi.mocked(deps.client.get)
|
|
||||||
.mockResolvedValueOnce([] as never) // groups list (resolveNameOrId)
|
|
||||||
.mockResolvedValueOnce([ // RBAC defs
|
|
||||||
{
|
|
||||||
name: 'admin-access',
|
|
||||||
subjects: [{ kind: 'Group', name: 'admin' }],
|
|
||||||
roleBindings: [
|
|
||||||
{ role: 'edit', resource: '*' },
|
|
||||||
{ role: 'run', action: 'backup' },
|
|
||||||
{ role: 'run', action: 'restore' },
|
|
||||||
],
|
|
||||||
},
|
|
||||||
] as never);
|
|
||||||
|
|
||||||
const cmd = createDescribeCommand(deps);
|
|
||||||
await cmd.parseAsync(['node', 'test', 'group', 'grp-1']);
|
|
||||||
|
|
||||||
const text = deps.output.join('\n');
|
|
||||||
expect(text).toContain('=== Group: admin ===');
|
|
||||||
expect(text).toContain('Access:');
|
|
||||||
expect(text).toContain('Granted (admin-access)');
|
|
||||||
expect(text).toContain('edit');
|
|
||||||
expect(text).toContain('*');
|
|
||||||
expect(text).toContain('backup');
|
|
||||||
expect(text).toContain('restore');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('shows group with name-scoped permissions', async () => {
|
|
||||||
const deps = makeDeps({
|
|
||||||
id: 'grp-1',
|
|
||||||
name: 'ha-team',
|
|
||||||
description: 'HA team',
|
|
||||||
members: [],
|
|
||||||
});
|
|
||||||
vi.mocked(deps.client.get)
|
|
||||||
.mockResolvedValueOnce([] as never)
|
|
||||||
.mockResolvedValueOnce([ // RBAC defs
|
|
||||||
{
|
|
||||||
name: 'ha-access',
|
|
||||||
subjects: [{ kind: 'Group', name: 'ha-team' }],
|
|
||||||
roleBindings: [
|
|
||||||
{ role: 'edit', resource: 'servers', name: 'my-ha' },
|
|
||||||
{ role: 'view', resource: 'secrets' },
|
|
||||||
],
|
|
||||||
},
|
|
||||||
] as never);
|
|
||||||
|
|
||||||
const cmd = createDescribeCommand(deps);
|
|
||||||
await cmd.parseAsync(['node', 'test', 'group', 'grp-1']);
|
|
||||||
|
|
||||||
const text = deps.output.join('\n');
|
|
||||||
expect(text).toContain('Access:');
|
|
||||||
expect(text).toContain('Granted (ha-access)');
|
|
||||||
expect(text).toContain('my-ha');
|
|
||||||
expect(text).toContain('NAME');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('outputs user detail as JSON', async () => {
|
|
||||||
const deps = makeDeps({ id: 'usr-1', email: 'alice@test.com', name: 'Alice', role: 'ADMIN' });
|
|
||||||
const cmd = createDescribeCommand(deps);
|
|
||||||
await cmd.parseAsync(['node', 'test', 'user', 'usr-1', '-o', 'json']);
|
|
||||||
|
|
||||||
const parsed = JSON.parse(deps.output[0] ?? '');
|
|
||||||
expect(parsed.email).toBe('alice@test.com');
|
|
||||||
expect(parsed.role).toBe('ADMIN');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('outputs group detail as YAML', async () => {
|
|
||||||
const deps = makeDeps({ id: 'grp-1', name: 'dev-team', description: 'Devs' });
|
|
||||||
const cmd = createDescribeCommand(deps);
|
|
||||||
await cmd.parseAsync(['node', 'test', 'group', 'grp-1', '-o', 'yaml']);
|
|
||||||
|
|
||||||
expect(deps.output[0]).toContain('name: dev-team');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('outputs rbac detail as JSON', async () => {
|
|
||||||
const deps = makeDeps({
|
|
||||||
id: 'rbac-1',
|
|
||||||
name: 'devs',
|
|
||||||
subjects: [{ kind: 'User', name: 'a@b.com' }],
|
|
||||||
roleBindings: [{ role: 'edit', resource: 'servers' }],
|
|
||||||
});
|
|
||||||
const cmd = createDescribeCommand(deps);
|
|
||||||
await cmd.parseAsync(['node', 'test', 'rbac', 'rbac-1', '-o', 'json']);
|
|
||||||
|
|
||||||
const parsed = JSON.parse(deps.output[0] ?? '');
|
|
||||||
expect(parsed.subjects).toHaveLength(1);
|
|
||||||
expect(parsed.roleBindings[0].role).toBe('edit');
|
|
||||||
});
|
|
||||||
});
|
|
||||||
@@ -1,153 +0,0 @@
|
|||||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
|
||||||
import { readFileSync, writeFileSync } from 'node:fs';
|
|
||||||
import yaml from 'js-yaml';
|
|
||||||
import { createEditCommand } from '../../src/commands/edit.js';
|
|
||||||
import type { ApiClient } from '../../src/api-client.js';
|
|
||||||
|
|
||||||
function mockClient(): ApiClient {
|
|
||||||
return {
|
|
||||||
get: vi.fn(async () => ({})),
|
|
||||||
post: vi.fn(async () => ({})),
|
|
||||||
put: vi.fn(async () => ({})),
|
|
||||||
delete: vi.fn(async () => {}),
|
|
||||||
} as unknown as ApiClient;
|
|
||||||
}
|
|
||||||
|
|
||||||
describe('edit command', () => {
|
|
||||||
let client: ReturnType<typeof mockClient>;
|
|
||||||
let output: string[];
|
|
||||||
const log = (...args: unknown[]) => output.push(args.map(String).join(' '));
|
|
||||||
|
|
||||||
beforeEach(() => {
|
|
||||||
client = mockClient();
|
|
||||||
output = [];
|
|
||||||
});
|
|
||||||
|
|
||||||
it('fetches server, opens editor, applies changes on save', async () => {
|
|
||||||
// GET /api/v1/servers returns list for resolveNameOrId
|
|
||||||
vi.mocked(client.get).mockImplementation(async (path: string) => {
|
|
||||||
if (path === '/api/v1/servers') {
|
|
||||||
return [{ id: 'srv-1', name: 'ha-mcp' }];
|
|
||||||
}
|
|
||||||
// GET /api/v1/servers/srv-1 returns full server
|
|
||||||
return {
|
|
||||||
id: 'srv-1',
|
|
||||||
name: 'ha-mcp',
|
|
||||||
description: 'Old desc',
|
|
||||||
transport: 'STDIO',
|
|
||||||
replicas: 1,
|
|
||||||
createdAt: '2025-01-01',
|
|
||||||
updatedAt: '2025-01-01',
|
|
||||||
version: 1,
|
|
||||||
};
|
|
||||||
});
|
|
||||||
|
|
||||||
const cmd = createEditCommand({
|
|
||||||
client,
|
|
||||||
log,
|
|
||||||
getEditor: () => 'vi',
|
|
||||||
openEditor: (filePath) => {
|
|
||||||
// Simulate user editing the file
|
|
||||||
const content = readFileSync(filePath, 'utf-8');
|
|
||||||
const modified = content
|
|
||||||
.replace('Old desc', 'New desc')
|
|
||||||
.replace('replicas: 1', 'replicas: 3');
|
|
||||||
writeFileSync(filePath, modified, 'utf-8');
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
await cmd.parseAsync(['server', 'ha-mcp'], { from: 'user' });
|
|
||||||
|
|
||||||
expect(client.put).toHaveBeenCalledWith('/api/v1/servers/srv-1', expect.objectContaining({
|
|
||||||
description: 'New desc',
|
|
||||||
replicas: 3,
|
|
||||||
}));
|
|
||||||
expect(output.join('\n')).toContain("server 'ha-mcp' updated");
|
|
||||||
});
|
|
||||||
|
|
||||||
it('detects no changes and skips PUT', async () => {
|
|
||||||
vi.mocked(client.get).mockImplementation(async (path: string) => {
|
|
||||||
if (path === '/api/v1/servers') return [{ id: 'srv-1', name: 'test' }];
|
|
||||||
return {
|
|
||||||
id: 'srv-1', name: 'test', description: '', transport: 'STDIO',
|
|
||||||
createdAt: '2025-01-01', updatedAt: '2025-01-01', version: 1,
|
|
||||||
};
|
|
||||||
});
|
|
||||||
|
|
||||||
const cmd = createEditCommand({
|
|
||||||
client,
|
|
||||||
log,
|
|
||||||
getEditor: () => 'vi',
|
|
||||||
openEditor: () => {
|
|
||||||
// Don't modify the file
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
await cmd.parseAsync(['server', 'test'], { from: 'user' });
|
|
||||||
|
|
||||||
expect(client.put).not.toHaveBeenCalled();
|
|
||||||
expect(output.join('\n')).toContain("unchanged");
|
|
||||||
});
|
|
||||||
|
|
||||||
it('handles empty file as cancel', async () => {
|
|
||||||
vi.mocked(client.get).mockImplementation(async (path: string) => {
|
|
||||||
if (path === '/api/v1/servers') return [{ id: 'srv-1', name: 'test' }];
|
|
||||||
return { id: 'srv-1', name: 'test', createdAt: '2025-01-01', updatedAt: '2025-01-01', version: 1 };
|
|
||||||
});
|
|
||||||
|
|
||||||
const cmd = createEditCommand({
|
|
||||||
client,
|
|
||||||
log,
|
|
||||||
getEditor: () => 'vi',
|
|
||||||
openEditor: (filePath) => {
|
|
||||||
writeFileSync(filePath, '', 'utf-8');
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
await cmd.parseAsync(['server', 'test'], { from: 'user' });
|
|
||||||
|
|
||||||
expect(client.put).not.toHaveBeenCalled();
|
|
||||||
expect(output.join('\n')).toContain('cancelled');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('strips read-only fields from editor content', async () => {
|
|
||||||
vi.mocked(client.get).mockImplementation(async (path: string) => {
|
|
||||||
if (path === '/api/v1/servers') return [{ id: 'srv-1', name: 'test' }];
|
|
||||||
return {
|
|
||||||
id: 'srv-1', name: 'test', description: '', transport: 'STDIO',
|
|
||||||
createdAt: '2025-01-01', updatedAt: '2025-01-01', version: 1,
|
|
||||||
};
|
|
||||||
});
|
|
||||||
|
|
||||||
let editorContent = '';
|
|
||||||
const cmd = createEditCommand({
|
|
||||||
client,
|
|
||||||
log,
|
|
||||||
getEditor: () => 'vi',
|
|
||||||
openEditor: (filePath) => {
|
|
||||||
editorContent = readFileSync(filePath, 'utf-8');
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
await cmd.parseAsync(['server', 'test'], { from: 'user' });
|
|
||||||
|
|
||||||
// The editor content should NOT contain read-only fields
|
|
||||||
expect(editorContent).not.toContain('id:');
|
|
||||||
expect(editorContent).not.toContain('createdAt');
|
|
||||||
expect(editorContent).not.toContain('updatedAt');
|
|
||||||
expect(editorContent).not.toContain('version');
|
|
||||||
// But should contain editable fields
|
|
||||||
expect(editorContent).toContain('name:');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('rejects edit instance with error message', async () => {
|
|
||||||
const cmd = createEditCommand({ client, log });
|
|
||||||
|
|
||||||
await cmd.parseAsync(['instance', 'inst-1'], { from: 'user' });
|
|
||||||
|
|
||||||
expect(client.get).not.toHaveBeenCalled();
|
|
||||||
expect(client.put).not.toHaveBeenCalled();
|
|
||||||
expect(output.join('\n')).toContain('immutable');
|
|
||||||
});
|
|
||||||
|
|
||||||
});
|
|
||||||
@@ -1,254 +0,0 @@
|
|||||||
import { describe, it, expect, vi } from 'vitest';
|
|
||||||
import { createGetCommand } from '../../src/commands/get.js';
|
|
||||||
import type { GetCommandDeps } from '../../src/commands/get.js';
|
|
||||||
|
|
||||||
function makeDeps(items: unknown[] = []): GetCommandDeps & { output: string[] } {
|
|
||||||
const output: string[] = [];
|
|
||||||
return {
|
|
||||||
output,
|
|
||||||
fetchResource: vi.fn(async () => items),
|
|
||||||
log: (...args: string[]) => output.push(args.join(' ')),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
describe('get command', () => {
|
|
||||||
it('lists servers in table format', async () => {
|
|
||||||
const deps = makeDeps([
|
|
||||||
{ id: 'srv-1', name: 'slack', transport: 'STDIO', packageName: '@slack/mcp', dockerImage: null },
|
|
||||||
{ id: 'srv-2', name: 'github', transport: 'SSE', packageName: null, dockerImage: 'ghcr.io/github-mcp' },
|
|
||||||
]);
|
|
||||||
const cmd = createGetCommand(deps);
|
|
||||||
await cmd.parseAsync(['node', 'test', 'servers']);
|
|
||||||
|
|
||||||
expect(deps.fetchResource).toHaveBeenCalledWith('servers', undefined);
|
|
||||||
expect(deps.output[0]).toContain('NAME');
|
|
||||||
expect(deps.output[0]).toContain('TRANSPORT');
|
|
||||||
expect(deps.output.join('\n')).toContain('slack');
|
|
||||||
expect(deps.output.join('\n')).toContain('github');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('resolves resource aliases', async () => {
|
|
||||||
const deps = makeDeps([]);
|
|
||||||
const cmd = createGetCommand(deps);
|
|
||||||
await cmd.parseAsync(['node', 'test', 'srv']);
|
|
||||||
expect(deps.fetchResource).toHaveBeenCalledWith('servers', undefined);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('passes ID when provided', async () => {
|
|
||||||
const deps = makeDeps([{ id: 'srv-1', name: 'slack' }]);
|
|
||||||
const cmd = createGetCommand(deps);
|
|
||||||
await cmd.parseAsync(['node', 'test', 'servers', 'srv-1']);
|
|
||||||
expect(deps.fetchResource).toHaveBeenCalledWith('servers', 'srv-1');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('outputs apply-compatible JSON format', async () => {
|
|
||||||
const deps = makeDeps([{ id: 'srv-1', name: 'slack', createdAt: '2025-01-01', updatedAt: '2025-01-01', version: 1 }]);
|
|
||||||
const cmd = createGetCommand(deps);
|
|
||||||
await cmd.parseAsync(['node', 'test', 'servers', '-o', 'json']);
|
|
||||||
|
|
||||||
const parsed = JSON.parse(deps.output[0] ?? '');
|
|
||||||
// Wrapped in resource key, internal fields stripped
|
|
||||||
expect(parsed).toHaveProperty('servers');
|
|
||||||
expect(parsed.servers[0].name).toBe('slack');
|
|
||||||
expect(parsed.servers[0]).not.toHaveProperty('id');
|
|
||||||
expect(parsed.servers[0]).not.toHaveProperty('createdAt');
|
|
||||||
expect(parsed.servers[0]).not.toHaveProperty('updatedAt');
|
|
||||||
expect(parsed.servers[0]).not.toHaveProperty('version');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('outputs apply-compatible YAML format', async () => {
|
|
||||||
const deps = makeDeps([{ id: 'srv-1', name: 'slack', createdAt: '2025-01-01' }]);
|
|
||||||
const cmd = createGetCommand(deps);
|
|
||||||
await cmd.parseAsync(['node', 'test', 'servers', '-o', 'yaml']);
|
|
||||||
const text = deps.output[0];
|
|
||||||
expect(text).toContain('servers:');
|
|
||||||
expect(text).toContain('name: slack');
|
|
||||||
expect(text).not.toContain('id:');
|
|
||||||
expect(text).not.toContain('createdAt:');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('lists instances with correct columns', async () => {
|
|
||||||
const deps = makeDeps([
|
|
||||||
{ id: 'inst-1', serverId: 'srv-1', server: { name: 'my-grafana' }, status: 'RUNNING', containerId: 'abc123def456', port: 3000 },
|
|
||||||
]);
|
|
||||||
const cmd = createGetCommand(deps);
|
|
||||||
await cmd.parseAsync(['node', 'test', 'instances']);
|
|
||||||
expect(deps.output[0]).toContain('NAME');
|
|
||||||
expect(deps.output[0]).toContain('STATUS');
|
|
||||||
expect(deps.output.join('\n')).toContain('my-grafana');
|
|
||||||
expect(deps.output.join('\n')).toContain('RUNNING');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('shows no results message for empty list', async () => {
|
|
||||||
const deps = makeDeps([]);
|
|
||||||
const cmd = createGetCommand(deps);
|
|
||||||
await cmd.parseAsync(['node', 'test', 'servers']);
|
|
||||||
expect(deps.output[0]).toContain('No servers found');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('lists users with correct columns (no ROLE column)', async () => {
|
|
||||||
const deps = makeDeps([
|
|
||||||
{ id: 'usr-1', email: 'alice@test.com', name: 'Alice', provider: null },
|
|
||||||
{ id: 'usr-2', email: 'bob@test.com', name: null, provider: 'oidc' },
|
|
||||||
]);
|
|
||||||
const cmd = createGetCommand(deps);
|
|
||||||
await cmd.parseAsync(['node', 'test', 'users']);
|
|
||||||
|
|
||||||
expect(deps.fetchResource).toHaveBeenCalledWith('users', undefined);
|
|
||||||
const text = deps.output.join('\n');
|
|
||||||
expect(text).toContain('EMAIL');
|
|
||||||
expect(text).toContain('NAME');
|
|
||||||
expect(text).not.toContain('ROLE');
|
|
||||||
expect(text).toContain('PROVIDER');
|
|
||||||
expect(text).toContain('alice@test.com');
|
|
||||||
expect(text).toContain('Alice');
|
|
||||||
expect(text).toContain('bob@test.com');
|
|
||||||
expect(text).toContain('oidc');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('resolves user alias', async () => {
|
|
||||||
const deps = makeDeps([]);
|
|
||||||
const cmd = createGetCommand(deps);
|
|
||||||
await cmd.parseAsync(['node', 'test', 'user']);
|
|
||||||
expect(deps.fetchResource).toHaveBeenCalledWith('users', undefined);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('lists groups with correct columns', async () => {
|
|
||||||
const deps = makeDeps([
|
|
||||||
{
|
|
||||||
id: 'grp-1',
|
|
||||||
name: 'dev-team',
|
|
||||||
description: 'Developers',
|
|
||||||
members: [{ user: { email: 'alice@test.com' } }, { user: { email: 'bob@test.com' } }],
|
|
||||||
},
|
|
||||||
{ id: 'grp-2', name: 'ops-team', description: 'Operations', members: [] },
|
|
||||||
]);
|
|
||||||
const cmd = createGetCommand(deps);
|
|
||||||
await cmd.parseAsync(['node', 'test', 'groups']);
|
|
||||||
|
|
||||||
expect(deps.fetchResource).toHaveBeenCalledWith('groups', undefined);
|
|
||||||
const text = deps.output.join('\n');
|
|
||||||
expect(text).toContain('NAME');
|
|
||||||
expect(text).toContain('MEMBERS');
|
|
||||||
expect(text).toContain('DESCRIPTION');
|
|
||||||
expect(text).toContain('dev-team');
|
|
||||||
expect(text).toContain('2');
|
|
||||||
expect(text).toContain('ops-team');
|
|
||||||
expect(text).toContain('0');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('resolves group alias', async () => {
|
|
||||||
const deps = makeDeps([]);
|
|
||||||
const cmd = createGetCommand(deps);
|
|
||||||
await cmd.parseAsync(['node', 'test', 'group']);
|
|
||||||
expect(deps.fetchResource).toHaveBeenCalledWith('groups', undefined);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('lists rbac definitions with correct columns', async () => {
|
|
||||||
const deps = makeDeps([
|
|
||||||
{
|
|
||||||
id: 'rbac-1',
|
|
||||||
name: 'admins',
|
|
||||||
subjects: [{ kind: 'User', name: 'admin@test.com' }],
|
|
||||||
roleBindings: [{ role: 'edit', resource: '*' }],
|
|
||||||
},
|
|
||||||
]);
|
|
||||||
const cmd = createGetCommand(deps);
|
|
||||||
await cmd.parseAsync(['node', 'test', 'rbac']);
|
|
||||||
|
|
||||||
expect(deps.fetchResource).toHaveBeenCalledWith('rbac', undefined);
|
|
||||||
const text = deps.output.join('\n');
|
|
||||||
expect(text).toContain('NAME');
|
|
||||||
expect(text).toContain('SUBJECTS');
|
|
||||||
expect(text).toContain('BINDINGS');
|
|
||||||
expect(text).toContain('admins');
|
|
||||||
expect(text).toContain('User:admin@test.com');
|
|
||||||
expect(text).toContain('edit:*');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('resolves rbac-definition alias', async () => {
|
|
||||||
const deps = makeDeps([]);
|
|
||||||
const cmd = createGetCommand(deps);
|
|
||||||
await cmd.parseAsync(['node', 'test', 'rbac-definition']);
|
|
||||||
expect(deps.fetchResource).toHaveBeenCalledWith('rbac', undefined);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('lists projects with new columns', async () => {
|
|
||||||
const deps = makeDeps([{
|
|
||||||
id: 'proj-1',
|
|
||||||
name: 'smart-home',
|
|
||||||
description: 'Home automation',
|
|
||||||
proxyMode: 'filtered',
|
|
||||||
ownerId: 'usr-1',
|
|
||||||
servers: [{ server: { name: 'grafana' } }],
|
|
||||||
}]);
|
|
||||||
const cmd = createGetCommand(deps);
|
|
||||||
await cmd.parseAsync(['node', 'test', 'projects']);
|
|
||||||
|
|
||||||
const text = deps.output.join('\n');
|
|
||||||
expect(text).toContain('MODE');
|
|
||||||
expect(text).toContain('SERVERS');
|
|
||||||
expect(text).toContain('smart-home');
|
|
||||||
expect(text).toContain('filtered');
|
|
||||||
expect(text).toContain('1');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('displays mixed resource and operation bindings', async () => {
|
|
||||||
const deps = makeDeps([
|
|
||||||
{
|
|
||||||
id: 'rbac-1',
|
|
||||||
name: 'admin-access',
|
|
||||||
subjects: [{ kind: 'Group', name: 'admin' }],
|
|
||||||
roleBindings: [
|
|
||||||
{ role: 'edit', resource: '*' },
|
|
||||||
{ role: 'run', action: 'logs' },
|
|
||||||
{ role: 'run', action: 'backup' },
|
|
||||||
],
|
|
||||||
},
|
|
||||||
]);
|
|
||||||
const cmd = createGetCommand(deps);
|
|
||||||
await cmd.parseAsync(['node', 'test', 'rbac']);
|
|
||||||
|
|
||||||
const text = deps.output.join('\n');
|
|
||||||
expect(text).toContain('edit:*');
|
|
||||||
expect(text).toContain('run>logs');
|
|
||||||
expect(text).toContain('run>backup');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('displays name-scoped resource bindings', async () => {
|
|
||||||
const deps = makeDeps([
|
|
||||||
{
|
|
||||||
id: 'rbac-1',
|
|
||||||
name: 'ha-viewer',
|
|
||||||
subjects: [{ kind: 'User', name: 'alice@test.com' }],
|
|
||||||
roleBindings: [{ role: 'view', resource: 'servers', name: 'my-ha' }],
|
|
||||||
},
|
|
||||||
]);
|
|
||||||
const cmd = createGetCommand(deps);
|
|
||||||
await cmd.parseAsync(['node', 'test', 'rbac']);
|
|
||||||
|
|
||||||
const text = deps.output.join('\n');
|
|
||||||
expect(text).toContain('view:servers:my-ha');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('shows no results message for empty users list', async () => {
|
|
||||||
const deps = makeDeps([]);
|
|
||||||
const cmd = createGetCommand(deps);
|
|
||||||
await cmd.parseAsync(['node', 'test', 'users']);
|
|
||||||
expect(deps.output[0]).toContain('No users found');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('shows no results message for empty groups list', async () => {
|
|
||||||
const deps = makeDeps([]);
|
|
||||||
const cmd = createGetCommand(deps);
|
|
||||||
await cmd.parseAsync(['node', 'test', 'groups']);
|
|
||||||
expect(deps.output[0]).toContain('No groups found');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('shows no results message for empty rbac list', async () => {
|
|
||||||
const deps = makeDeps([]);
|
|
||||||
const cmd = createGetCommand(deps);
|
|
||||||
await cmd.parseAsync(['node', 'test', 'rbac']);
|
|
||||||
expect(deps.output[0]).toContain('No rbac found');
|
|
||||||
});
|
|
||||||
});
|
|
||||||
@@ -1,148 +0,0 @@
|
|||||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
|
||||||
import { createDeleteCommand } from '../../src/commands/delete.js';
|
|
||||||
import { createLogsCommand } from '../../src/commands/logs.js';
|
|
||||||
import type { ApiClient } from '../../src/api-client.js';
|
|
||||||
|
|
||||||
function mockClient(): ApiClient {
|
|
||||||
return {
|
|
||||||
get: vi.fn(async () => []),
|
|
||||||
post: vi.fn(async () => ({})),
|
|
||||||
put: vi.fn(async () => ({})),
|
|
||||||
delete: vi.fn(async () => {}),
|
|
||||||
} as unknown as ApiClient;
|
|
||||||
}
|
|
||||||
|
|
||||||
describe('delete command', () => {
|
|
||||||
let client: ReturnType<typeof mockClient>;
|
|
||||||
let output: string[];
|
|
||||||
const log = (...args: unknown[]) => output.push(args.map(String).join(' '));
|
|
||||||
|
|
||||||
beforeEach(() => {
|
|
||||||
client = mockClient();
|
|
||||||
output = [];
|
|
||||||
});
|
|
||||||
|
|
||||||
it('deletes an instance by ID', async () => {
|
|
||||||
const cmd = createDeleteCommand({ client, log });
|
|
||||||
await cmd.parseAsync(['instance', 'inst-1'], { from: 'user' });
|
|
||||||
expect(client.delete).toHaveBeenCalledWith('/api/v1/instances/inst-1');
|
|
||||||
expect(output.join('\n')).toContain('deleted');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('deletes a server by ID', async () => {
|
|
||||||
const cmd = createDeleteCommand({ client, log });
|
|
||||||
await cmd.parseAsync(['server', 'srv-1'], { from: 'user' });
|
|
||||||
expect(client.delete).toHaveBeenCalledWith('/api/v1/servers/srv-1');
|
|
||||||
expect(output.join('\n')).toContain('deleted');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('resolves server name to ID', async () => {
|
|
||||||
vi.mocked(client.get).mockResolvedValue([
|
|
||||||
{ id: 'srv-abc', name: 'ha-mcp' },
|
|
||||||
]);
|
|
||||||
const cmd = createDeleteCommand({ client, log });
|
|
||||||
await cmd.parseAsync(['server', 'ha-mcp'], { from: 'user' });
|
|
||||||
expect(client.delete).toHaveBeenCalledWith('/api/v1/servers/srv-abc');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('deletes a project', async () => {
|
|
||||||
const cmd = createDeleteCommand({ client, log });
|
|
||||||
await cmd.parseAsync(['project', 'proj-1'], { from: 'user' });
|
|
||||||
expect(client.delete).toHaveBeenCalledWith('/api/v1/projects/proj-1');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('accepts resource aliases', async () => {
|
|
||||||
const cmd = createDeleteCommand({ client, log });
|
|
||||||
await cmd.parseAsync(['srv', 'srv-1'], { from: 'user' });
|
|
||||||
expect(client.delete).toHaveBeenCalledWith('/api/v1/servers/srv-1');
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('logs command', () => {
|
|
||||||
let client: ReturnType<typeof mockClient>;
|
|
||||||
let output: string[];
|
|
||||||
const log = (...args: unknown[]) => output.push(args.map(String).join(' '));
|
|
||||||
|
|
||||||
beforeEach(() => {
|
|
||||||
client = mockClient();
|
|
||||||
output = [];
|
|
||||||
});
|
|
||||||
|
|
||||||
it('shows logs by instance ID', async () => {
|
|
||||||
vi.mocked(client.get)
|
|
||||||
.mockResolvedValueOnce({ id: 'inst-1', status: 'RUNNING' } as never) // instance lookup
|
|
||||||
.mockResolvedValueOnce({ stdout: 'hello world\n', stderr: '' } as never); // logs
|
|
||||||
const cmd = createLogsCommand({ client, log });
|
|
||||||
await cmd.parseAsync(['inst-1'], { from: 'user' });
|
|
||||||
expect(client.get).toHaveBeenCalledWith('/api/v1/instances/inst-1');
|
|
||||||
expect(client.get).toHaveBeenCalledWith('/api/v1/instances/inst-1/logs');
|
|
||||||
expect(output.join('\n')).toContain('hello world');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('resolves server name to instance ID', async () => {
|
|
||||||
vi.mocked(client.get)
|
|
||||||
.mockRejectedValueOnce(new Error('not found')) // instance lookup fails
|
|
||||||
.mockResolvedValueOnce([{ id: 'srv-1', name: 'my-grafana' }] as never) // servers list
|
|
||||||
.mockResolvedValueOnce([{ id: 'inst-1', status: 'RUNNING', containerId: 'abc' }] as never) // instances for server
|
|
||||||
.mockResolvedValueOnce({ stdout: 'grafana logs\n', stderr: '' } as never); // logs
|
|
||||||
const cmd = createLogsCommand({ client, log });
|
|
||||||
await cmd.parseAsync(['my-grafana'], { from: 'user' });
|
|
||||||
expect(client.get).toHaveBeenCalledWith('/api/v1/instances/inst-1/logs');
|
|
||||||
expect(output.join('\n')).toContain('grafana logs');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('picks RUNNING instance over others', async () => {
|
|
||||||
vi.mocked(client.get)
|
|
||||||
.mockRejectedValueOnce(new Error('not found'))
|
|
||||||
.mockResolvedValueOnce([{ id: 'srv-1', name: 'ha-mcp' }] as never)
|
|
||||||
.mockResolvedValueOnce([
|
|
||||||
{ id: 'inst-err', status: 'ERROR', containerId: null },
|
|
||||||
{ id: 'inst-ok', status: 'RUNNING', containerId: 'abc' },
|
|
||||||
] as never)
|
|
||||||
.mockResolvedValueOnce({ stdout: 'running instance\n', stderr: '' } as never);
|
|
||||||
const cmd = createLogsCommand({ client, log });
|
|
||||||
await cmd.parseAsync(['ha-mcp'], { from: 'user' });
|
|
||||||
expect(client.get).toHaveBeenCalledWith('/api/v1/instances/inst-ok/logs');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('selects specific replica with --instance', async () => {
|
|
||||||
vi.mocked(client.get)
|
|
||||||
.mockRejectedValueOnce(new Error('not found'))
|
|
||||||
.mockResolvedValueOnce([{ id: 'srv-1', name: 'ha-mcp' }] as never)
|
|
||||||
.mockResolvedValueOnce([
|
|
||||||
{ id: 'inst-0', status: 'RUNNING', containerId: 'a' },
|
|
||||||
{ id: 'inst-1', status: 'RUNNING', containerId: 'b' },
|
|
||||||
] as never)
|
|
||||||
.mockResolvedValueOnce({ stdout: 'replica 1\n', stderr: '' } as never);
|
|
||||||
const cmd = createLogsCommand({ client, log });
|
|
||||||
await cmd.parseAsync(['ha-mcp', '-i', '1'], { from: 'user' });
|
|
||||||
expect(client.get).toHaveBeenCalledWith('/api/v1/instances/inst-1/logs');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('throws on out-of-range --instance index', async () => {
|
|
||||||
vi.mocked(client.get)
|
|
||||||
.mockRejectedValueOnce(new Error('not found'))
|
|
||||||
.mockResolvedValueOnce([{ id: 'srv-1', name: 'ha-mcp' }] as never)
|
|
||||||
.mockResolvedValueOnce([{ id: 'inst-0', status: 'RUNNING' }] as never);
|
|
||||||
const cmd = createLogsCommand({ client, log });
|
|
||||||
await expect(cmd.parseAsync(['ha-mcp', '-i', '5'], { from: 'user' })).rejects.toThrow('out of range');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('throws when server has no instances', async () => {
|
|
||||||
vi.mocked(client.get)
|
|
||||||
.mockRejectedValueOnce(new Error('not found'))
|
|
||||||
.mockResolvedValueOnce([{ id: 'srv-1', name: 'empty-srv' }] as never)
|
|
||||||
.mockResolvedValueOnce([] as never);
|
|
||||||
const cmd = createLogsCommand({ client, log });
|
|
||||||
await expect(cmd.parseAsync(['empty-srv'], { from: 'user' })).rejects.toThrow('No instances found');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('passes tail option', async () => {
|
|
||||||
vi.mocked(client.get)
|
|
||||||
.mockResolvedValueOnce({ id: 'inst-1' } as never)
|
|
||||||
.mockResolvedValueOnce({ stdout: '', stderr: '' } as never);
|
|
||||||
const cmd = createLogsCommand({ client, log });
|
|
||||||
await cmd.parseAsync(['inst-1', '-t', '50'], { from: 'user' });
|
|
||||||
expect(client.get).toHaveBeenCalledWith('/api/v1/instances/inst-1/logs?tail=50');
|
|
||||||
});
|
|
||||||
});
|
|
||||||
@@ -1,116 +0,0 @@
|
|||||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
|
||||||
import { createCreateCommand } from '../../src/commands/create.js';
|
|
||||||
import { createGetCommand } from '../../src/commands/get.js';
|
|
||||||
import { createDescribeCommand } from '../../src/commands/describe.js';
|
|
||||||
import { type ApiClient, ApiError } from '../../src/api-client.js';
|
|
||||||
|
|
||||||
function mockClient(): ApiClient {
|
|
||||||
return {
|
|
||||||
get: vi.fn(async () => []),
|
|
||||||
post: vi.fn(async () => ({ id: 'new-id', name: 'test' })),
|
|
||||||
put: vi.fn(async () => ({})),
|
|
||||||
delete: vi.fn(async () => {}),
|
|
||||||
} as unknown as ApiClient;
|
|
||||||
}
|
|
||||||
|
|
||||||
describe('project with new fields', () => {
|
|
||||||
let client: ReturnType<typeof mockClient>;
|
|
||||||
let output: string[];
|
|
||||||
const log = (...args: unknown[]) => output.push(args.map(String).join(' '));
|
|
||||||
|
|
||||||
beforeEach(() => {
|
|
||||||
client = mockClient();
|
|
||||||
output = [];
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('create project with enhanced options', () => {
|
|
||||||
it('creates project with proxy mode and servers', async () => {
|
|
||||||
const cmd = createCreateCommand({ client, log });
|
|
||||||
await cmd.parseAsync([
|
|
||||||
'project', 'smart-home',
|
|
||||||
'-d', 'Smart home project',
|
|
||||||
'--proxy-mode', 'filtered',
|
|
||||||
'--proxy-mode-llm-provider', 'gemini-cli',
|
|
||||||
'--proxy-mode-llm-model', 'gemini-2.0-flash',
|
|
||||||
'--server', 'my-grafana',
|
|
||||||
'--server', 'my-ha',
|
|
||||||
], { from: 'user' });
|
|
||||||
|
|
||||||
expect(client.post).toHaveBeenCalledWith('/api/v1/projects', expect.objectContaining({
|
|
||||||
name: 'smart-home',
|
|
||||||
description: 'Smart home project',
|
|
||||||
proxyMode: 'filtered',
|
|
||||||
llmProvider: 'gemini-cli',
|
|
||||||
llmModel: 'gemini-2.0-flash',
|
|
||||||
servers: ['my-grafana', 'my-ha'],
|
|
||||||
}));
|
|
||||||
});
|
|
||||||
|
|
||||||
it('defaults proxy mode to direct', async () => {
|
|
||||||
const cmd = createCreateCommand({ client, log });
|
|
||||||
await cmd.parseAsync(['project', 'basic'], { from: 'user' });
|
|
||||||
|
|
||||||
expect(client.post).toHaveBeenCalledWith('/api/v1/projects', expect.objectContaining({
|
|
||||||
proxyMode: 'direct',
|
|
||||||
}));
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('get projects shows new columns', () => {
|
|
||||||
it('shows MODE and SERVERS columns', async () => {
|
|
||||||
const deps = {
|
|
||||||
output: [] as string[],
|
|
||||||
fetchResource: vi.fn(async () => [{
|
|
||||||
id: 'proj-1',
|
|
||||||
name: 'smart-home',
|
|
||||||
description: 'Test',
|
|
||||||
proxyMode: 'filtered',
|
|
||||||
ownerId: 'user-1',
|
|
||||||
servers: [{ server: { name: 'grafana' } }, { server: { name: 'ha' } }],
|
|
||||||
}]),
|
|
||||||
log: (...args: string[]) => deps.output.push(args.join(' ')),
|
|
||||||
};
|
|
||||||
const cmd = createGetCommand(deps);
|
|
||||||
await cmd.parseAsync(['node', 'test', 'projects']);
|
|
||||||
|
|
||||||
const text = deps.output.join('\n');
|
|
||||||
expect(text).toContain('MODE');
|
|
||||||
expect(text).toContain('SERVERS');
|
|
||||||
expect(text).toContain('smart-home');
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('describe project shows full detail', () => {
|
|
||||||
it('shows servers and proxy config', async () => {
|
|
||||||
const deps = {
|
|
||||||
output: [] as string[],
|
|
||||||
client: mockClient(),
|
|
||||||
fetchResource: vi.fn(async () => ({
|
|
||||||
id: 'proj-1',
|
|
||||||
name: 'smart-home',
|
|
||||||
description: 'Smart home',
|
|
||||||
proxyMode: 'filtered',
|
|
||||||
llmProvider: 'gemini-cli',
|
|
||||||
llmModel: 'gemini-2.0-flash',
|
|
||||||
ownerId: 'user-1',
|
|
||||||
servers: [
|
|
||||||
{ server: { name: 'my-grafana' } },
|
|
||||||
{ server: { name: 'my-ha' } },
|
|
||||||
],
|
|
||||||
createdAt: '2025-01-01',
|
|
||||||
updatedAt: '2025-01-01',
|
|
||||||
})),
|
|
||||||
log: (...args: string[]) => deps.output.push(args.join(' ')),
|
|
||||||
};
|
|
||||||
const cmd = createDescribeCommand(deps);
|
|
||||||
await cmd.parseAsync(['node', 'test', 'project', 'proj-1']);
|
|
||||||
|
|
||||||
const text = deps.output.join('\n');
|
|
||||||
expect(text).toContain('=== Project: smart-home ===');
|
|
||||||
expect(text).toContain('filtered');
|
|
||||||
expect(text).toContain('gemini-cli');
|
|
||||||
expect(text).toContain('my-grafana');
|
|
||||||
expect(text).toContain('my-ha');
|
|
||||||
});
|
|
||||||
});
|
|
||||||
});
|
|
||||||
@@ -4,7 +4,6 @@ import { join } from 'node:path';
|
|||||||
import { tmpdir } from 'node:os';
|
import { tmpdir } from 'node:os';
|
||||||
import { createStatusCommand } from '../../src/commands/status.js';
|
import { createStatusCommand } from '../../src/commands/status.js';
|
||||||
import { saveConfig, DEFAULT_CONFIG } from '../../src/config/index.js';
|
import { saveConfig, DEFAULT_CONFIG } from '../../src/config/index.js';
|
||||||
import { saveCredentials } from '../../src/auth/index.js';
|
|
||||||
|
|
||||||
let tempDir: string;
|
let tempDir: string;
|
||||||
let output: string[];
|
let output: string[];
|
||||||
@@ -26,101 +25,67 @@ describe('status command', () => {
|
|||||||
it('shows status in table format', async () => {
|
it('shows status in table format', async () => {
|
||||||
const cmd = createStatusCommand({
|
const cmd = createStatusCommand({
|
||||||
configDeps: { configDir: tempDir },
|
configDeps: { configDir: tempDir },
|
||||||
credentialsDeps: { configDir: tempDir },
|
|
||||||
log,
|
log,
|
||||||
checkHealth: async () => true,
|
checkDaemon: async () => true,
|
||||||
});
|
});
|
||||||
await cmd.parseAsync([], { from: 'user' });
|
await cmd.parseAsync([], { from: 'user' });
|
||||||
const out = output.join('\n');
|
expect(output.join('\n')).toContain('mcpctl v');
|
||||||
expect(out).toContain('mcpctl v');
|
expect(output.join('\n')).toContain('connected');
|
||||||
expect(out).toContain('mcplocal:');
|
|
||||||
expect(out).toContain('mcpd:');
|
|
||||||
expect(out).toContain('connected');
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it('shows unreachable when daemons are down', async () => {
|
it('shows unreachable when daemon is down', async () => {
|
||||||
const cmd = createStatusCommand({
|
const cmd = createStatusCommand({
|
||||||
configDeps: { configDir: tempDir },
|
configDeps: { configDir: tempDir },
|
||||||
credentialsDeps: { configDir: tempDir },
|
|
||||||
log,
|
log,
|
||||||
checkHealth: async () => false,
|
checkDaemon: async () => false,
|
||||||
});
|
});
|
||||||
await cmd.parseAsync([], { from: 'user' });
|
await cmd.parseAsync([], { from: 'user' });
|
||||||
expect(output.join('\n')).toContain('unreachable');
|
expect(output.join('\n')).toContain('unreachable');
|
||||||
});
|
});
|
||||||
|
|
||||||
it('shows not logged in when no credentials', async () => {
|
|
||||||
const cmd = createStatusCommand({
|
|
||||||
configDeps: { configDir: tempDir },
|
|
||||||
credentialsDeps: { configDir: tempDir },
|
|
||||||
log,
|
|
||||||
checkHealth: async () => true,
|
|
||||||
});
|
|
||||||
await cmd.parseAsync([], { from: 'user' });
|
|
||||||
expect(output.join('\n')).toContain('not logged in');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('shows logged in user when credentials exist', async () => {
|
|
||||||
saveCredentials({ token: 'tok', mcpdUrl: 'http://x:3100', user: 'alice@example.com' }, { configDir: tempDir });
|
|
||||||
const cmd = createStatusCommand({
|
|
||||||
configDeps: { configDir: tempDir },
|
|
||||||
credentialsDeps: { configDir: tempDir },
|
|
||||||
log,
|
|
||||||
checkHealth: async () => true,
|
|
||||||
});
|
|
||||||
await cmd.parseAsync([], { from: 'user' });
|
|
||||||
expect(output.join('\n')).toContain('logged in as alice@example.com');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('shows status in JSON format', async () => {
|
it('shows status in JSON format', async () => {
|
||||||
const cmd = createStatusCommand({
|
const cmd = createStatusCommand({
|
||||||
configDeps: { configDir: tempDir },
|
configDeps: { configDir: tempDir },
|
||||||
credentialsDeps: { configDir: tempDir },
|
|
||||||
log,
|
log,
|
||||||
checkHealth: async () => true,
|
checkDaemon: async () => true,
|
||||||
});
|
});
|
||||||
await cmd.parseAsync(['-o', 'json'], { from: 'user' });
|
await cmd.parseAsync(['-o', 'json'], { from: 'user' });
|
||||||
const parsed = JSON.parse(output[0]) as Record<string, unknown>;
|
const parsed = JSON.parse(output[0]) as Record<string, unknown>;
|
||||||
expect(parsed['version']).toBe('0.1.0');
|
expect(parsed['version']).toBe('0.1.0');
|
||||||
expect(parsed['mcplocalReachable']).toBe(true);
|
expect(parsed['daemonReachable']).toBe(true);
|
||||||
expect(parsed['mcpdReachable']).toBe(true);
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it('shows status in YAML format', async () => {
|
it('shows status in YAML format', async () => {
|
||||||
const cmd = createStatusCommand({
|
const cmd = createStatusCommand({
|
||||||
configDeps: { configDir: tempDir },
|
configDeps: { configDir: tempDir },
|
||||||
credentialsDeps: { configDir: tempDir },
|
|
||||||
log,
|
log,
|
||||||
checkHealth: async () => false,
|
checkDaemon: async () => false,
|
||||||
});
|
});
|
||||||
await cmd.parseAsync(['-o', 'yaml'], { from: 'user' });
|
await cmd.parseAsync(['-o', 'yaml'], { from: 'user' });
|
||||||
expect(output[0]).toContain('mcplocalReachable: false');
|
expect(output[0]).toContain('daemonReachable: false');
|
||||||
});
|
});
|
||||||
|
|
||||||
it('checks correct URLs from config', async () => {
|
it('uses custom daemon URL from config', async () => {
|
||||||
saveConfig({ ...DEFAULT_CONFIG, mcplocalUrl: 'http://local:3200', mcpdUrl: 'http://remote:3100' }, { configDir: tempDir });
|
saveConfig({ ...DEFAULT_CONFIG, daemonUrl: 'http://custom:5555' }, { configDir: tempDir });
|
||||||
const checkedUrls: string[] = [];
|
let checkedUrl = '';
|
||||||
const cmd = createStatusCommand({
|
const cmd = createStatusCommand({
|
||||||
configDeps: { configDir: tempDir },
|
configDeps: { configDir: tempDir },
|
||||||
credentialsDeps: { configDir: tempDir },
|
|
||||||
log,
|
log,
|
||||||
checkHealth: async (url) => {
|
checkDaemon: async (url) => {
|
||||||
checkedUrls.push(url);
|
checkedUrl = url;
|
||||||
return false;
|
return false;
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
await cmd.parseAsync([], { from: 'user' });
|
await cmd.parseAsync([], { from: 'user' });
|
||||||
expect(checkedUrls).toContain('http://local:3200');
|
expect(checkedUrl).toBe('http://custom:5555');
|
||||||
expect(checkedUrls).toContain('http://remote:3100');
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it('shows registries from config', async () => {
|
it('shows registries from config', async () => {
|
||||||
saveConfig({ ...DEFAULT_CONFIG, registries: ['official'] }, { configDir: tempDir });
|
saveConfig({ ...DEFAULT_CONFIG, registries: ['official'] }, { configDir: tempDir });
|
||||||
const cmd = createStatusCommand({
|
const cmd = createStatusCommand({
|
||||||
configDeps: { configDir: tempDir },
|
configDeps: { configDir: tempDir },
|
||||||
credentialsDeps: { configDir: tempDir },
|
|
||||||
log,
|
log,
|
||||||
checkHealth: async () => true,
|
checkDaemon: async () => true,
|
||||||
});
|
});
|
||||||
await cmd.parseAsync([], { from: 'user' });
|
await cmd.parseAsync([], { from: 'user' });
|
||||||
expect(output.join('\n')).toContain('official');
|
expect(output.join('\n')).toContain('official');
|
||||||
|
|||||||
@@ -28,25 +28,18 @@ describe('loadConfig', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
it('loads config from file', () => {
|
it('loads config from file', () => {
|
||||||
saveConfig({ ...DEFAULT_CONFIG, mcplocalUrl: 'http://custom:5000' }, { configDir: tempDir });
|
saveConfig({ ...DEFAULT_CONFIG, daemonUrl: 'http://custom:5000' }, { configDir: tempDir });
|
||||||
const config = loadConfig({ configDir: tempDir });
|
const config = loadConfig({ configDir: tempDir });
|
||||||
expect(config.mcplocalUrl).toBe('http://custom:5000');
|
expect(config.daemonUrl).toBe('http://custom:5000');
|
||||||
});
|
});
|
||||||
|
|
||||||
it('applies defaults for missing fields', () => {
|
it('applies defaults for missing fields', () => {
|
||||||
const { writeFileSync } = require('node:fs') as typeof import('node:fs');
|
const { writeFileSync } = require('node:fs') as typeof import('node:fs');
|
||||||
writeFileSync(join(tempDir, 'config.json'), '{"mcplocalUrl":"http://x:1"}');
|
writeFileSync(join(tempDir, 'config.json'), '{"daemonUrl":"http://x:1"}');
|
||||||
const config = loadConfig({ configDir: tempDir });
|
const config = loadConfig({ configDir: tempDir });
|
||||||
expect(config.mcplocalUrl).toBe('http://x:1');
|
expect(config.daemonUrl).toBe('http://x:1');
|
||||||
expect(config.registries).toEqual(['official', 'glama', 'smithery']);
|
expect(config.registries).toEqual(['official', 'glama', 'smithery']);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('backward compat: daemonUrl maps to mcplocalUrl', () => {
|
|
||||||
const { writeFileSync } = require('node:fs') as typeof import('node:fs');
|
|
||||||
writeFileSync(join(tempDir, 'config.json'), '{"daemonUrl":"http://old:3000"}');
|
|
||||||
const config = loadConfig({ configDir: tempDir });
|
|
||||||
expect(config.mcplocalUrl).toBe('http://old:3000');
|
|
||||||
});
|
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('saveConfig', () => {
|
describe('saveConfig', () => {
|
||||||
@@ -64,7 +57,7 @@ describe('saveConfig', () => {
|
|||||||
it('round-trips configuration', () => {
|
it('round-trips configuration', () => {
|
||||||
const custom = {
|
const custom = {
|
||||||
...DEFAULT_CONFIG,
|
...DEFAULT_CONFIG,
|
||||||
mcplocalUrl: 'http://custom:9000',
|
daemonUrl: 'http://custom:9000',
|
||||||
registries: ['official' as const],
|
registries: ['official' as const],
|
||||||
outputFormat: 'json' as const,
|
outputFormat: 'json' as const,
|
||||||
};
|
};
|
||||||
@@ -77,14 +70,14 @@ describe('saveConfig', () => {
|
|||||||
describe('mergeConfig', () => {
|
describe('mergeConfig', () => {
|
||||||
it('merges overrides into existing config', () => {
|
it('merges overrides into existing config', () => {
|
||||||
saveConfig(DEFAULT_CONFIG, { configDir: tempDir });
|
saveConfig(DEFAULT_CONFIG, { configDir: tempDir });
|
||||||
const merged = mergeConfig({ mcplocalUrl: 'http://new:1234' }, { configDir: tempDir });
|
const merged = mergeConfig({ daemonUrl: 'http://new:1234' }, { configDir: tempDir });
|
||||||
expect(merged.mcplocalUrl).toBe('http://new:1234');
|
expect(merged.daemonUrl).toBe('http://new:1234');
|
||||||
expect(merged.registries).toEqual(DEFAULT_CONFIG.registries);
|
expect(merged.registries).toEqual(DEFAULT_CONFIG.registries);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('works when no config file exists', () => {
|
it('works when no config file exists', () => {
|
||||||
const merged = mergeConfig({ outputFormat: 'yaml' }, { configDir: tempDir });
|
const merged = mergeConfig({ outputFormat: 'yaml' }, { configDir: tempDir });
|
||||||
expect(merged.outputFormat).toBe('yaml');
|
expect(merged.outputFormat).toBe('yaml');
|
||||||
expect(merged.mcplocalUrl).toBe('http://localhost:3200');
|
expect(merged.daemonUrl).toBe('http://localhost:3000');
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -4,8 +4,7 @@ import { McpctlConfigSchema, DEFAULT_CONFIG } from '../../src/config/schema.js';
|
|||||||
describe('McpctlConfigSchema', () => {
|
describe('McpctlConfigSchema', () => {
|
||||||
it('provides sensible defaults from empty object', () => {
|
it('provides sensible defaults from empty object', () => {
|
||||||
const config = McpctlConfigSchema.parse({});
|
const config = McpctlConfigSchema.parse({});
|
||||||
expect(config.mcplocalUrl).toBe('http://localhost:3200');
|
expect(config.daemonUrl).toBe('http://localhost:3000');
|
||||||
expect(config.mcpdUrl).toBe('http://localhost:3100');
|
|
||||||
expect(config.registries).toEqual(['official', 'glama', 'smithery']);
|
expect(config.registries).toEqual(['official', 'glama', 'smithery']);
|
||||||
expect(config.cacheTTLMs).toBe(3_600_000);
|
expect(config.cacheTTLMs).toBe(3_600_000);
|
||||||
expect(config.outputFormat).toBe('table');
|
expect(config.outputFormat).toBe('table');
|
||||||
@@ -16,8 +15,7 @@ describe('McpctlConfigSchema', () => {
|
|||||||
|
|
||||||
it('validates a full config', () => {
|
it('validates a full config', () => {
|
||||||
const config = McpctlConfigSchema.parse({
|
const config = McpctlConfigSchema.parse({
|
||||||
mcplocalUrl: 'http://local:3200',
|
daemonUrl: 'http://custom:4000',
|
||||||
mcpdUrl: 'http://custom:4000',
|
|
||||||
registries: ['official'],
|
registries: ['official'],
|
||||||
cacheTTLMs: 60_000,
|
cacheTTLMs: 60_000,
|
||||||
httpProxy: 'http://proxy:8080',
|
httpProxy: 'http://proxy:8080',
|
||||||
@@ -25,26 +23,11 @@ describe('McpctlConfigSchema', () => {
|
|||||||
outputFormat: 'json',
|
outputFormat: 'json',
|
||||||
smitheryApiKey: 'sk-test',
|
smitheryApiKey: 'sk-test',
|
||||||
});
|
});
|
||||||
expect(config.mcplocalUrl).toBe('http://local:3200');
|
expect(config.daemonUrl).toBe('http://custom:4000');
|
||||||
expect(config.mcpdUrl).toBe('http://custom:4000');
|
|
||||||
expect(config.registries).toEqual(['official']);
|
expect(config.registries).toEqual(['official']);
|
||||||
expect(config.outputFormat).toBe('json');
|
expect(config.outputFormat).toBe('json');
|
||||||
});
|
});
|
||||||
|
|
||||||
it('backward compat: maps daemonUrl to mcplocalUrl', () => {
|
|
||||||
const config = McpctlConfigSchema.parse({ daemonUrl: 'http://legacy:3000' });
|
|
||||||
expect(config.mcplocalUrl).toBe('http://legacy:3000');
|
|
||||||
expect(config.mcpdUrl).toBe('http://localhost:3100');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('mcplocalUrl takes precedence over daemonUrl', () => {
|
|
||||||
const config = McpctlConfigSchema.parse({
|
|
||||||
daemonUrl: 'http://legacy:3000',
|
|
||||||
mcplocalUrl: 'http://explicit:3200',
|
|
||||||
});
|
|
||||||
expect(config.mcplocalUrl).toBe('http://explicit:3200');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('rejects invalid registry names', () => {
|
it('rejects invalid registry names', () => {
|
||||||
expect(() => McpctlConfigSchema.parse({ registries: ['invalid'] })).toThrow();
|
expect(() => McpctlConfigSchema.parse({ registries: ['invalid'] })).toThrow();
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -1,76 +0,0 @@
|
|||||||
import { describe, it, expect } from 'vitest';
|
|
||||||
import { createProgram } from '../../src/index.js';
|
|
||||||
|
|
||||||
/**
|
|
||||||
* End-to-end tests that verify CLI command registration and help output
|
|
||||||
* without requiring a running daemon.
|
|
||||||
*/
|
|
||||||
describe('CLI command registration (e2e)', () => {
|
|
||||||
it('program has all expected commands', () => {
|
|
||||||
const program = createProgram();
|
|
||||||
const commandNames = program.commands.map((c) => c.name());
|
|
||||||
|
|
||||||
expect(commandNames).toContain('config');
|
|
||||||
expect(commandNames).toContain('status');
|
|
||||||
expect(commandNames).toContain('login');
|
|
||||||
expect(commandNames).toContain('logout');
|
|
||||||
expect(commandNames).toContain('get');
|
|
||||||
expect(commandNames).toContain('describe');
|
|
||||||
expect(commandNames).toContain('delete');
|
|
||||||
expect(commandNames).toContain('logs');
|
|
||||||
expect(commandNames).toContain('apply');
|
|
||||||
expect(commandNames).toContain('create');
|
|
||||||
expect(commandNames).toContain('edit');
|
|
||||||
expect(commandNames).toContain('backup');
|
|
||||||
expect(commandNames).toContain('restore');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('old project and claude top-level commands are removed', () => {
|
|
||||||
const program = createProgram();
|
|
||||||
const commandNames = program.commands.map((c) => c.name());
|
|
||||||
expect(commandNames).not.toContain('claude');
|
|
||||||
expect(commandNames).not.toContain('project');
|
|
||||||
expect(commandNames).not.toContain('instance');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('config command has claude-generate and impersonate subcommands', () => {
|
|
||||||
const program = createProgram();
|
|
||||||
const config = program.commands.find((c) => c.name() === 'config');
|
|
||||||
expect(config).toBeDefined();
|
|
||||||
|
|
||||||
const subcommands = config!.commands.map((c) => c.name());
|
|
||||||
expect(subcommands).toContain('claude-generate');
|
|
||||||
expect(subcommands).toContain('impersonate');
|
|
||||||
expect(subcommands).toContain('view');
|
|
||||||
expect(subcommands).toContain('set');
|
|
||||||
expect(subcommands).toContain('path');
|
|
||||||
expect(subcommands).toContain('reset');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('create command has user, group, rbac subcommands', () => {
|
|
||||||
const program = createProgram();
|
|
||||||
const create = program.commands.find((c) => c.name() === 'create');
|
|
||||||
expect(create).toBeDefined();
|
|
||||||
|
|
||||||
const subcommands = create!.commands.map((c) => c.name());
|
|
||||||
expect(subcommands).toContain('server');
|
|
||||||
expect(subcommands).toContain('secret');
|
|
||||||
expect(subcommands).toContain('project');
|
|
||||||
expect(subcommands).toContain('user');
|
|
||||||
expect(subcommands).toContain('group');
|
|
||||||
expect(subcommands).toContain('rbac');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('displays version', () => {
|
|
||||||
const program = createProgram();
|
|
||||||
expect(program.version()).toBeDefined();
|
|
||||||
expect(program.version()).toMatch(/^\d+\.\d+\.\d+$/);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('displays help without error', () => {
|
|
||||||
const program = createProgram();
|
|
||||||
const helpText = program.helpInformation();
|
|
||||||
expect(helpText).toContain('mcpctl');
|
|
||||||
expect(helpText).toContain('Manage MCP servers');
|
|
||||||
});
|
|
||||||
});
|
|
||||||
@@ -1,90 +0,0 @@
|
|||||||
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
|
|
||||||
import { RegistryCache } from '../../src/registry/cache.js';
|
|
||||||
import type { RegistryServer, SearchOptions } from '../../src/registry/types.js';
|
|
||||||
|
|
||||||
function makeServer(name: string): RegistryServer {
|
|
||||||
return {
|
|
||||||
name,
|
|
||||||
description: `${name} server`,
|
|
||||||
packages: {},
|
|
||||||
envTemplate: [],
|
|
||||||
transport: 'stdio',
|
|
||||||
popularityScore: 0,
|
|
||||||
verified: false,
|
|
||||||
sourceRegistry: 'official',
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
const defaultOptions: SearchOptions = { query: 'test' };
|
|
||||||
|
|
||||||
describe('RegistryCache', () => {
|
|
||||||
beforeEach(() => {
|
|
||||||
vi.useFakeTimers();
|
|
||||||
});
|
|
||||||
afterEach(() => {
|
|
||||||
vi.useRealTimers();
|
|
||||||
});
|
|
||||||
|
|
||||||
it('returns null for cache miss', () => {
|
|
||||||
const cache = new RegistryCache();
|
|
||||||
expect(cache.get('unknown', defaultOptions)).toBeNull();
|
|
||||||
});
|
|
||||||
|
|
||||||
it('returns data for cache hit within TTL', () => {
|
|
||||||
const cache = new RegistryCache();
|
|
||||||
const data = [makeServer('test')];
|
|
||||||
cache.set('test', defaultOptions, data);
|
|
||||||
expect(cache.get('test', defaultOptions)).toEqual(data);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('returns null after TTL expires', () => {
|
|
||||||
const cache = new RegistryCache(1000); // 1 second TTL
|
|
||||||
cache.set('test', defaultOptions, [makeServer('test')]);
|
|
||||||
|
|
||||||
vi.advanceTimersByTime(1001);
|
|
||||||
expect(cache.get('test', defaultOptions)).toBeNull();
|
|
||||||
});
|
|
||||||
|
|
||||||
it('generates deterministic cache keys', () => {
|
|
||||||
const cache = new RegistryCache();
|
|
||||||
const data = [makeServer('test')];
|
|
||||||
cache.set('query', { query: 'query', limit: 10 }, data);
|
|
||||||
expect(cache.get('query', { query: 'query', limit: 10 })).toEqual(data);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('generates different keys for different queries', () => {
|
|
||||||
const cache = new RegistryCache();
|
|
||||||
cache.set('a', { query: 'a' }, [makeServer('a')]);
|
|
||||||
expect(cache.get('b', { query: 'b' })).toBeNull();
|
|
||||||
});
|
|
||||||
|
|
||||||
it('tracks hits and misses correctly', () => {
|
|
||||||
const cache = new RegistryCache();
|
|
||||||
cache.set('test', defaultOptions, [makeServer('test')]);
|
|
||||||
|
|
||||||
cache.get('test', defaultOptions); // hit
|
|
||||||
cache.get('test', defaultOptions); // hit
|
|
||||||
cache.get('miss', { query: 'miss' }); // miss
|
|
||||||
|
|
||||||
const ratio = cache.getHitRatio();
|
|
||||||
expect(ratio.hits).toBe(2);
|
|
||||||
expect(ratio.misses).toBe(1);
|
|
||||||
expect(ratio.ratio).toBeCloseTo(2 / 3);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('returns 0 ratio when no accesses', () => {
|
|
||||||
const cache = new RegistryCache();
|
|
||||||
expect(cache.getHitRatio().ratio).toBe(0);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('clears all entries and resets metrics', () => {
|
|
||||||
const cache = new RegistryCache();
|
|
||||||
cache.set('a', { query: 'a' }, [makeServer('a')]);
|
|
||||||
cache.get('a', { query: 'a' }); // hit
|
|
||||||
cache.clear();
|
|
||||||
|
|
||||||
expect(cache.get('a', { query: 'a' })).toBeNull();
|
|
||||||
expect(cache.size).toBe(0);
|
|
||||||
expect(cache.getHitRatio().hits).toBe(0);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
@@ -1,282 +0,0 @@
|
|||||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
|
||||||
import { RegistryClient } from '../../src/registry/client.js';
|
|
||||||
import type { RegistryServer } from '../../src/registry/types.js';
|
|
||||||
|
|
||||||
function makeServer(name: string, source: 'official' | 'glama' | 'smithery'): RegistryServer {
|
|
||||||
return {
|
|
||||||
name,
|
|
||||||
description: `${name} description`,
|
|
||||||
packages: { npm: `@test/${name}` },
|
|
||||||
envTemplate: [],
|
|
||||||
transport: 'stdio',
|
|
||||||
popularityScore: 50,
|
|
||||||
verified: source === 'smithery',
|
|
||||||
sourceRegistry: source,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
// Mock fetch globally
|
|
||||||
const mockFetch = vi.fn();
|
|
||||||
|
|
||||||
beforeEach(() => {
|
|
||||||
vi.stubGlobal('fetch', mockFetch);
|
|
||||||
mockFetch.mockReset();
|
|
||||||
});
|
|
||||||
|
|
||||||
function mockRegistryResponse(source: string, servers: RegistryServer[]): void {
|
|
||||||
mockFetch.mockImplementation((url: string) => {
|
|
||||||
if (url.includes('registry.modelcontextprotocol.io')) {
|
|
||||||
return Promise.resolve({
|
|
||||||
ok: true,
|
|
||||||
json: () => Promise.resolve({
|
|
||||||
servers: servers
|
|
||||||
.filter((s) => s.sourceRegistry === 'official')
|
|
||||||
.map((s) => ({
|
|
||||||
server: {
|
|
||||||
name: s.name,
|
|
||||||
description: s.description,
|
|
||||||
packages: s.packages.npm !== undefined ? [{
|
|
||||||
registryType: 'npm',
|
|
||||||
identifier: s.packages.npm,
|
|
||||||
transport: { type: 'stdio' },
|
|
||||||
environmentVariables: [],
|
|
||||||
}] : [],
|
|
||||||
remotes: [],
|
|
||||||
},
|
|
||||||
})),
|
|
||||||
metadata: { nextCursor: null, count: 1 },
|
|
||||||
}),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
if (url.includes('glama.ai')) {
|
|
||||||
return Promise.resolve({
|
|
||||||
ok: true,
|
|
||||||
json: () => Promise.resolve({
|
|
||||||
servers: servers
|
|
||||||
.filter((s) => s.sourceRegistry === 'glama')
|
|
||||||
.map((s) => ({
|
|
||||||
id: s.name,
|
|
||||||
name: s.name,
|
|
||||||
description: s.description,
|
|
||||||
attributes: [],
|
|
||||||
slug: s.packages.npm ?? '',
|
|
||||||
})),
|
|
||||||
pageInfo: { hasNextPage: false, hasPreviousPage: false },
|
|
||||||
}),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
if (url.includes('registry.smithery.ai')) {
|
|
||||||
return Promise.resolve({
|
|
||||||
ok: true,
|
|
||||||
json: () => Promise.resolve({
|
|
||||||
servers: servers
|
|
||||||
.filter((s) => s.sourceRegistry === 'smithery')
|
|
||||||
.map((s) => ({
|
|
||||||
qualifiedName: s.name,
|
|
||||||
displayName: s.name,
|
|
||||||
description: s.description,
|
|
||||||
verified: s.verified,
|
|
||||||
useCount: s.popularityScore,
|
|
||||||
remote: false,
|
|
||||||
})),
|
|
||||||
pagination: { currentPage: 1, pageSize: 20, totalPages: 1, totalCount: 1 },
|
|
||||||
}),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
return Promise.reject(new Error(`Unexpected URL: ${url}`));
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
describe('RegistryClient', () => {
|
|
||||||
it('queries all enabled registries', async () => {
|
|
||||||
const testServers = [
|
|
||||||
makeServer('slack-official', 'official'),
|
|
||||||
makeServer('slack-glama', 'glama'),
|
|
||||||
makeServer('slack-smithery', 'smithery'),
|
|
||||||
];
|
|
||||||
mockRegistryResponse('all', testServers);
|
|
||||||
|
|
||||||
const client = new RegistryClient();
|
|
||||||
const results = await client.search({ query: 'slack' });
|
|
||||||
|
|
||||||
expect(results.length).toBeGreaterThan(0);
|
|
||||||
expect(mockFetch).toHaveBeenCalledTimes(3);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('uses cached results on second call', async () => {
|
|
||||||
mockRegistryResponse('all', [makeServer('slack', 'official')]);
|
|
||||||
|
|
||||||
const client = new RegistryClient();
|
|
||||||
await client.search({ query: 'slack' });
|
|
||||||
mockFetch.mockClear();
|
|
||||||
await client.search({ query: 'slack' });
|
|
||||||
|
|
||||||
expect(mockFetch).not.toHaveBeenCalled();
|
|
||||||
});
|
|
||||||
|
|
||||||
it('filters by registry when specified', async () => {
|
|
||||||
mockRegistryResponse('all', [makeServer('test', 'official')]);
|
|
||||||
|
|
||||||
const client = new RegistryClient();
|
|
||||||
await client.search({ query: 'test', registries: ['official'] });
|
|
||||||
|
|
||||||
expect(mockFetch).toHaveBeenCalledTimes(1);
|
|
||||||
const calledUrl = mockFetch.mock.calls[0]?.[0] as string;
|
|
||||||
expect(calledUrl).toContain('modelcontextprotocol.io');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('handles partial failures gracefully', async () => {
|
|
||||||
mockFetch.mockImplementation((url: string) => {
|
|
||||||
if (url.includes('glama.ai')) {
|
|
||||||
return Promise.reject(new Error('Network error'));
|
|
||||||
}
|
|
||||||
if (url.includes('registry.smithery.ai')) {
|
|
||||||
return Promise.resolve({
|
|
||||||
ok: true,
|
|
||||||
json: () => Promise.resolve({
|
|
||||||
servers: [{
|
|
||||||
qualifiedName: 'slack',
|
|
||||||
displayName: 'Slack',
|
|
||||||
description: 'Slack',
|
|
||||||
verified: true,
|
|
||||||
useCount: 100,
|
|
||||||
remote: false,
|
|
||||||
}],
|
|
||||||
pagination: { currentPage: 1, pageSize: 20, totalPages: 1, totalCount: 1 },
|
|
||||||
}),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
return Promise.resolve({
|
|
||||||
ok: true,
|
|
||||||
json: () => Promise.resolve({
|
|
||||||
servers: [],
|
|
||||||
metadata: { nextCursor: null },
|
|
||||||
}),
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
const client = new RegistryClient();
|
|
||||||
const results = await client.search({ query: 'slack' });
|
|
||||||
|
|
||||||
// Should still return results from successful sources
|
|
||||||
expect(results.length).toBeGreaterThan(0);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('records error counts on failures', async () => {
|
|
||||||
mockFetch.mockImplementation((url: string) => {
|
|
||||||
if (url.includes('glama.ai')) {
|
|
||||||
return Promise.reject(new Error('fail'));
|
|
||||||
}
|
|
||||||
// Return empty for others
|
|
||||||
if (url.includes('modelcontextprotocol')) {
|
|
||||||
return Promise.resolve({
|
|
||||||
ok: true,
|
|
||||||
json: () => Promise.resolve({ servers: [], metadata: { nextCursor: null } }),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
return Promise.resolve({
|
|
||||||
ok: true,
|
|
||||||
json: () => Promise.resolve({
|
|
||||||
servers: [],
|
|
||||||
pagination: { currentPage: 1, pageSize: 20, totalPages: 1, totalCount: 0 },
|
|
||||||
}),
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
const client = new RegistryClient();
|
|
||||||
await client.search({ query: 'test' });
|
|
||||||
|
|
||||||
const errors = client.getErrorCounts();
|
|
||||||
expect(errors.get('glama')).toBe(1);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('filters by verified when specified', async () => {
|
|
||||||
mockFetch.mockImplementation((url: string) => {
|
|
||||||
if (url.includes('registry.smithery.ai')) {
|
|
||||||
return Promise.resolve({
|
|
||||||
ok: true,
|
|
||||||
json: () => Promise.resolve({
|
|
||||||
servers: [
|
|
||||||
{ qualifiedName: 'verified', displayName: 'Verified', description: '', verified: true, useCount: 100, remote: false },
|
|
||||||
{ qualifiedName: 'unverified', displayName: 'Unverified', description: '', verified: false, useCount: 50, remote: false },
|
|
||||||
],
|
|
||||||
pagination: { currentPage: 1, pageSize: 20, totalPages: 1, totalCount: 2 },
|
|
||||||
}),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
return Promise.resolve({
|
|
||||||
ok: true,
|
|
||||||
json: () => Promise.resolve({ servers: [], metadata: { nextCursor: null } }),
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
// Mock glama too
|
|
||||||
mockFetch.mockImplementation((url: string) => {
|
|
||||||
if (url.includes('registry.smithery.ai')) {
|
|
||||||
return Promise.resolve({
|
|
||||||
ok: true,
|
|
||||||
json: () => Promise.resolve({
|
|
||||||
servers: [
|
|
||||||
{ qualifiedName: 'verified', displayName: 'Verified', description: '', verified: true, useCount: 100, remote: false },
|
|
||||||
{ qualifiedName: 'unverified', displayName: 'Unverified', description: '', verified: false, useCount: 50, remote: false },
|
|
||||||
],
|
|
||||||
pagination: { currentPage: 1, pageSize: 20, totalPages: 1, totalCount: 2 },
|
|
||||||
}),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
if (url.includes('glama.ai')) {
|
|
||||||
return Promise.resolve({
|
|
||||||
ok: true,
|
|
||||||
json: () => Promise.resolve({ servers: [], pageInfo: { hasNextPage: false, hasPreviousPage: false } }),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
return Promise.resolve({
|
|
||||||
ok: true,
|
|
||||||
json: () => Promise.resolve({ servers: [], metadata: { nextCursor: null } }),
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
const client = new RegistryClient();
|
|
||||||
const results = await client.search({ query: 'test', verified: true });
|
|
||||||
|
|
||||||
for (const r of results) {
|
|
||||||
expect(r.verified).toBe(true);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
it('respects limit option', async () => {
|
|
||||||
mockRegistryResponse('all', [
|
|
||||||
makeServer('a', 'official'),
|
|
||||||
makeServer('b', 'glama'),
|
|
||||||
makeServer('c', 'smithery'),
|
|
||||||
]);
|
|
||||||
|
|
||||||
const client = new RegistryClient();
|
|
||||||
const results = await client.search({ query: 'test', limit: 1 });
|
|
||||||
expect(results.length).toBeLessThanOrEqual(1);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('records latency metrics', async () => {
|
|
||||||
mockRegistryResponse('all', [makeServer('test', 'official')]);
|
|
||||||
|
|
||||||
const client = new RegistryClient();
|
|
||||||
await client.search({ query: 'test' });
|
|
||||||
|
|
||||||
const latencies = client.getQueryLatencies();
|
|
||||||
expect(latencies.size).toBeGreaterThan(0);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('clearCache empties cache', async () => {
|
|
||||||
mockRegistryResponse('all', [makeServer('test', 'official')]);
|
|
||||||
|
|
||||||
const client = new RegistryClient();
|
|
||||||
await client.search({ query: 'test' });
|
|
||||||
client.clearCache();
|
|
||||||
mockFetch.mockClear();
|
|
||||||
mockRegistryResponse('all', [makeServer('test', 'official')]);
|
|
||||||
await client.search({ query: 'test' });
|
|
||||||
|
|
||||||
// Should have fetched again after cache clear
|
|
||||||
expect(mockFetch).toHaveBeenCalled();
|
|
||||||
});
|
|
||||||
});
|
|
||||||
@@ -1,105 +0,0 @@
|
|||||||
import { describe, it, expect } from 'vitest';
|
|
||||||
import { deduplicateResults } from '../../src/registry/dedup.js';
|
|
||||||
import type { RegistryServer } from '../../src/registry/types.js';
|
|
||||||
|
|
||||||
function makeServer(overrides: Partial<RegistryServer> = {}): RegistryServer {
|
|
||||||
return {
|
|
||||||
name: 'test-server',
|
|
||||||
description: 'A test server',
|
|
||||||
packages: {},
|
|
||||||
envTemplate: [],
|
|
||||||
transport: 'stdio',
|
|
||||||
popularityScore: 0,
|
|
||||||
verified: false,
|
|
||||||
sourceRegistry: 'official',
|
|
||||||
...overrides,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
describe('deduplicateResults', () => {
|
|
||||||
it('keeps unique servers', () => {
|
|
||||||
const servers = [
|
|
||||||
makeServer({ name: 'server-a', packages: { npm: 'pkg-a' } }),
|
|
||||||
makeServer({ name: 'server-b', packages: { npm: 'pkg-b' } }),
|
|
||||||
];
|
|
||||||
expect(deduplicateResults(servers)).toHaveLength(2);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('deduplicates by npm package name, keeps higher popularity', () => {
|
|
||||||
const servers = [
|
|
||||||
makeServer({ name: 'low', packages: { npm: '@test/slack' }, popularityScore: 10, sourceRegistry: 'official' }),
|
|
||||||
makeServer({ name: 'high', packages: { npm: '@test/slack' }, popularityScore: 100, sourceRegistry: 'smithery' }),
|
|
||||||
];
|
|
||||||
const result = deduplicateResults(servers);
|
|
||||||
expect(result).toHaveLength(1);
|
|
||||||
expect(result[0]?.name).toBe('high');
|
|
||||||
expect(result[0]?.popularityScore).toBe(100);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('deduplicates by GitHub URL with different formats', () => {
|
|
||||||
const servers = [
|
|
||||||
makeServer({ name: 'a', repositoryUrl: 'https://github.com/org/repo', popularityScore: 5 }),
|
|
||||||
makeServer({ name: 'b', repositoryUrl: 'git@github.com:org/repo.git', popularityScore: 50 }),
|
|
||||||
];
|
|
||||||
const result = deduplicateResults(servers);
|
|
||||||
expect(result).toHaveLength(1);
|
|
||||||
expect(result[0]?.name).toBe('b');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('merges envTemplate from both sources', () => {
|
|
||||||
const servers = [
|
|
||||||
makeServer({
|
|
||||||
name: 'a',
|
|
||||||
packages: { npm: 'pkg' },
|
|
||||||
envTemplate: [{ name: 'TOKEN', description: 'API token', isSecret: true }],
|
|
||||||
popularityScore: 10,
|
|
||||||
}),
|
|
||||||
makeServer({
|
|
||||||
name: 'b',
|
|
||||||
packages: { npm: 'pkg' },
|
|
||||||
envTemplate: [{ name: 'URL', description: 'Base URL', isSecret: false }],
|
|
||||||
popularityScore: 5,
|
|
||||||
}),
|
|
||||||
];
|
|
||||||
const result = deduplicateResults(servers);
|
|
||||||
expect(result).toHaveLength(1);
|
|
||||||
expect(result[0]?.envTemplate).toHaveLength(2);
|
|
||||||
expect(result[0]?.envTemplate.map((e) => e.name)).toContain('TOKEN');
|
|
||||||
expect(result[0]?.envTemplate.map((e) => e.name)).toContain('URL');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('deduplicates envTemplate by var name', () => {
|
|
||||||
const servers = [
|
|
||||||
makeServer({
|
|
||||||
packages: { npm: 'pkg' },
|
|
||||||
envTemplate: [{ name: 'TOKEN', description: 'from a', isSecret: true }],
|
|
||||||
popularityScore: 10,
|
|
||||||
}),
|
|
||||||
makeServer({
|
|
||||||
packages: { npm: 'pkg' },
|
|
||||||
envTemplate: [{ name: 'TOKEN', description: 'from b', isSecret: true }],
|
|
||||||
popularityScore: 5,
|
|
||||||
}),
|
|
||||||
];
|
|
||||||
const result = deduplicateResults(servers);
|
|
||||||
expect(result[0]?.envTemplate).toHaveLength(1);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('merges verified status (OR)', () => {
|
|
||||||
const servers = [
|
|
||||||
makeServer({ packages: { npm: 'pkg' }, verified: true, popularityScore: 10 }),
|
|
||||||
makeServer({ packages: { npm: 'pkg' }, verified: false, popularityScore: 5 }),
|
|
||||||
];
|
|
||||||
const result = deduplicateResults(servers);
|
|
||||||
expect(result[0]?.verified).toBe(true);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('handles servers with no npm or repo', () => {
|
|
||||||
const servers = [
|
|
||||||
makeServer({ name: 'a' }),
|
|
||||||
makeServer({ name: 'b' }),
|
|
||||||
];
|
|
||||||
// No matching key → no dedup
|
|
||||||
expect(deduplicateResults(servers)).toHaveLength(2);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
@@ -1,91 +0,0 @@
|
|||||||
import { describe, it, expect } from 'vitest';
|
|
||||||
import { rankResults } from '../../src/registry/ranking.js';
|
|
||||||
import type { RegistryServer } from '../../src/registry/types.js';
|
|
||||||
|
|
||||||
function makeServer(overrides: Partial<RegistryServer> = {}): RegistryServer {
|
|
||||||
return {
|
|
||||||
name: 'test-server',
|
|
||||||
description: 'A test server',
|
|
||||||
packages: {},
|
|
||||||
envTemplate: [],
|
|
||||||
transport: 'stdio',
|
|
||||||
popularityScore: 0,
|
|
||||||
verified: false,
|
|
||||||
sourceRegistry: 'official',
|
|
||||||
...overrides,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
describe('rankResults', () => {
|
|
||||||
it('puts exact name match first', () => {
|
|
||||||
const servers = [
|
|
||||||
makeServer({ name: 'slack-extended-tools' }),
|
|
||||||
makeServer({ name: 'slack' }),
|
|
||||||
makeServer({ name: 'my-slack-bot' }),
|
|
||||||
];
|
|
||||||
const ranked = rankResults(servers, 'slack');
|
|
||||||
expect(ranked[0]?.name).toBe('slack');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('ranks verified servers higher than unverified', () => {
|
|
||||||
const servers = [
|
|
||||||
makeServer({ name: 'server-a', verified: false }),
|
|
||||||
makeServer({ name: 'server-b', verified: true }),
|
|
||||||
];
|
|
||||||
const ranked = rankResults(servers, 'server');
|
|
||||||
expect(ranked[0]?.name).toBe('server-b');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('ranks popular servers higher', () => {
|
|
||||||
const servers = [
|
|
||||||
makeServer({ name: 'unpopular', popularityScore: 1 }),
|
|
||||||
makeServer({ name: 'popular', popularityScore: 10000 }),
|
|
||||||
];
|
|
||||||
const ranked = rankResults(servers, 'test');
|
|
||||||
expect(ranked[0]?.name).toBe('popular');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('considers recency', () => {
|
|
||||||
const recent = new Date();
|
|
||||||
const old = new Date(Date.now() - 365 * 24 * 60 * 60 * 1000);
|
|
||||||
const servers = [
|
|
||||||
makeServer({ name: 'old-server', lastUpdated: old }),
|
|
||||||
makeServer({ name: 'new-server', lastUpdated: recent }),
|
|
||||||
];
|
|
||||||
const ranked = rankResults(servers, 'test');
|
|
||||||
expect(ranked[0]?.name).toBe('new-server');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('handles missing lastUpdated gracefully', () => {
|
|
||||||
const servers = [
|
|
||||||
makeServer({ name: 'no-date' }),
|
|
||||||
makeServer({ name: 'has-date', lastUpdated: new Date() }),
|
|
||||||
];
|
|
||||||
// Should not throw
|
|
||||||
const ranked = rankResults(servers, 'test');
|
|
||||||
expect(ranked).toHaveLength(2);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('produces stable ordering for identical scores', () => {
|
|
||||||
const servers = Array.from({ length: 10 }, (_, i) =>
|
|
||||||
makeServer({ name: `server-${String(i)}` }),
|
|
||||||
);
|
|
||||||
const ranked1 = rankResults(servers, 'test');
|
|
||||||
const ranked2 = rankResults(servers, 'test');
|
|
||||||
expect(ranked1.map((s) => s.name)).toEqual(ranked2.map((s) => s.name));
|
|
||||||
});
|
|
||||||
|
|
||||||
it('returns empty array for empty input', () => {
|
|
||||||
expect(rankResults([], 'test')).toEqual([]);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('does not mutate original array', () => {
|
|
||||||
const servers = [
|
|
||||||
makeServer({ name: 'b' }),
|
|
||||||
makeServer({ name: 'a' }),
|
|
||||||
];
|
|
||||||
const original = [...servers];
|
|
||||||
rankResults(servers, 'test');
|
|
||||||
expect(servers.map((s) => s.name)).toEqual(original.map((s) => s.name));
|
|
||||||
});
|
|
||||||
});
|
|
||||||
@@ -1,204 +0,0 @@
|
|||||||
-- CreateEnum
|
|
||||||
CREATE TYPE "Role" AS ENUM ('USER', 'ADMIN');
|
|
||||||
|
|
||||||
-- CreateEnum
|
|
||||||
CREATE TYPE "Transport" AS ENUM ('STDIO', 'SSE', 'STREAMABLE_HTTP');
|
|
||||||
|
|
||||||
-- CreateEnum
|
|
||||||
CREATE TYPE "InstanceStatus" AS ENUM ('STARTING', 'RUNNING', 'STOPPING', 'STOPPED', 'ERROR');
|
|
||||||
|
|
||||||
-- CreateTable
|
|
||||||
CREATE TABLE "User" (
|
|
||||||
"id" TEXT NOT NULL,
|
|
||||||
"email" TEXT NOT NULL,
|
|
||||||
"name" TEXT,
|
|
||||||
"passwordHash" TEXT NOT NULL,
|
|
||||||
"role" "Role" NOT NULL DEFAULT 'USER',
|
|
||||||
"version" INTEGER NOT NULL DEFAULT 1,
|
|
||||||
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
|
||||||
"updatedAt" TIMESTAMP(3) NOT NULL,
|
|
||||||
|
|
||||||
CONSTRAINT "User_pkey" PRIMARY KEY ("id")
|
|
||||||
);
|
|
||||||
|
|
||||||
-- CreateTable
|
|
||||||
CREATE TABLE "Session" (
|
|
||||||
"id" TEXT NOT NULL,
|
|
||||||
"token" TEXT NOT NULL,
|
|
||||||
"userId" TEXT NOT NULL,
|
|
||||||
"expiresAt" TIMESTAMP(3) NOT NULL,
|
|
||||||
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
|
||||||
|
|
||||||
CONSTRAINT "Session_pkey" PRIMARY KEY ("id")
|
|
||||||
);
|
|
||||||
|
|
||||||
-- CreateTable
|
|
||||||
CREATE TABLE "McpServer" (
|
|
||||||
"id" TEXT NOT NULL,
|
|
||||||
"name" TEXT NOT NULL,
|
|
||||||
"description" TEXT NOT NULL DEFAULT '',
|
|
||||||
"packageName" TEXT,
|
|
||||||
"dockerImage" TEXT,
|
|
||||||
"transport" "Transport" NOT NULL DEFAULT 'STDIO',
|
|
||||||
"repositoryUrl" TEXT,
|
|
||||||
"externalUrl" TEXT,
|
|
||||||
"command" JSONB,
|
|
||||||
"containerPort" INTEGER,
|
|
||||||
"envTemplate" JSONB NOT NULL DEFAULT '[]',
|
|
||||||
"version" INTEGER NOT NULL DEFAULT 1,
|
|
||||||
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
|
||||||
"updatedAt" TIMESTAMP(3) NOT NULL,
|
|
||||||
|
|
||||||
CONSTRAINT "McpServer_pkey" PRIMARY KEY ("id")
|
|
||||||
);
|
|
||||||
|
|
||||||
-- CreateTable
|
|
||||||
CREATE TABLE "McpProfile" (
|
|
||||||
"id" TEXT NOT NULL,
|
|
||||||
"name" TEXT NOT NULL,
|
|
||||||
"serverId" TEXT NOT NULL,
|
|
||||||
"permissions" JSONB NOT NULL DEFAULT '[]',
|
|
||||||
"envOverrides" JSONB NOT NULL DEFAULT '{}',
|
|
||||||
"version" INTEGER NOT NULL DEFAULT 1,
|
|
||||||
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
|
||||||
"updatedAt" TIMESTAMP(3) NOT NULL,
|
|
||||||
|
|
||||||
CONSTRAINT "McpProfile_pkey" PRIMARY KEY ("id")
|
|
||||||
);
|
|
||||||
|
|
||||||
-- CreateTable
|
|
||||||
CREATE TABLE "Project" (
|
|
||||||
"id" TEXT NOT NULL,
|
|
||||||
"name" TEXT NOT NULL,
|
|
||||||
"description" TEXT NOT NULL DEFAULT '',
|
|
||||||
"ownerId" TEXT NOT NULL,
|
|
||||||
"version" INTEGER NOT NULL DEFAULT 1,
|
|
||||||
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
|
||||||
"updatedAt" TIMESTAMP(3) NOT NULL,
|
|
||||||
|
|
||||||
CONSTRAINT "Project_pkey" PRIMARY KEY ("id")
|
|
||||||
);
|
|
||||||
|
|
||||||
-- CreateTable
|
|
||||||
CREATE TABLE "ProjectMcpProfile" (
|
|
||||||
"id" TEXT NOT NULL,
|
|
||||||
"projectId" TEXT NOT NULL,
|
|
||||||
"profileId" TEXT NOT NULL,
|
|
||||||
|
|
||||||
CONSTRAINT "ProjectMcpProfile_pkey" PRIMARY KEY ("id")
|
|
||||||
);
|
|
||||||
|
|
||||||
-- CreateTable
|
|
||||||
CREATE TABLE "McpInstance" (
|
|
||||||
"id" TEXT NOT NULL,
|
|
||||||
"serverId" TEXT NOT NULL,
|
|
||||||
"containerId" TEXT,
|
|
||||||
"status" "InstanceStatus" NOT NULL DEFAULT 'STOPPED',
|
|
||||||
"port" INTEGER,
|
|
||||||
"metadata" JSONB NOT NULL DEFAULT '{}',
|
|
||||||
"version" INTEGER NOT NULL DEFAULT 1,
|
|
||||||
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
|
||||||
"updatedAt" TIMESTAMP(3) NOT NULL,
|
|
||||||
|
|
||||||
CONSTRAINT "McpInstance_pkey" PRIMARY KEY ("id")
|
|
||||||
);
|
|
||||||
|
|
||||||
-- CreateTable
|
|
||||||
CREATE TABLE "AuditLog" (
|
|
||||||
"id" TEXT NOT NULL,
|
|
||||||
"userId" TEXT NOT NULL,
|
|
||||||
"action" TEXT NOT NULL,
|
|
||||||
"resource" TEXT NOT NULL,
|
|
||||||
"resourceId" TEXT,
|
|
||||||
"details" JSONB NOT NULL DEFAULT '{}',
|
|
||||||
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
|
||||||
|
|
||||||
CONSTRAINT "AuditLog_pkey" PRIMARY KEY ("id")
|
|
||||||
);
|
|
||||||
|
|
||||||
-- CreateIndex
|
|
||||||
CREATE UNIQUE INDEX "User_email_key" ON "User"("email");
|
|
||||||
|
|
||||||
-- CreateIndex
|
|
||||||
CREATE INDEX "User_email_idx" ON "User"("email");
|
|
||||||
|
|
||||||
-- CreateIndex
|
|
||||||
CREATE UNIQUE INDEX "Session_token_key" ON "Session"("token");
|
|
||||||
|
|
||||||
-- CreateIndex
|
|
||||||
CREATE INDEX "Session_token_idx" ON "Session"("token");
|
|
||||||
|
|
||||||
-- CreateIndex
|
|
||||||
CREATE INDEX "Session_userId_idx" ON "Session"("userId");
|
|
||||||
|
|
||||||
-- CreateIndex
|
|
||||||
CREATE INDEX "Session_expiresAt_idx" ON "Session"("expiresAt");
|
|
||||||
|
|
||||||
-- CreateIndex
|
|
||||||
CREATE UNIQUE INDEX "McpServer_name_key" ON "McpServer"("name");
|
|
||||||
|
|
||||||
-- CreateIndex
|
|
||||||
CREATE INDEX "McpServer_name_idx" ON "McpServer"("name");
|
|
||||||
|
|
||||||
-- CreateIndex
|
|
||||||
CREATE INDEX "McpProfile_serverId_idx" ON "McpProfile"("serverId");
|
|
||||||
|
|
||||||
-- CreateIndex
|
|
||||||
CREATE UNIQUE INDEX "McpProfile_name_serverId_key" ON "McpProfile"("name", "serverId");
|
|
||||||
|
|
||||||
-- CreateIndex
|
|
||||||
CREATE UNIQUE INDEX "Project_name_key" ON "Project"("name");
|
|
||||||
|
|
||||||
-- CreateIndex
|
|
||||||
CREATE INDEX "Project_name_idx" ON "Project"("name");
|
|
||||||
|
|
||||||
-- CreateIndex
|
|
||||||
CREATE INDEX "Project_ownerId_idx" ON "Project"("ownerId");
|
|
||||||
|
|
||||||
-- CreateIndex
|
|
||||||
CREATE INDEX "ProjectMcpProfile_projectId_idx" ON "ProjectMcpProfile"("projectId");
|
|
||||||
|
|
||||||
-- CreateIndex
|
|
||||||
CREATE INDEX "ProjectMcpProfile_profileId_idx" ON "ProjectMcpProfile"("profileId");
|
|
||||||
|
|
||||||
-- CreateIndex
|
|
||||||
CREATE UNIQUE INDEX "ProjectMcpProfile_projectId_profileId_key" ON "ProjectMcpProfile"("projectId", "profileId");
|
|
||||||
|
|
||||||
-- CreateIndex
|
|
||||||
CREATE INDEX "McpInstance_serverId_idx" ON "McpInstance"("serverId");
|
|
||||||
|
|
||||||
-- CreateIndex
|
|
||||||
CREATE INDEX "McpInstance_status_idx" ON "McpInstance"("status");
|
|
||||||
|
|
||||||
-- CreateIndex
|
|
||||||
CREATE INDEX "AuditLog_userId_idx" ON "AuditLog"("userId");
|
|
||||||
|
|
||||||
-- CreateIndex
|
|
||||||
CREATE INDEX "AuditLog_action_idx" ON "AuditLog"("action");
|
|
||||||
|
|
||||||
-- CreateIndex
|
|
||||||
CREATE INDEX "AuditLog_resource_idx" ON "AuditLog"("resource");
|
|
||||||
|
|
||||||
-- CreateIndex
|
|
||||||
CREATE INDEX "AuditLog_createdAt_idx" ON "AuditLog"("createdAt");
|
|
||||||
|
|
||||||
-- AddForeignKey
|
|
||||||
ALTER TABLE "Session" ADD CONSTRAINT "Session_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
|
||||||
|
|
||||||
-- AddForeignKey
|
|
||||||
ALTER TABLE "McpProfile" ADD CONSTRAINT "McpProfile_serverId_fkey" FOREIGN KEY ("serverId") REFERENCES "McpServer"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
|
||||||
|
|
||||||
-- AddForeignKey
|
|
||||||
ALTER TABLE "Project" ADD CONSTRAINT "Project_ownerId_fkey" FOREIGN KEY ("ownerId") REFERENCES "User"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
|
||||||
|
|
||||||
-- AddForeignKey
|
|
||||||
ALTER TABLE "ProjectMcpProfile" ADD CONSTRAINT "ProjectMcpProfile_projectId_fkey" FOREIGN KEY ("projectId") REFERENCES "Project"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
|
||||||
|
|
||||||
-- AddForeignKey
|
|
||||||
ALTER TABLE "ProjectMcpProfile" ADD CONSTRAINT "ProjectMcpProfile_profileId_fkey" FOREIGN KEY ("profileId") REFERENCES "McpProfile"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
|
||||||
|
|
||||||
-- AddForeignKey
|
|
||||||
ALTER TABLE "McpInstance" ADD CONSTRAINT "McpInstance_serverId_fkey" FOREIGN KEY ("serverId") REFERENCES "McpServer"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
|
||||||
|
|
||||||
-- AddForeignKey
|
|
||||||
ALTER TABLE "AuditLog" ADD CONSTRAINT "AuditLog_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
|
||||||
@@ -1,8 +0,0 @@
|
|||||||
-- DropForeignKey
|
|
||||||
ALTER TABLE "ProjectMember" DROP CONSTRAINT IF EXISTS "ProjectMember_projectId_fkey";
|
|
||||||
|
|
||||||
-- DropForeignKey
|
|
||||||
ALTER TABLE "ProjectMember" DROP CONSTRAINT IF EXISTS "ProjectMember_userId_fkey";
|
|
||||||
|
|
||||||
-- DropTable
|
|
||||||
DROP TABLE IF EXISTS "ProjectMember";
|
|
||||||
@@ -1,3 +0,0 @@
|
|||||||
# Please do not edit this file manually
|
|
||||||
# It should be added in your version-control system (e.g., Git)
|
|
||||||
provider = "postgresql"
|
|
||||||
@@ -10,21 +10,17 @@ datasource db {
|
|||||||
// ── Users ──
|
// ── Users ──
|
||||||
|
|
||||||
model User {
|
model User {
|
||||||
id String @id @default(cuid())
|
id String @id @default(cuid())
|
||||||
email String @unique
|
email String @unique
|
||||||
name String?
|
name String?
|
||||||
passwordHash String
|
role Role @default(USER)
|
||||||
role Role @default(USER)
|
version Int @default(1)
|
||||||
provider String?
|
createdAt DateTime @default(now())
|
||||||
externalId String?
|
updatedAt DateTime @updatedAt
|
||||||
version Int @default(1)
|
|
||||||
createdAt DateTime @default(now())
|
|
||||||
updatedAt DateTime @updatedAt
|
|
||||||
|
|
||||||
sessions Session[]
|
sessions Session[]
|
||||||
auditLogs AuditLog[]
|
auditLogs AuditLog[]
|
||||||
ownedProjects Project[]
|
projects Project[]
|
||||||
groupMemberships GroupMember[]
|
|
||||||
|
|
||||||
@@index([email])
|
@@index([email])
|
||||||
}
|
}
|
||||||
@@ -60,21 +56,13 @@ model McpServer {
|
|||||||
dockerImage String?
|
dockerImage String?
|
||||||
transport Transport @default(STDIO)
|
transport Transport @default(STDIO)
|
||||||
repositoryUrl String?
|
repositoryUrl String?
|
||||||
externalUrl String?
|
envTemplate Json @default("[]")
|
||||||
command Json?
|
|
||||||
containerPort Int?
|
|
||||||
replicas Int @default(1)
|
|
||||||
env Json @default("[]")
|
|
||||||
healthCheck Json?
|
|
||||||
version Int @default(1)
|
version Int @default(1)
|
||||||
createdAt DateTime @default(now())
|
createdAt DateTime @default(now())
|
||||||
updatedAt DateTime @updatedAt
|
updatedAt DateTime @updatedAt
|
||||||
|
|
||||||
templateName String?
|
profiles McpProfile[]
|
||||||
templateVersion String?
|
|
||||||
|
|
||||||
instances McpInstance[]
|
instances McpInstance[]
|
||||||
projects ProjectServer[]
|
|
||||||
|
|
||||||
@@index([name])
|
@@index([name])
|
||||||
}
|
}
|
||||||
@@ -85,83 +73,23 @@ enum Transport {
|
|||||||
STREAMABLE_HTTP
|
STREAMABLE_HTTP
|
||||||
}
|
}
|
||||||
|
|
||||||
// ── MCP Templates ──
|
// ── MCP Profiles ──
|
||||||
|
|
||||||
model McpTemplate {
|
model McpProfile {
|
||||||
id String @id @default(cuid())
|
|
||||||
name String @unique
|
|
||||||
version String @default("1.0.0")
|
|
||||||
description String @default("")
|
|
||||||
packageName String?
|
|
||||||
dockerImage String?
|
|
||||||
transport Transport @default(STDIO)
|
|
||||||
repositoryUrl String?
|
|
||||||
externalUrl String?
|
|
||||||
command Json?
|
|
||||||
containerPort Int?
|
|
||||||
replicas Int @default(1)
|
|
||||||
env Json @default("[]")
|
|
||||||
healthCheck Json?
|
|
||||||
createdAt DateTime @default(now())
|
|
||||||
updatedAt DateTime @updatedAt
|
|
||||||
|
|
||||||
@@index([name])
|
|
||||||
}
|
|
||||||
|
|
||||||
// ── Secrets ──
|
|
||||||
|
|
||||||
model Secret {
|
|
||||||
id String @id @default(cuid())
|
|
||||||
name String @unique
|
|
||||||
data Json @default("{}")
|
|
||||||
version Int @default(1)
|
|
||||||
createdAt DateTime @default(now())
|
|
||||||
updatedAt DateTime @updatedAt
|
|
||||||
|
|
||||||
@@index([name])
|
|
||||||
}
|
|
||||||
|
|
||||||
// ── Groups ──
|
|
||||||
|
|
||||||
model Group {
|
|
||||||
id String @id @default(cuid())
|
id String @id @default(cuid())
|
||||||
name String @unique
|
name String
|
||||||
description String @default("")
|
serverId String
|
||||||
|
permissions Json @default("[]")
|
||||||
|
envOverrides Json @default("{}")
|
||||||
version Int @default(1)
|
version Int @default(1)
|
||||||
createdAt DateTime @default(now())
|
createdAt DateTime @default(now())
|
||||||
updatedAt DateTime @updatedAt
|
updatedAt DateTime @updatedAt
|
||||||
|
|
||||||
members GroupMember[]
|
server McpServer @relation(fields: [serverId], references: [id], onDelete: Cascade)
|
||||||
|
projects ProjectMcpProfile[]
|
||||||
|
|
||||||
@@index([name])
|
@@unique([name, serverId])
|
||||||
}
|
@@index([serverId])
|
||||||
|
|
||||||
model GroupMember {
|
|
||||||
id String @id @default(cuid())
|
|
||||||
groupId String
|
|
||||||
userId String
|
|
||||||
createdAt DateTime @default(now())
|
|
||||||
|
|
||||||
group Group @relation(fields: [groupId], references: [id], onDelete: Cascade)
|
|
||||||
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
|
|
||||||
|
|
||||||
@@unique([groupId, userId])
|
|
||||||
@@index([groupId])
|
|
||||||
@@index([userId])
|
|
||||||
}
|
|
||||||
|
|
||||||
// ── RBAC Definitions ──
|
|
||||||
|
|
||||||
model RbacDefinition {
|
|
||||||
id String @id @default(cuid())
|
|
||||||
name String @unique
|
|
||||||
subjects Json @default("[]")
|
|
||||||
roleBindings Json @default("[]")
|
|
||||||
version Int @default(1)
|
|
||||||
createdAt DateTime @default(now())
|
|
||||||
updatedAt DateTime @updatedAt
|
|
||||||
|
|
||||||
@@index([name])
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// ── Projects ──
|
// ── Projects ──
|
||||||
@@ -170,31 +98,31 @@ model Project {
|
|||||||
id String @id @default(cuid())
|
id String @id @default(cuid())
|
||||||
name String @unique
|
name String @unique
|
||||||
description String @default("")
|
description String @default("")
|
||||||
proxyMode String @default("direct")
|
|
||||||
llmProvider String?
|
|
||||||
llmModel String?
|
|
||||||
ownerId String
|
ownerId String
|
||||||
version Int @default(1)
|
version Int @default(1)
|
||||||
createdAt DateTime @default(now())
|
createdAt DateTime @default(now())
|
||||||
updatedAt DateTime @updatedAt
|
updatedAt DateTime @updatedAt
|
||||||
|
|
||||||
owner User @relation(fields: [ownerId], references: [id], onDelete: Cascade)
|
owner User @relation(fields: [ownerId], references: [id], onDelete: Cascade)
|
||||||
servers ProjectServer[]
|
profiles ProjectMcpProfile[]
|
||||||
|
|
||||||
@@index([name])
|
@@index([name])
|
||||||
@@index([ownerId])
|
@@index([ownerId])
|
||||||
}
|
}
|
||||||
|
|
||||||
model ProjectServer {
|
// ── Project <-> Profile join table ──
|
||||||
id String @id @default(cuid())
|
|
||||||
|
model ProjectMcpProfile {
|
||||||
|
id String @id @default(cuid())
|
||||||
projectId String
|
projectId String
|
||||||
serverId String
|
profileId String
|
||||||
createdAt DateTime @default(now())
|
|
||||||
|
|
||||||
project Project @relation(fields: [projectId], references: [id], onDelete: Cascade)
|
project Project @relation(fields: [projectId], references: [id], onDelete: Cascade)
|
||||||
server McpServer @relation(fields: [serverId], references: [id], onDelete: Cascade)
|
profile McpProfile @relation(fields: [profileId], references: [id], onDelete: Cascade)
|
||||||
|
|
||||||
@@unique([projectId, serverId])
|
@@unique([projectId, profileId])
|
||||||
|
@@index([projectId])
|
||||||
|
@@index([profileId])
|
||||||
}
|
}
|
||||||
|
|
||||||
// ── MCP Instances (running containers) ──
|
// ── MCP Instances (running containers) ──
|
||||||
@@ -205,13 +133,10 @@ model McpInstance {
|
|||||||
containerId String?
|
containerId String?
|
||||||
status InstanceStatus @default(STOPPED)
|
status InstanceStatus @default(STOPPED)
|
||||||
port Int?
|
port Int?
|
||||||
metadata Json @default("{}")
|
metadata Json @default("{}")
|
||||||
healthStatus String?
|
version Int @default(1)
|
||||||
lastHealthCheck DateTime?
|
createdAt DateTime @default(now())
|
||||||
events Json @default("[]")
|
updatedAt DateTime @updatedAt
|
||||||
version Int @default(1)
|
|
||||||
createdAt DateTime @default(now())
|
|
||||||
updatedAt DateTime @updatedAt
|
|
||||||
|
|
||||||
server McpServer @relation(fields: [serverId], references: [id], onDelete: Cascade)
|
server McpServer @relation(fields: [serverId], references: [id], onDelete: Cascade)
|
||||||
|
|
||||||
|
|||||||
@@ -4,9 +4,9 @@ export type {
|
|||||||
User,
|
User,
|
||||||
Session,
|
Session,
|
||||||
McpServer,
|
McpServer,
|
||||||
McpTemplate,
|
McpProfile,
|
||||||
Secret,
|
|
||||||
Project,
|
Project,
|
||||||
|
ProjectMcpProfile,
|
||||||
McpInstance,
|
McpInstance,
|
||||||
AuditLog,
|
AuditLog,
|
||||||
Role,
|
Role,
|
||||||
@@ -14,5 +14,5 @@ export type {
|
|||||||
InstanceStatus,
|
InstanceStatus,
|
||||||
} from '@prisma/client';
|
} from '@prisma/client';
|
||||||
|
|
||||||
export { seedTemplates } from './seed/index.js';
|
export { seedMcpServers, defaultServers } from './seed/index.js';
|
||||||
export type { SeedTemplate, TemplateEnvEntry, HealthCheckSpec } from './seed/index.js';
|
export type { SeedServer } from './seed/index.js';
|
||||||
|
|||||||
@@ -1,77 +1,131 @@
|
|||||||
import { PrismaClient, Prisma } from '@prisma/client';
|
import { PrismaClient } from '@prisma/client';
|
||||||
|
|
||||||
export interface TemplateEnvEntry {
|
export interface SeedServer {
|
||||||
name: string;
|
name: string;
|
||||||
description?: string;
|
|
||||||
required?: boolean;
|
|
||||||
defaultValue?: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface HealthCheckSpec {
|
|
||||||
tool: string;
|
|
||||||
arguments?: Record<string, unknown>;
|
|
||||||
intervalSeconds?: number;
|
|
||||||
timeoutSeconds?: number;
|
|
||||||
failureThreshold?: number;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface SeedTemplate {
|
|
||||||
name: string;
|
|
||||||
version: string;
|
|
||||||
description: string;
|
description: string;
|
||||||
packageName?: string;
|
packageName: string;
|
||||||
dockerImage?: string;
|
|
||||||
transport: 'STDIO' | 'SSE' | 'STREAMABLE_HTTP';
|
transport: 'STDIO' | 'SSE' | 'STREAMABLE_HTTP';
|
||||||
repositoryUrl?: string;
|
repositoryUrl: string;
|
||||||
externalUrl?: string;
|
envTemplate: Array<{
|
||||||
command?: string[];
|
name: string;
|
||||||
containerPort?: number;
|
description: string;
|
||||||
replicas?: number;
|
isSecret: boolean;
|
||||||
env?: TemplateEnvEntry[];
|
setupUrl?: string;
|
||||||
healthCheck?: HealthCheckSpec;
|
}>;
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function seedTemplates(
|
export const defaultServers: SeedServer[] = [
|
||||||
prisma: PrismaClient,
|
{
|
||||||
templates: SeedTemplate[],
|
name: 'slack',
|
||||||
): Promise<number> {
|
description: 'Slack MCP server for reading channels, messages, and user info',
|
||||||
let upserted = 0;
|
packageName: '@anthropic/slack-mcp',
|
||||||
|
transport: 'STDIO',
|
||||||
|
repositoryUrl: 'https://github.com/modelcontextprotocol/servers/tree/main/src/slack',
|
||||||
|
envTemplate: [
|
||||||
|
{
|
||||||
|
name: 'SLACK_BOT_TOKEN',
|
||||||
|
description: 'Slack Bot User OAuth Token (xoxb-...)',
|
||||||
|
isSecret: true,
|
||||||
|
setupUrl: 'https://api.slack.com/apps',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'SLACK_TEAM_ID',
|
||||||
|
description: 'Slack Workspace Team ID',
|
||||||
|
isSecret: false,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'jira',
|
||||||
|
description: 'Jira MCP server for issues, projects, and boards',
|
||||||
|
packageName: '@anthropic/jira-mcp',
|
||||||
|
transport: 'STDIO',
|
||||||
|
repositoryUrl: 'https://github.com/modelcontextprotocol/servers/tree/main/src/jira',
|
||||||
|
envTemplate: [
|
||||||
|
{
|
||||||
|
name: 'JIRA_URL',
|
||||||
|
description: 'Jira instance URL (e.g., https://company.atlassian.net)',
|
||||||
|
isSecret: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'JIRA_EMAIL',
|
||||||
|
description: 'Jira account email',
|
||||||
|
isSecret: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'JIRA_API_TOKEN',
|
||||||
|
description: 'Jira API token',
|
||||||
|
isSecret: true,
|
||||||
|
setupUrl: 'https://id.atlassian.com/manage-profile/security/api-tokens',
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'github',
|
||||||
|
description: 'GitHub MCP server for repos, issues, PRs, and code search',
|
||||||
|
packageName: '@anthropic/github-mcp',
|
||||||
|
transport: 'STDIO',
|
||||||
|
repositoryUrl: 'https://github.com/modelcontextprotocol/servers/tree/main/src/github',
|
||||||
|
envTemplate: [
|
||||||
|
{
|
||||||
|
name: 'GITHUB_TOKEN',
|
||||||
|
description: 'GitHub Personal Access Token',
|
||||||
|
isSecret: true,
|
||||||
|
setupUrl: 'https://github.com/settings/tokens',
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'terraform',
|
||||||
|
description: 'Terraform MCP server for infrastructure documentation and state',
|
||||||
|
packageName: '@anthropic/terraform-mcp',
|
||||||
|
transport: 'STDIO',
|
||||||
|
repositoryUrl: 'https://github.com/modelcontextprotocol/servers/tree/main/src/terraform',
|
||||||
|
envTemplate: [],
|
||||||
|
},
|
||||||
|
];
|
||||||
|
|
||||||
for (const tpl of templates) {
|
export async function seedMcpServers(
|
||||||
await prisma.mcpTemplate.upsert({
|
prisma: PrismaClient,
|
||||||
where: { name: tpl.name },
|
servers: SeedServer[] = defaultServers,
|
||||||
|
): Promise<number> {
|
||||||
|
let created = 0;
|
||||||
|
|
||||||
|
for (const server of servers) {
|
||||||
|
await prisma.mcpServer.upsert({
|
||||||
|
where: { name: server.name },
|
||||||
update: {
|
update: {
|
||||||
version: tpl.version,
|
description: server.description,
|
||||||
description: tpl.description,
|
packageName: server.packageName,
|
||||||
packageName: tpl.packageName ?? null,
|
transport: server.transport,
|
||||||
dockerImage: tpl.dockerImage ?? null,
|
repositoryUrl: server.repositoryUrl,
|
||||||
transport: tpl.transport,
|
envTemplate: server.envTemplate,
|
||||||
repositoryUrl: tpl.repositoryUrl ?? null,
|
|
||||||
externalUrl: tpl.externalUrl ?? null,
|
|
||||||
command: (tpl.command ?? Prisma.JsonNull) as Prisma.InputJsonValue,
|
|
||||||
containerPort: tpl.containerPort ?? null,
|
|
||||||
replicas: tpl.replicas ?? 1,
|
|
||||||
env: (tpl.env ?? []) as unknown as Prisma.InputJsonValue,
|
|
||||||
healthCheck: (tpl.healthCheck ?? Prisma.JsonNull) as unknown as Prisma.InputJsonValue,
|
|
||||||
},
|
},
|
||||||
create: {
|
create: {
|
||||||
name: tpl.name,
|
name: server.name,
|
||||||
version: tpl.version,
|
description: server.description,
|
||||||
description: tpl.description,
|
packageName: server.packageName,
|
||||||
packageName: tpl.packageName ?? null,
|
transport: server.transport,
|
||||||
dockerImage: tpl.dockerImage ?? null,
|
repositoryUrl: server.repositoryUrl,
|
||||||
transport: tpl.transport,
|
envTemplate: server.envTemplate,
|
||||||
repositoryUrl: tpl.repositoryUrl ?? null,
|
|
||||||
externalUrl: tpl.externalUrl ?? null,
|
|
||||||
command: (tpl.command ?? Prisma.JsonNull) as Prisma.InputJsonValue,
|
|
||||||
containerPort: tpl.containerPort ?? null,
|
|
||||||
replicas: tpl.replicas ?? 1,
|
|
||||||
env: (tpl.env ?? []) as unknown as Prisma.InputJsonValue,
|
|
||||||
healthCheck: (tpl.healthCheck ?? Prisma.JsonNull) as unknown as Prisma.InputJsonValue,
|
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
upserted++;
|
created++;
|
||||||
}
|
}
|
||||||
|
|
||||||
return upserted;
|
return created;
|
||||||
|
}
|
||||||
|
|
||||||
|
// CLI entry point
|
||||||
|
if (import.meta.url === `file://${process.argv[1]}`) {
|
||||||
|
const prisma = new PrismaClient();
|
||||||
|
seedMcpServers(prisma)
|
||||||
|
.then((count) => {
|
||||||
|
console.log(`Seeded ${count} MCP servers`);
|
||||||
|
return prisma.$disconnect();
|
||||||
|
})
|
||||||
|
.catch((e) => {
|
||||||
|
console.error(e);
|
||||||
|
return prisma.$disconnect().then(() => process.exit(1));
|
||||||
|
});
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -48,16 +48,11 @@ export async function cleanupTestDb(): Promise<void> {
|
|||||||
export async function clearAllTables(client: PrismaClient): Promise<void> {
|
export async function clearAllTables(client: PrismaClient): Promise<void> {
|
||||||
// Delete in order respecting foreign keys
|
// Delete in order respecting foreign keys
|
||||||
await client.auditLog.deleteMany();
|
await client.auditLog.deleteMany();
|
||||||
|
await client.projectMcpProfile.deleteMany();
|
||||||
await client.mcpInstance.deleteMany();
|
await client.mcpInstance.deleteMany();
|
||||||
await client.projectServer.deleteMany();
|
await client.mcpProfile.deleteMany();
|
||||||
await client.projectMember.deleteMany();
|
|
||||||
await client.secret.deleteMany();
|
|
||||||
await client.session.deleteMany();
|
await client.session.deleteMany();
|
||||||
await client.project.deleteMany();
|
await client.project.deleteMany();
|
||||||
await client.mcpServer.deleteMany();
|
await client.mcpServer.deleteMany();
|
||||||
await client.mcpTemplate.deleteMany();
|
|
||||||
await client.groupMember.deleteMany();
|
|
||||||
await client.group.deleteMany();
|
|
||||||
await client.rbacDefinition.deleteMany();
|
|
||||||
await client.user.deleteMany();
|
await client.user.deleteMany();
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -23,35 +23,11 @@ async function createUser(overrides: { email?: string; name?: string; role?: 'US
|
|||||||
data: {
|
data: {
|
||||||
email: overrides.email ?? `test-${Date.now()}@example.com`,
|
email: overrides.email ?? `test-${Date.now()}@example.com`,
|
||||||
name: overrides.name ?? 'Test User',
|
name: overrides.name ?? 'Test User',
|
||||||
passwordHash: '$2b$10$test-hash-placeholder',
|
|
||||||
role: overrides.role ?? 'USER',
|
role: overrides.role ?? 'USER',
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
async function createGroup(overrides: { name?: string; description?: string } = {}) {
|
|
||||||
return prisma.group.create({
|
|
||||||
data: {
|
|
||||||
name: overrides.name ?? `group-${Date.now()}`,
|
|
||||||
description: overrides.description ?? 'Test group',
|
|
||||||
},
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
async function createProject(overrides: { name?: string; ownerId?: string } = {}) {
|
|
||||||
let ownerId = overrides.ownerId;
|
|
||||||
if (!ownerId) {
|
|
||||||
const user = await createUser();
|
|
||||||
ownerId = user.id;
|
|
||||||
}
|
|
||||||
return prisma.project.create({
|
|
||||||
data: {
|
|
||||||
name: overrides.name ?? `project-${Date.now()}`,
|
|
||||||
ownerId,
|
|
||||||
},
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
async function createServer(overrides: { name?: string; transport?: 'STDIO' | 'SSE' | 'STREAMABLE_HTTP' } = {}) {
|
async function createServer(overrides: { name?: string; transport?: 'STDIO' | 'SSE' | 'STREAMABLE_HTTP' } = {}) {
|
||||||
return prisma.mcpServer.create({
|
return prisma.mcpServer.create({
|
||||||
data: {
|
data: {
|
||||||
@@ -147,7 +123,7 @@ describe('McpServer', () => {
|
|||||||
const server = await createServer();
|
const server = await createServer();
|
||||||
expect(server.transport).toBe('STDIO');
|
expect(server.transport).toBe('STDIO');
|
||||||
expect(server.version).toBe(1);
|
expect(server.version).toBe(1);
|
||||||
expect(server.env).toEqual([]);
|
expect(server.envTemplate).toEqual([]);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('enforces unique name', async () => {
|
it('enforces unique name', async () => {
|
||||||
@@ -155,18 +131,18 @@ describe('McpServer', () => {
|
|||||||
await expect(createServer({ name: 'slack' })).rejects.toThrow();
|
await expect(createServer({ name: 'slack' })).rejects.toThrow();
|
||||||
});
|
});
|
||||||
|
|
||||||
it('stores env as JSON', async () => {
|
it('stores envTemplate as JSON', async () => {
|
||||||
const server = await prisma.mcpServer.create({
|
const server = await prisma.mcpServer.create({
|
||||||
data: {
|
data: {
|
||||||
name: 'with-env',
|
name: 'with-env',
|
||||||
env: [
|
envTemplate: [
|
||||||
{ name: 'API_KEY', value: 'test-key' },
|
{ name: 'API_KEY', description: 'Key', isSecret: true },
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
const env = server.env as Array<{ name: string }>;
|
const envTemplate = server.envTemplate as Array<{ name: string }>;
|
||||||
expect(env).toHaveLength(1);
|
expect(envTemplate).toHaveLength(1);
|
||||||
expect(env[0].name).toBe('API_KEY');
|
expect(envTemplate[0].name).toBe('API_KEY');
|
||||||
});
|
});
|
||||||
|
|
||||||
it('supports SSE transport', async () => {
|
it('supports SSE transport', async () => {
|
||||||
@@ -175,46 +151,43 @@ describe('McpServer', () => {
|
|||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
// ── Secret model ──
|
// ── McpProfile model ──
|
||||||
|
|
||||||
describe('Secret', () => {
|
describe('McpProfile', () => {
|
||||||
it('creates a secret with defaults', async () => {
|
it('creates a profile linked to server', async () => {
|
||||||
const secret = await prisma.secret.create({
|
const server = await createServer();
|
||||||
data: { name: 'my-secret' },
|
const profile = await prisma.mcpProfile.create({
|
||||||
});
|
|
||||||
expect(secret.name).toBe('my-secret');
|
|
||||||
expect(secret.data).toEqual({});
|
|
||||||
expect(secret.version).toBe(1);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('stores key-value data as JSON', async () => {
|
|
||||||
const secret = await prisma.secret.create({
|
|
||||||
data: {
|
data: {
|
||||||
name: 'api-keys',
|
name: 'readonly',
|
||||||
data: { API_KEY: 'test-key', API_SECRET: 'test-secret' },
|
serverId: server.id,
|
||||||
|
permissions: ['read'],
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
const data = secret.data as Record<string, string>;
|
expect(profile.name).toBe('readonly');
|
||||||
expect(data['API_KEY']).toBe('test-key');
|
expect(profile.serverId).toBe(server.id);
|
||||||
expect(data['API_SECRET']).toBe('test-secret');
|
|
||||||
});
|
});
|
||||||
|
|
||||||
it('enforces unique name', async () => {
|
it('enforces unique name per server', async () => {
|
||||||
await prisma.secret.create({ data: { name: 'dup-secret' } });
|
const server = await createServer();
|
||||||
await expect(prisma.secret.create({ data: { name: 'dup-secret' } })).rejects.toThrow();
|
const data = { name: 'default', serverId: server.id };
|
||||||
|
await prisma.mcpProfile.create({ data });
|
||||||
|
await expect(prisma.mcpProfile.create({ data })).rejects.toThrow();
|
||||||
});
|
});
|
||||||
|
|
||||||
it('updates data', async () => {
|
it('allows same profile name on different servers', async () => {
|
||||||
const secret = await prisma.secret.create({
|
const server1 = await createServer({ name: 'server-1' });
|
||||||
data: { name: 'updatable', data: { KEY: 'old' } },
|
const server2 = await createServer({ name: 'server-2' });
|
||||||
});
|
await prisma.mcpProfile.create({ data: { name: 'default', serverId: server1.id } });
|
||||||
const updated = await prisma.secret.update({
|
const profile2 = await prisma.mcpProfile.create({ data: { name: 'default', serverId: server2.id } });
|
||||||
where: { id: secret.id },
|
expect(profile2.name).toBe('default');
|
||||||
data: { data: { KEY: 'new', EXTRA: 'added' } },
|
});
|
||||||
});
|
|
||||||
const data = updated.data as Record<string, string>;
|
it('cascades delete when server is deleted', async () => {
|
||||||
expect(data['KEY']).toBe('new');
|
const server = await createServer();
|
||||||
expect(data['EXTRA']).toBe('added');
|
await prisma.mcpProfile.create({ data: { name: 'test', serverId: server.id } });
|
||||||
|
await prisma.mcpServer.delete({ where: { id: server.id } });
|
||||||
|
const profiles = await prisma.mcpProfile.findMany({ where: { serverId: server.id } });
|
||||||
|
expect(profiles).toHaveLength(0);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -247,6 +220,62 @@ describe('Project', () => {
|
|||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// ── ProjectMcpProfile (join table) ──
|
||||||
|
|
||||||
|
describe('ProjectMcpProfile', () => {
|
||||||
|
it('links project to profile', async () => {
|
||||||
|
const user = await createUser();
|
||||||
|
const server = await createServer();
|
||||||
|
const profile = await prisma.mcpProfile.create({
|
||||||
|
data: { name: 'default', serverId: server.id },
|
||||||
|
});
|
||||||
|
const project = await prisma.project.create({
|
||||||
|
data: { name: 'test-project', ownerId: user.id },
|
||||||
|
});
|
||||||
|
|
||||||
|
const link = await prisma.projectMcpProfile.create({
|
||||||
|
data: { projectId: project.id, profileId: profile.id },
|
||||||
|
});
|
||||||
|
expect(link.projectId).toBe(project.id);
|
||||||
|
expect(link.profileId).toBe(profile.id);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('enforces unique project+profile combination', async () => {
|
||||||
|
const user = await createUser();
|
||||||
|
const server = await createServer();
|
||||||
|
const profile = await prisma.mcpProfile.create({
|
||||||
|
data: { name: 'default', serverId: server.id },
|
||||||
|
});
|
||||||
|
const project = await prisma.project.create({
|
||||||
|
data: { name: 'test-project', ownerId: user.id },
|
||||||
|
});
|
||||||
|
|
||||||
|
const data = { projectId: project.id, profileId: profile.id };
|
||||||
|
await prisma.projectMcpProfile.create({ data });
|
||||||
|
await expect(prisma.projectMcpProfile.create({ data })).rejects.toThrow();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('loads profiles through project include', async () => {
|
||||||
|
const user = await createUser();
|
||||||
|
const server = await createServer();
|
||||||
|
const profile = await prisma.mcpProfile.create({
|
||||||
|
data: { name: 'slack-ro', serverId: server.id },
|
||||||
|
});
|
||||||
|
const project = await prisma.project.create({
|
||||||
|
data: { name: 'reports', ownerId: user.id },
|
||||||
|
});
|
||||||
|
await prisma.projectMcpProfile.create({
|
||||||
|
data: { projectId: project.id, profileId: profile.id },
|
||||||
|
});
|
||||||
|
|
||||||
|
const loaded = await prisma.project.findUnique({
|
||||||
|
where: { id: project.id },
|
||||||
|
include: { profiles: { include: { profile: true } } },
|
||||||
|
});
|
||||||
|
expect(loaded!.profiles).toHaveLength(1);
|
||||||
|
expect(loaded!.profiles[0].profile.name).toBe('slack-ro');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
// ── McpInstance model ──
|
// ── McpInstance model ──
|
||||||
|
|
||||||
@@ -333,236 +362,3 @@ describe('AuditLog', () => {
|
|||||||
expect(logs).toHaveLength(0);
|
expect(logs).toHaveLength(0);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
// ── User SSO fields ──
|
|
||||||
|
|
||||||
describe('User SSO fields', () => {
|
|
||||||
it('stores provider and externalId', async () => {
|
|
||||||
const user = await prisma.user.create({
|
|
||||||
data: {
|
|
||||||
email: 'sso@example.com',
|
|
||||||
passwordHash: 'hash',
|
|
||||||
provider: 'oidc',
|
|
||||||
externalId: 'ext-123',
|
|
||||||
},
|
|
||||||
});
|
|
||||||
expect(user.provider).toBe('oidc');
|
|
||||||
expect(user.externalId).toBe('ext-123');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('defaults provider and externalId to null', async () => {
|
|
||||||
const user = await createUser();
|
|
||||||
expect(user.provider).toBeNull();
|
|
||||||
expect(user.externalId).toBeNull();
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
// ── Group model ──
|
|
||||||
|
|
||||||
describe('Group', () => {
|
|
||||||
it('creates a group with defaults', async () => {
|
|
||||||
const group = await createGroup();
|
|
||||||
expect(group.id).toBeDefined();
|
|
||||||
expect(group.version).toBe(1);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('enforces unique name', async () => {
|
|
||||||
await createGroup({ name: 'devs' });
|
|
||||||
await expect(createGroup({ name: 'devs' })).rejects.toThrow();
|
|
||||||
});
|
|
||||||
|
|
||||||
it('creates group members', async () => {
|
|
||||||
const group = await createGroup();
|
|
||||||
const user = await createUser();
|
|
||||||
const member = await prisma.groupMember.create({
|
|
||||||
data: { groupId: group.id, userId: user.id },
|
|
||||||
});
|
|
||||||
expect(member.groupId).toBe(group.id);
|
|
||||||
expect(member.userId).toBe(user.id);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('enforces unique group-user pair', async () => {
|
|
||||||
const group = await createGroup();
|
|
||||||
const user = await createUser();
|
|
||||||
await prisma.groupMember.create({ data: { groupId: group.id, userId: user.id } });
|
|
||||||
await expect(
|
|
||||||
prisma.groupMember.create({ data: { groupId: group.id, userId: user.id } }),
|
|
||||||
).rejects.toThrow();
|
|
||||||
});
|
|
||||||
|
|
||||||
it('cascades delete when group is deleted', async () => {
|
|
||||||
const group = await createGroup();
|
|
||||||
const user = await createUser();
|
|
||||||
await prisma.groupMember.create({ data: { groupId: group.id, userId: user.id } });
|
|
||||||
await prisma.group.delete({ where: { id: group.id } });
|
|
||||||
const members = await prisma.groupMember.findMany({ where: { groupId: group.id } });
|
|
||||||
expect(members).toHaveLength(0);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
// ── RbacDefinition model ──
|
|
||||||
|
|
||||||
describe('RbacDefinition', () => {
|
|
||||||
it('creates with defaults', async () => {
|
|
||||||
const rbac = await prisma.rbacDefinition.create({
|
|
||||||
data: { name: 'test-rbac' },
|
|
||||||
});
|
|
||||||
expect(rbac.subjects).toEqual([]);
|
|
||||||
expect(rbac.roleBindings).toEqual([]);
|
|
||||||
expect(rbac.version).toBe(1);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('enforces unique name', async () => {
|
|
||||||
await prisma.rbacDefinition.create({ data: { name: 'dup-rbac' } });
|
|
||||||
await expect(prisma.rbacDefinition.create({ data: { name: 'dup-rbac' } })).rejects.toThrow();
|
|
||||||
});
|
|
||||||
|
|
||||||
it('stores subjects as JSON', async () => {
|
|
||||||
const rbac = await prisma.rbacDefinition.create({
|
|
||||||
data: {
|
|
||||||
name: 'with-subjects',
|
|
||||||
subjects: [{ kind: 'User', name: 'alice@test.com' }, { kind: 'Group', name: 'devs' }],
|
|
||||||
},
|
|
||||||
});
|
|
||||||
const subjects = rbac.subjects as Array<{ kind: string; name: string }>;
|
|
||||||
expect(subjects).toHaveLength(2);
|
|
||||||
expect(subjects[0].kind).toBe('User');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('stores roleBindings as JSON', async () => {
|
|
||||||
const rbac = await prisma.rbacDefinition.create({
|
|
||||||
data: {
|
|
||||||
name: 'with-bindings',
|
|
||||||
roleBindings: [{ role: 'editor', resource: 'servers' }],
|
|
||||||
},
|
|
||||||
});
|
|
||||||
const bindings = rbac.roleBindings as Array<{ role: string; resource: string }>;
|
|
||||||
expect(bindings).toHaveLength(1);
|
|
||||||
expect(bindings[0].role).toBe('editor');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('updates subjects and roleBindings', async () => {
|
|
||||||
const rbac = await prisma.rbacDefinition.create({ data: { name: 'updatable-rbac' } });
|
|
||||||
const updated = await prisma.rbacDefinition.update({
|
|
||||||
where: { id: rbac.id },
|
|
||||||
data: {
|
|
||||||
subjects: [{ kind: 'User', name: 'bob@test.com' }],
|
|
||||||
roleBindings: [{ role: 'admin', resource: '*' }],
|
|
||||||
},
|
|
||||||
});
|
|
||||||
expect((updated.subjects as unknown[]).length).toBe(1);
|
|
||||||
expect((updated.roleBindings as unknown[]).length).toBe(1);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
// ── ProjectServer model ──
|
|
||||||
|
|
||||||
describe('ProjectServer', () => {
|
|
||||||
it('links project to server', async () => {
|
|
||||||
const project = await createProject();
|
|
||||||
const server = await createServer();
|
|
||||||
const ps = await prisma.projectServer.create({
|
|
||||||
data: { projectId: project.id, serverId: server.id },
|
|
||||||
});
|
|
||||||
expect(ps.projectId).toBe(project.id);
|
|
||||||
expect(ps.serverId).toBe(server.id);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('enforces unique project-server pair', async () => {
|
|
||||||
const project = await createProject();
|
|
||||||
const server = await createServer();
|
|
||||||
await prisma.projectServer.create({ data: { projectId: project.id, serverId: server.id } });
|
|
||||||
await expect(
|
|
||||||
prisma.projectServer.create({ data: { projectId: project.id, serverId: server.id } }),
|
|
||||||
).rejects.toThrow();
|
|
||||||
});
|
|
||||||
|
|
||||||
it('cascades delete when project is deleted', async () => {
|
|
||||||
const project = await createProject();
|
|
||||||
const server = await createServer();
|
|
||||||
await prisma.projectServer.create({ data: { projectId: project.id, serverId: server.id } });
|
|
||||||
await prisma.project.delete({ where: { id: project.id } });
|
|
||||||
const links = await prisma.projectServer.findMany({ where: { projectId: project.id } });
|
|
||||||
expect(links).toHaveLength(0);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('cascades delete when server is deleted', async () => {
|
|
||||||
const project = await createProject();
|
|
||||||
const server = await createServer();
|
|
||||||
await prisma.projectServer.create({ data: { projectId: project.id, serverId: server.id } });
|
|
||||||
await prisma.mcpServer.delete({ where: { id: server.id } });
|
|
||||||
const links = await prisma.projectServer.findMany({ where: { serverId: server.id } });
|
|
||||||
expect(links).toHaveLength(0);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
// ── ProjectMember model ──
|
|
||||||
|
|
||||||
describe('ProjectMember', () => {
|
|
||||||
it('links project to user with role', async () => {
|
|
||||||
const user = await createUser();
|
|
||||||
const project = await createProject({ ownerId: user.id });
|
|
||||||
const pm = await prisma.projectMember.create({
|
|
||||||
data: { projectId: project.id, userId: user.id, role: 'admin' },
|
|
||||||
});
|
|
||||||
expect(pm.role).toBe('admin');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('defaults role to member', async () => {
|
|
||||||
const user = await createUser();
|
|
||||||
const project = await createProject({ ownerId: user.id });
|
|
||||||
const pm = await prisma.projectMember.create({
|
|
||||||
data: { projectId: project.id, userId: user.id },
|
|
||||||
});
|
|
||||||
expect(pm.role).toBe('member');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('enforces unique project-user pair', async () => {
|
|
||||||
const user = await createUser();
|
|
||||||
const project = await createProject({ ownerId: user.id });
|
|
||||||
await prisma.projectMember.create({ data: { projectId: project.id, userId: user.id } });
|
|
||||||
await expect(
|
|
||||||
prisma.projectMember.create({ data: { projectId: project.id, userId: user.id } }),
|
|
||||||
).rejects.toThrow();
|
|
||||||
});
|
|
||||||
|
|
||||||
it('cascades delete when project is deleted', async () => {
|
|
||||||
const user = await createUser();
|
|
||||||
const project = await createProject({ ownerId: user.id });
|
|
||||||
await prisma.projectMember.create({ data: { projectId: project.id, userId: user.id } });
|
|
||||||
await prisma.project.delete({ where: { id: project.id } });
|
|
||||||
const members = await prisma.projectMember.findMany({ where: { projectId: project.id } });
|
|
||||||
expect(members).toHaveLength(0);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
// ── Project new fields ──
|
|
||||||
|
|
||||||
describe('Project new fields', () => {
|
|
||||||
it('defaults proxyMode to direct', async () => {
|
|
||||||
const project = await createProject();
|
|
||||||
expect(project.proxyMode).toBe('direct');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('stores proxyMode, llmProvider, llmModel', async () => {
|
|
||||||
const user = await createUser();
|
|
||||||
const project = await prisma.project.create({
|
|
||||||
data: {
|
|
||||||
name: 'filtered-project',
|
|
||||||
ownerId: user.id,
|
|
||||||
proxyMode: 'filtered',
|
|
||||||
llmProvider: 'gemini-cli',
|
|
||||||
llmModel: 'gemini-2.0-flash',
|
|
||||||
},
|
|
||||||
});
|
|
||||||
expect(project.proxyMode).toBe('filtered');
|
|
||||||
expect(project.llmProvider).toBe('gemini-cli');
|
|
||||||
expect(project.llmModel).toBe('gemini-2.0-flash');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('defaults llmProvider and llmModel to null', async () => {
|
|
||||||
const project = await createProject();
|
|
||||||
expect(project.llmProvider).toBeNull();
|
|
||||||
expect(project.llmModel).toBeNull();
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|||||||
@@ -1,8 +1,7 @@
|
|||||||
import { describe, it, expect, beforeAll, afterAll, beforeEach } from 'vitest';
|
import { describe, it, expect, beforeAll, afterAll, beforeEach } from 'vitest';
|
||||||
import type { PrismaClient } from '@prisma/client';
|
import type { PrismaClient } from '@prisma/client';
|
||||||
import { setupTestDb, cleanupTestDb, clearAllTables } from './helpers.js';
|
import { setupTestDb, cleanupTestDb, clearAllTables } from './helpers.js';
|
||||||
import { seedTemplates } from '../src/seed/index.js';
|
import { seedMcpServers, defaultServers } from '../src/seed/index.js';
|
||||||
import type { SeedTemplate } from '../src/seed/index.js';
|
|
||||||
|
|
||||||
let prisma: PrismaClient;
|
let prisma: PrismaClient;
|
||||||
|
|
||||||
@@ -18,69 +17,55 @@ beforeEach(async () => {
|
|||||||
await clearAllTables(prisma);
|
await clearAllTables(prisma);
|
||||||
});
|
});
|
||||||
|
|
||||||
const testTemplates: SeedTemplate[] = [
|
describe('seedMcpServers', () => {
|
||||||
{
|
it('seeds all default servers', async () => {
|
||||||
name: 'github',
|
const count = await seedMcpServers(prisma);
|
||||||
version: '1.0.0',
|
expect(count).toBe(defaultServers.length);
|
||||||
description: 'GitHub MCP server',
|
|
||||||
packageName: '@anthropic/github-mcp',
|
|
||||||
transport: 'STDIO',
|
|
||||||
env: [{ name: 'GITHUB_TOKEN', description: 'Personal access token', required: true }],
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: 'slack',
|
|
||||||
version: '1.0.0',
|
|
||||||
description: 'Slack MCP server',
|
|
||||||
packageName: '@anthropic/slack-mcp',
|
|
||||||
transport: 'STDIO',
|
|
||||||
env: [],
|
|
||||||
},
|
|
||||||
];
|
|
||||||
|
|
||||||
describe('seedTemplates', () => {
|
const servers = await prisma.mcpServer.findMany({ orderBy: { name: 'asc' } });
|
||||||
it('seeds templates', async () => {
|
expect(servers).toHaveLength(defaultServers.length);
|
||||||
const count = await seedTemplates(prisma, testTemplates);
|
|
||||||
expect(count).toBe(2);
|
|
||||||
|
|
||||||
const templates = await prisma.mcpTemplate.findMany({ orderBy: { name: 'asc' } });
|
const names = servers.map((s) => s.name);
|
||||||
expect(templates).toHaveLength(2);
|
expect(names).toContain('slack');
|
||||||
expect(templates.map((t) => t.name)).toEqual(['github', 'slack']);
|
expect(names).toContain('github');
|
||||||
|
expect(names).toContain('jira');
|
||||||
|
expect(names).toContain('terraform');
|
||||||
});
|
});
|
||||||
|
|
||||||
it('is idempotent (upsert)', async () => {
|
it('is idempotent (upsert)', async () => {
|
||||||
await seedTemplates(prisma, testTemplates);
|
await seedMcpServers(prisma);
|
||||||
const count = await seedTemplates(prisma, testTemplates);
|
const count = await seedMcpServers(prisma);
|
||||||
expect(count).toBe(2);
|
expect(count).toBe(defaultServers.length);
|
||||||
|
|
||||||
const templates = await prisma.mcpTemplate.findMany();
|
const servers = await prisma.mcpServer.findMany();
|
||||||
expect(templates).toHaveLength(2);
|
expect(servers).toHaveLength(defaultServers.length);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('seeds env correctly', async () => {
|
it('seeds envTemplate correctly', async () => {
|
||||||
await seedTemplates(prisma, testTemplates);
|
await seedMcpServers(prisma);
|
||||||
const github = await prisma.mcpTemplate.findUnique({ where: { name: 'github' } });
|
const slack = await prisma.mcpServer.findUnique({ where: { name: 'slack' } });
|
||||||
const env = github!.env as Array<{ name: string; description?: string; required?: boolean }>;
|
const envTemplate = slack!.envTemplate as Array<{ name: string; isSecret: boolean }>;
|
||||||
expect(env).toHaveLength(1);
|
expect(envTemplate).toHaveLength(2);
|
||||||
expect(env[0].name).toBe('GITHUB_TOKEN');
|
expect(envTemplate[0].name).toBe('SLACK_BOT_TOKEN');
|
||||||
expect(env[0].required).toBe(true);
|
expect(envTemplate[0].isSecret).toBe(true);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('accepts custom template list', async () => {
|
it('accepts custom server list', async () => {
|
||||||
const custom: SeedTemplate[] = [
|
const custom = [
|
||||||
{
|
{
|
||||||
name: 'custom-template',
|
name: 'custom-server',
|
||||||
version: '2.0.0',
|
description: 'Custom test server',
|
||||||
description: 'Custom test template',
|
|
||||||
packageName: '@test/custom',
|
packageName: '@test/custom',
|
||||||
transport: 'STDIO',
|
transport: 'STDIO' as const,
|
||||||
env: [],
|
repositoryUrl: 'https://example.com',
|
||||||
|
envTemplate: [],
|
||||||
},
|
},
|
||||||
];
|
];
|
||||||
const count = await seedTemplates(prisma, custom);
|
const count = await seedMcpServers(prisma, custom);
|
||||||
expect(count).toBe(1);
|
expect(count).toBe(1);
|
||||||
|
|
||||||
const templates = await prisma.mcpTemplate.findMany();
|
const servers = await prisma.mcpServer.findMany();
|
||||||
expect(templates).toHaveLength(1);
|
expect(servers).toHaveLength(1);
|
||||||
expect(templates[0].name).toBe('custom-template');
|
expect(servers[0].name).toBe('custom-server');
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
{
|
{
|
||||||
"name": "@mcpctl/mcplocal",
|
"name": "@mcpctl/local-proxy",
|
||||||
"version": "0.1.0",
|
"version": "0.1.0",
|
||||||
"private": true,
|
"private": true,
|
||||||
"type": "module",
|
"type": "module",
|
||||||
@@ -14,12 +14,7 @@
|
|||||||
"test:run": "vitest run"
|
"test:run": "vitest run"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@fastify/cors": "^10.0.0",
|
|
||||||
"@mcpctl/shared": "workspace:*",
|
|
||||||
"@modelcontextprotocol/sdk": "^1.0.0",
|
"@modelcontextprotocol/sdk": "^1.0.0",
|
||||||
"fastify": "^5.0.0"
|
"@mcpctl/shared": "workspace:*"
|
||||||
},
|
|
||||||
"devDependencies": {
|
|
||||||
"@types/node": "^25.3.0"
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
2
src/local-proxy/src/index.ts
Normal file
2
src/local-proxy/src/index.ts
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
// Local LLM proxy entry point
|
||||||
|
// Will be implemented in Task 11
|
||||||
@@ -2,8 +2,7 @@
|
|||||||
"extends": "../../tsconfig.base.json",
|
"extends": "../../tsconfig.base.json",
|
||||||
"compilerOptions": {
|
"compilerOptions": {
|
||||||
"rootDir": "src",
|
"rootDir": "src",
|
||||||
"outDir": "dist",
|
"outDir": "dist"
|
||||||
"types": ["node"]
|
|
||||||
},
|
},
|
||||||
"include": ["src/**/*.ts"],
|
"include": ["src/**/*.ts"],
|
||||||
"references": [
|
"references": [
|
||||||
@@ -2,7 +2,7 @@ import { defineProject } from 'vitest/config';
|
|||||||
|
|
||||||
export default defineProject({
|
export default defineProject({
|
||||||
test: {
|
test: {
|
||||||
name: 'mcplocal',
|
name: 'local-proxy',
|
||||||
include: ['tests/**/*.test.ts'],
|
include: ['tests/**/*.test.ts'],
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
@@ -9,7 +9,7 @@
|
|||||||
"build": "tsc --build",
|
"build": "tsc --build",
|
||||||
"clean": "rimraf dist",
|
"clean": "rimraf dist",
|
||||||
"dev": "tsx watch src/index.ts",
|
"dev": "tsx watch src/index.ts",
|
||||||
"start": "node dist/main.js",
|
"start": "node dist/index.js",
|
||||||
"test": "vitest",
|
"test": "vitest",
|
||||||
"test:run": "vitest run"
|
"test:run": "vitest run"
|
||||||
},
|
},
|
||||||
@@ -20,16 +20,10 @@
|
|||||||
"@mcpctl/db": "workspace:*",
|
"@mcpctl/db": "workspace:*",
|
||||||
"@mcpctl/shared": "workspace:*",
|
"@mcpctl/shared": "workspace:*",
|
||||||
"@prisma/client": "^6.0.0",
|
"@prisma/client": "^6.0.0",
|
||||||
"bcrypt": "^5.1.1",
|
|
||||||
"dockerode": "^4.0.9",
|
|
||||||
"fastify": "^5.0.0",
|
"fastify": "^5.0.0",
|
||||||
"js-yaml": "^4.1.0",
|
|
||||||
"zod": "^3.24.0"
|
"zod": "^3.24.0"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@types/bcrypt": "^5.0.2",
|
|
||||||
"@types/dockerode": "^4.0.1",
|
|
||||||
"@types/js-yaml": "^4.0.9",
|
|
||||||
"@types/node": "^25.3.0"
|
"@types/node": "^25.3.0"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,386 +0,0 @@
|
|||||||
import { readdirSync, readFileSync } from 'node:fs';
|
|
||||||
import { join } from 'node:path';
|
|
||||||
import { PrismaClient } from '@prisma/client';
|
|
||||||
import yaml from 'js-yaml';
|
|
||||||
import { seedTemplates } from '@mcpctl/db';
|
|
||||||
import type { SeedTemplate } from '@mcpctl/db';
|
|
||||||
import { loadConfigFromEnv } from './config/index.js';
|
|
||||||
import { createServer } from './server.js';
|
|
||||||
import { setupGracefulShutdown } from './utils/index.js';
|
|
||||||
import {
|
|
||||||
McpServerRepository,
|
|
||||||
SecretRepository,
|
|
||||||
McpInstanceRepository,
|
|
||||||
ProjectRepository,
|
|
||||||
AuditLogRepository,
|
|
||||||
TemplateRepository,
|
|
||||||
RbacDefinitionRepository,
|
|
||||||
UserRepository,
|
|
||||||
GroupRepository,
|
|
||||||
} from './repositories/index.js';
|
|
||||||
import {
|
|
||||||
McpServerService,
|
|
||||||
SecretService,
|
|
||||||
InstanceService,
|
|
||||||
ProjectService,
|
|
||||||
AuditLogService,
|
|
||||||
DockerContainerManager,
|
|
||||||
MetricsCollector,
|
|
||||||
HealthAggregator,
|
|
||||||
BackupService,
|
|
||||||
RestoreService,
|
|
||||||
AuthService,
|
|
||||||
McpProxyService,
|
|
||||||
TemplateService,
|
|
||||||
HealthProbeRunner,
|
|
||||||
RbacDefinitionService,
|
|
||||||
RbacService,
|
|
||||||
UserService,
|
|
||||||
GroupService,
|
|
||||||
} from './services/index.js';
|
|
||||||
import type { RbacAction } from './services/index.js';
|
|
||||||
import type { UpdateRbacDefinitionInput } from './validation/rbac-definition.schema.js';
|
|
||||||
import { createAuthMiddleware } from './middleware/auth.js';
|
|
||||||
import {
|
|
||||||
registerMcpServerRoutes,
|
|
||||||
registerSecretRoutes,
|
|
||||||
registerInstanceRoutes,
|
|
||||||
registerProjectRoutes,
|
|
||||||
registerAuditLogRoutes,
|
|
||||||
registerHealthMonitoringRoutes,
|
|
||||||
registerBackupRoutes,
|
|
||||||
registerAuthRoutes,
|
|
||||||
registerMcpProxyRoutes,
|
|
||||||
registerTemplateRoutes,
|
|
||||||
registerRbacRoutes,
|
|
||||||
registerUserRoutes,
|
|
||||||
registerGroupRoutes,
|
|
||||||
} from './routes/index.js';
|
|
||||||
|
|
||||||
type PermissionCheck =
|
|
||||||
| { kind: 'resource'; resource: string; action: RbacAction; resourceName?: string }
|
|
||||||
| { kind: 'operation'; operation: string }
|
|
||||||
| { kind: 'skip' };
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Map an HTTP method + URL to a permission check.
|
|
||||||
* Returns 'skip' for URLs that should not be RBAC-checked.
|
|
||||||
*/
|
|
||||||
function mapUrlToPermission(method: string, url: string): PermissionCheck {
|
|
||||||
const match = url.match(/^\/api\/v1\/([a-z-]+)/);
|
|
||||||
if (!match) return { kind: 'skip' };
|
|
||||||
|
|
||||||
const segment = match[1] as string;
|
|
||||||
|
|
||||||
// Operations (non-resource endpoints)
|
|
||||||
if (segment === 'backup') return { kind: 'operation', operation: 'backup' };
|
|
||||||
if (segment === 'restore') return { kind: 'operation', operation: 'restore' };
|
|
||||||
if (segment === 'audit-logs' && method === 'DELETE') return { kind: 'operation', operation: 'audit-purge' };
|
|
||||||
|
|
||||||
const resourceMap: Record<string, string | undefined> = {
|
|
||||||
'servers': 'servers',
|
|
||||||
'instances': 'instances',
|
|
||||||
'secrets': 'secrets',
|
|
||||||
'projects': 'projects',
|
|
||||||
'templates': 'templates',
|
|
||||||
'users': 'users',
|
|
||||||
'groups': 'groups',
|
|
||||||
'rbac': 'rbac',
|
|
||||||
'audit-logs': 'rbac',
|
|
||||||
'mcp': 'servers',
|
|
||||||
};
|
|
||||||
|
|
||||||
const resource = resourceMap[segment];
|
|
||||||
if (resource === undefined) return { kind: 'skip' };
|
|
||||||
|
|
||||||
// Special case: /api/v1/projects/:id/mcp-config → requires 'expose' permission
|
|
||||||
const mcpConfigMatch = url.match(/^\/api\/v1\/projects\/([^/?]+)\/mcp-config/);
|
|
||||||
if (mcpConfigMatch?.[1]) {
|
|
||||||
return { kind: 'resource', resource: 'projects', action: 'expose', resourceName: mcpConfigMatch[1] };
|
|
||||||
}
|
|
||||||
|
|
||||||
// Special case: /api/v1/projects/:id/servers — attach/detach requires 'edit'
|
|
||||||
const projectServersMatch = url.match(/^\/api\/v1\/projects\/([^/?]+)\/servers/);
|
|
||||||
if (projectServersMatch?.[1] && method !== 'GET') {
|
|
||||||
return { kind: 'resource', resource: 'projects', action: 'edit', resourceName: projectServersMatch[1] };
|
|
||||||
}
|
|
||||||
|
|
||||||
// Map HTTP method to action
|
|
||||||
let action: RbacAction;
|
|
||||||
switch (method) {
|
|
||||||
case 'GET':
|
|
||||||
case 'HEAD':
|
|
||||||
action = 'view';
|
|
||||||
break;
|
|
||||||
case 'POST':
|
|
||||||
action = 'create';
|
|
||||||
break;
|
|
||||||
case 'DELETE':
|
|
||||||
action = 'delete';
|
|
||||||
break;
|
|
||||||
default: // PUT, PATCH
|
|
||||||
action = 'edit';
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Extract resource name/ID from URL (3rd segment: /api/v1/servers/:nameOrId)
|
|
||||||
const nameMatch = url.match(/^\/api\/v1\/[a-z-]+\/([^/?]+)/);
|
|
||||||
const resourceName = nameMatch?.[1];
|
|
||||||
|
|
||||||
const check: PermissionCheck = { kind: 'resource', resource, action };
|
|
||||||
if (resourceName !== undefined) (check as { resourceName: string }).resourceName = resourceName;
|
|
||||||
return check;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Migrate legacy 'admin' role bindings → granular roles.
|
|
||||||
* Old format: { role: 'admin', resource: '*' }
|
|
||||||
* New format: { role: 'edit', resource: '*' }, { role: 'run', resource: '*' },
|
|
||||||
* plus operation bindings for impersonate, logs, backup, restore, audit-purge
|
|
||||||
*/
|
|
||||||
async function migrateAdminRole(rbacRepo: InstanceType<typeof RbacDefinitionRepository>): Promise<void> {
|
|
||||||
const definitions = await rbacRepo.findAll();
|
|
||||||
for (const def of definitions) {
|
|
||||||
const bindings = def.roleBindings as Array<Record<string, unknown>>;
|
|
||||||
const hasAdminRole = bindings.some((b) => b['role'] === 'admin');
|
|
||||||
if (!hasAdminRole) continue;
|
|
||||||
|
|
||||||
// Replace admin bindings with granular equivalents
|
|
||||||
const newBindings: Array<Record<string, string>> = [];
|
|
||||||
for (const b of bindings) {
|
|
||||||
if (b['role'] === 'admin') {
|
|
||||||
const resource = b['resource'] as string;
|
|
||||||
newBindings.push({ role: 'edit', resource });
|
|
||||||
newBindings.push({ role: 'run', resource });
|
|
||||||
} else {
|
|
||||||
newBindings.push(b as Record<string, string>);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// Add operation bindings (idempotent — only for wildcard admin)
|
|
||||||
const hasWildcard = bindings.some((b) => b['role'] === 'admin' && b['resource'] === '*');
|
|
||||||
if (hasWildcard) {
|
|
||||||
const ops = ['impersonate', 'logs', 'backup', 'restore', 'audit-purge'];
|
|
||||||
for (const op of ops) {
|
|
||||||
if (!newBindings.some((b) => b['action'] === op)) {
|
|
||||||
newBindings.push({ role: 'run', action: op });
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
await rbacRepo.update(def.id, { roleBindings: newBindings as UpdateRbacDefinitionInput['roleBindings'] });
|
|
||||||
// eslint-disable-next-line no-console
|
|
||||||
console.log(`mcpd: migrated RBAC '${def.name}' from admin → granular roles`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async function main(): Promise<void> {
|
|
||||||
const config = loadConfigFromEnv();
|
|
||||||
|
|
||||||
// Database
|
|
||||||
const prisma = new PrismaClient({
|
|
||||||
datasources: { db: { url: config.databaseUrl } },
|
|
||||||
});
|
|
||||||
await prisma.$connect();
|
|
||||||
|
|
||||||
// Seed templates from YAML files
|
|
||||||
const templatesDir = process.env.TEMPLATES_DIR ?? 'templates';
|
|
||||||
const templateFiles = (() => {
|
|
||||||
try {
|
|
||||||
return readdirSync(templatesDir).filter((f) => f.endsWith('.yaml') || f.endsWith('.yml'));
|
|
||||||
} catch {
|
|
||||||
return [];
|
|
||||||
}
|
|
||||||
})();
|
|
||||||
const templates: SeedTemplate[] = templateFiles.map((f) => {
|
|
||||||
const content = readFileSync(join(templatesDir, f), 'utf-8');
|
|
||||||
const parsed = yaml.load(content) as SeedTemplate;
|
|
||||||
return {
|
|
||||||
...parsed,
|
|
||||||
transport: parsed.transport ?? 'STDIO',
|
|
||||||
version: parsed.version ?? '1.0.0',
|
|
||||||
description: parsed.description ?? '',
|
|
||||||
...(parsed.healthCheck ? { healthCheck: parsed.healthCheck } : {}),
|
|
||||||
};
|
|
||||||
});
|
|
||||||
await seedTemplates(prisma, templates);
|
|
||||||
|
|
||||||
// Repositories
|
|
||||||
const serverRepo = new McpServerRepository(prisma);
|
|
||||||
const secretRepo = new SecretRepository(prisma);
|
|
||||||
const instanceRepo = new McpInstanceRepository(prisma);
|
|
||||||
const projectRepo = new ProjectRepository(prisma);
|
|
||||||
const auditLogRepo = new AuditLogRepository(prisma);
|
|
||||||
const templateRepo = new TemplateRepository(prisma);
|
|
||||||
const rbacDefinitionRepo = new RbacDefinitionRepository(prisma);
|
|
||||||
const userRepo = new UserRepository(prisma);
|
|
||||||
const groupRepo = new GroupRepository(prisma);
|
|
||||||
|
|
||||||
// CUID detection for RBAC name resolution
|
|
||||||
const CUID_RE = /^c[^\s-]{8,}$/i;
|
|
||||||
const nameResolvers: Record<string, { findById(id: string): Promise<{ name: string } | null> }> = {
|
|
||||||
servers: serverRepo,
|
|
||||||
secrets: secretRepo,
|
|
||||||
projects: projectRepo,
|
|
||||||
groups: groupRepo,
|
|
||||||
};
|
|
||||||
|
|
||||||
// Migrate legacy 'admin' role → granular roles
|
|
||||||
await migrateAdminRole(rbacDefinitionRepo);
|
|
||||||
|
|
||||||
// Orchestrator
|
|
||||||
const orchestrator = new DockerContainerManager();
|
|
||||||
|
|
||||||
// Services
|
|
||||||
const serverService = new McpServerService(serverRepo);
|
|
||||||
const instanceService = new InstanceService(instanceRepo, serverRepo, orchestrator, secretRepo);
|
|
||||||
serverService.setInstanceService(instanceService);
|
|
||||||
const secretService = new SecretService(secretRepo);
|
|
||||||
const projectService = new ProjectService(projectRepo, serverRepo, secretRepo);
|
|
||||||
const auditLogService = new AuditLogService(auditLogRepo);
|
|
||||||
const metricsCollector = new MetricsCollector();
|
|
||||||
const healthAggregator = new HealthAggregator(metricsCollector, orchestrator);
|
|
||||||
const backupService = new BackupService(serverRepo, projectRepo, secretRepo, userRepo, groupRepo, rbacDefinitionRepo);
|
|
||||||
const restoreService = new RestoreService(serverRepo, projectRepo, secretRepo, userRepo, groupRepo, rbacDefinitionRepo);
|
|
||||||
const authService = new AuthService(prisma);
|
|
||||||
const templateService = new TemplateService(templateRepo);
|
|
||||||
const mcpProxyService = new McpProxyService(instanceRepo, serverRepo);
|
|
||||||
const rbacDefinitionService = new RbacDefinitionService(rbacDefinitionRepo);
|
|
||||||
const rbacService = new RbacService(rbacDefinitionRepo, prisma);
|
|
||||||
const userService = new UserService(userRepo);
|
|
||||||
const groupService = new GroupService(groupRepo, userRepo);
|
|
||||||
|
|
||||||
// Auth middleware for global hooks
|
|
||||||
const authMiddleware = createAuthMiddleware({
|
|
||||||
findSession: (token) => authService.findSession(token),
|
|
||||||
});
|
|
||||||
|
|
||||||
// Server
|
|
||||||
const app = await createServer(config, {
|
|
||||||
health: {
|
|
||||||
checkDb: async () => {
|
|
||||||
try {
|
|
||||||
await prisma.$queryRaw`SELECT 1`;
|
|
||||||
return true;
|
|
||||||
} catch {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
},
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
// ── Global auth hook ──
|
|
||||||
// Runs on all /api/v1/* routes EXCEPT auth endpoints and health checks.
|
|
||||||
// Tests that use createServer() directly are NOT affected — this hook
|
|
||||||
// is only registered here in main.ts.
|
|
||||||
app.addHook('preHandler', async (request, reply) => {
|
|
||||||
const url = request.url;
|
|
||||||
// Skip auth for health, auth, and root
|
|
||||||
if (url.startsWith('/api/v1/auth/') || url === '/healthz' || url === '/health') return;
|
|
||||||
if (!url.startsWith('/api/v1/')) return;
|
|
||||||
|
|
||||||
// Run auth middleware
|
|
||||||
await authMiddleware(request, reply);
|
|
||||||
});
|
|
||||||
|
|
||||||
// ── Global RBAC hook ──
|
|
||||||
// Runs after the auth hook. Maps URL to resource+action and checks permissions.
|
|
||||||
app.addHook('preHandler', async (request, reply) => {
|
|
||||||
if (reply.sent) return; // Auth hook already rejected
|
|
||||||
const url = request.url;
|
|
||||||
if (url.startsWith('/api/v1/auth/') || url === '/healthz' || url === '/health') return;
|
|
||||||
if (!url.startsWith('/api/v1/')) return;
|
|
||||||
if (request.userId === undefined) return; // Auth hook will handle 401
|
|
||||||
|
|
||||||
const check = mapUrlToPermission(request.method, url);
|
|
||||||
if (check.kind === 'skip') return;
|
|
||||||
|
|
||||||
let allowed: boolean;
|
|
||||||
if (check.kind === 'operation') {
|
|
||||||
allowed = await rbacService.canRunOperation(request.userId, check.operation);
|
|
||||||
} else {
|
|
||||||
// Resolve CUID → human name for name-scoped RBAC bindings
|
|
||||||
if (check.resourceName !== undefined && CUID_RE.test(check.resourceName)) {
|
|
||||||
const resolver = nameResolvers[check.resource];
|
|
||||||
if (resolver) {
|
|
||||||
const entity = await resolver.findById(check.resourceName);
|
|
||||||
if (entity) check.resourceName = entity.name;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
allowed = await rbacService.canAccess(request.userId, check.action, check.resource, check.resourceName);
|
|
||||||
// Compute scope for list filtering (used by preSerialization hook)
|
|
||||||
if (allowed && check.resourceName === undefined) {
|
|
||||||
request.rbacScope = await rbacService.getAllowedScope(request.userId, check.action, check.resource);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (!allowed) {
|
|
||||||
reply.code(403).send({ error: 'Forbidden' });
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
// Routes
|
|
||||||
registerMcpServerRoutes(app, serverService, instanceService);
|
|
||||||
registerTemplateRoutes(app, templateService);
|
|
||||||
registerSecretRoutes(app, secretService);
|
|
||||||
registerInstanceRoutes(app, instanceService);
|
|
||||||
registerProjectRoutes(app, projectService);
|
|
||||||
registerAuditLogRoutes(app, auditLogService);
|
|
||||||
registerHealthMonitoringRoutes(app, { healthAggregator, metricsCollector });
|
|
||||||
registerBackupRoutes(app, { backupService, restoreService });
|
|
||||||
registerAuthRoutes(app, { authService, userService, groupService, rbacDefinitionService, rbacService });
|
|
||||||
registerMcpProxyRoutes(app, {
|
|
||||||
mcpProxyService,
|
|
||||||
auditLogService,
|
|
||||||
authDeps: { findSession: (token) => authService.findSession(token) },
|
|
||||||
});
|
|
||||||
registerRbacRoutes(app, rbacDefinitionService);
|
|
||||||
registerUserRoutes(app, userService);
|
|
||||||
registerGroupRoutes(app, groupService);
|
|
||||||
|
|
||||||
// ── RBAC list filtering hook ──
|
|
||||||
// Filters array responses to only include resources the user is allowed to see.
|
|
||||||
app.addHook('preSerialization', async (request, _reply, payload) => {
|
|
||||||
if (!request.rbacScope || request.rbacScope.wildcard) return payload;
|
|
||||||
if (!Array.isArray(payload)) return payload;
|
|
||||||
return (payload as Array<Record<string, unknown>>).filter((item) => {
|
|
||||||
const name = item['name'];
|
|
||||||
return typeof name === 'string' && request.rbacScope!.names.has(name);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
// Start
|
|
||||||
await app.listen({ port: config.port, host: config.host });
|
|
||||||
app.log.info(`mcpd listening on ${config.host}:${config.port}`);
|
|
||||||
|
|
||||||
// Periodic container liveness sync — detect crashed containers
|
|
||||||
const SYNC_INTERVAL_MS = 30_000; // 30s
|
|
||||||
const syncTimer = setInterval(async () => {
|
|
||||||
try {
|
|
||||||
await instanceService.syncStatus();
|
|
||||||
} catch (err) {
|
|
||||||
app.log.error({ err }, 'Container status sync failed');
|
|
||||||
}
|
|
||||||
}, SYNC_INTERVAL_MS);
|
|
||||||
|
|
||||||
// Health probe runner — periodic MCP tool-call probes (like k8s livenessProbe)
|
|
||||||
const healthProbeRunner = new HealthProbeRunner(
|
|
||||||
instanceRepo,
|
|
||||||
serverRepo,
|
|
||||||
orchestrator,
|
|
||||||
{ info: (msg) => app.log.info(msg), error: (obj, msg) => app.log.error(obj, msg) },
|
|
||||||
);
|
|
||||||
healthProbeRunner.start(15_000);
|
|
||||||
|
|
||||||
// Graceful shutdown
|
|
||||||
setupGracefulShutdown(app, {
|
|
||||||
disconnectDb: async () => {
|
|
||||||
clearInterval(syncTimer);
|
|
||||||
healthProbeRunner.stop();
|
|
||||||
await prisma.$disconnect();
|
|
||||||
},
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
main().catch((err) => {
|
|
||||||
console.error('Failed to start mcpd:', err);
|
|
||||||
process.exit(1);
|
|
||||||
});
|
|
||||||
@@ -7,7 +7,6 @@ export interface AuthDeps {
|
|||||||
declare module 'fastify' {
|
declare module 'fastify' {
|
||||||
interface FastifyRequest {
|
interface FastifyRequest {
|
||||||
userId?: string;
|
userId?: string;
|
||||||
rbacScope?: { wildcard: boolean; names: Set<string> };
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -1,36 +0,0 @@
|
|||||||
import type { FastifyRequest, FastifyReply } from 'fastify';
|
|
||||||
import type { RbacService, RbacAction } from '../services/rbac.service.js';
|
|
||||||
|
|
||||||
export function createRbacMiddleware(rbacService: RbacService) {
|
|
||||||
function requirePermission(resource: string, action: RbacAction, resourceName?: string) {
|
|
||||||
return async (request: FastifyRequest, reply: FastifyReply): Promise<void> => {
|
|
||||||
if (request.userId === undefined) {
|
|
||||||
reply.code(401).send({ error: 'Authentication required' });
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const allowed = await rbacService.canAccess(request.userId, action, resource, resourceName);
|
|
||||||
if (!allowed) {
|
|
||||||
reply.code(403).send({ error: 'Forbidden' });
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
function requireOperation(operation: string) {
|
|
||||||
return async (request: FastifyRequest, reply: FastifyReply): Promise<void> => {
|
|
||||||
if (request.userId === undefined) {
|
|
||||||
reply.code(401).send({ error: 'Authentication required' });
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const allowed = await rbacService.canRunOperation(request.userId, operation);
|
|
||||||
if (!allowed) {
|
|
||||||
reply.code(403).send({ error: 'Forbidden' });
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
return { requirePermission, requireOperation };
|
|
||||||
}
|
|
||||||
@@ -1,70 +0,0 @@
|
|||||||
import type { PrismaClient, AuditLog, Prisma } from '@prisma/client';
|
|
||||||
import type { IAuditLogRepository, AuditLogFilter } from './interfaces.js';
|
|
||||||
|
|
||||||
export class AuditLogRepository implements IAuditLogRepository {
|
|
||||||
constructor(private readonly prisma: PrismaClient) {}
|
|
||||||
|
|
||||||
async findAll(filter?: AuditLogFilter): Promise<AuditLog[]> {
|
|
||||||
const where = buildWhere(filter);
|
|
||||||
return this.prisma.auditLog.findMany({
|
|
||||||
where,
|
|
||||||
orderBy: { createdAt: 'desc' },
|
|
||||||
take: filter?.limit ?? 100,
|
|
||||||
skip: filter?.offset ?? 0,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
async findById(id: string): Promise<AuditLog | null> {
|
|
||||||
return this.prisma.auditLog.findUnique({ where: { id } });
|
|
||||||
}
|
|
||||||
|
|
||||||
async create(data: {
|
|
||||||
userId: string;
|
|
||||||
action: string;
|
|
||||||
resource: string;
|
|
||||||
resourceId?: string;
|
|
||||||
details?: Record<string, unknown>;
|
|
||||||
}): Promise<AuditLog> {
|
|
||||||
const createData: Prisma.AuditLogUncheckedCreateInput = {
|
|
||||||
userId: data.userId,
|
|
||||||
action: data.action,
|
|
||||||
resource: data.resource,
|
|
||||||
details: (data.details ?? {}) as Prisma.InputJsonValue,
|
|
||||||
};
|
|
||||||
if (data.resourceId !== undefined) {
|
|
||||||
createData.resourceId = data.resourceId;
|
|
||||||
}
|
|
||||||
return this.prisma.auditLog.create({ data: createData });
|
|
||||||
}
|
|
||||||
|
|
||||||
async count(filter?: AuditLogFilter): Promise<number> {
|
|
||||||
const where = buildWhere(filter);
|
|
||||||
return this.prisma.auditLog.count({ where });
|
|
||||||
}
|
|
||||||
|
|
||||||
async deleteOlderThan(date: Date): Promise<number> {
|
|
||||||
const result = await this.prisma.auditLog.deleteMany({
|
|
||||||
where: { createdAt: { lt: date } },
|
|
||||||
});
|
|
||||||
return result.count;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function buildWhere(filter?: AuditLogFilter): Prisma.AuditLogWhereInput {
|
|
||||||
const where: Prisma.AuditLogWhereInput = {};
|
|
||||||
if (!filter) return where;
|
|
||||||
|
|
||||||
if (filter.userId !== undefined) where.userId = filter.userId;
|
|
||||||
if (filter.action !== undefined) where.action = filter.action;
|
|
||||||
if (filter.resource !== undefined) where.resource = filter.resource;
|
|
||||||
if (filter.resourceId !== undefined) where.resourceId = filter.resourceId;
|
|
||||||
|
|
||||||
if (filter.since !== undefined || filter.until !== undefined) {
|
|
||||||
const createdAt: Prisma.DateTimeFilter = {};
|
|
||||||
if (filter.since !== undefined) createdAt.gte = filter.since;
|
|
||||||
if (filter.until !== undefined) createdAt.lte = filter.until;
|
|
||||||
where.createdAt = createdAt;
|
|
||||||
}
|
|
||||||
|
|
||||||
return where;
|
|
||||||
}
|
|
||||||
@@ -1,93 +0,0 @@
|
|||||||
import type { PrismaClient, Group } from '@prisma/client';
|
|
||||||
|
|
||||||
export interface GroupWithMembers extends Group {
|
|
||||||
members: Array<{ id: string; user: { id: string; email: string; name: string | null } }>;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface IGroupRepository {
|
|
||||||
findAll(): Promise<GroupWithMembers[]>;
|
|
||||||
findById(id: string): Promise<GroupWithMembers | null>;
|
|
||||||
findByName(name: string): Promise<GroupWithMembers | null>;
|
|
||||||
create(data: { name: string; description?: string }): Promise<Group>;
|
|
||||||
update(id: string, data: { description?: string }): Promise<Group>;
|
|
||||||
delete(id: string): Promise<void>;
|
|
||||||
setMembers(groupId: string, userIds: string[]): Promise<void>;
|
|
||||||
findGroupsForUser(userId: string): Promise<Array<{ id: string; name: string }>>;
|
|
||||||
}
|
|
||||||
|
|
||||||
const MEMBERS_INCLUDE = {
|
|
||||||
members: {
|
|
||||||
select: {
|
|
||||||
id: true,
|
|
||||||
user: {
|
|
||||||
select: { id: true, email: true, name: true },
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
} as const;
|
|
||||||
|
|
||||||
export class GroupRepository implements IGroupRepository {
|
|
||||||
constructor(private readonly prisma: PrismaClient) {}
|
|
||||||
|
|
||||||
async findAll(): Promise<GroupWithMembers[]> {
|
|
||||||
return this.prisma.group.findMany({
|
|
||||||
orderBy: { name: 'asc' },
|
|
||||||
include: MEMBERS_INCLUDE,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
async findById(id: string): Promise<GroupWithMembers | null> {
|
|
||||||
return this.prisma.group.findUnique({
|
|
||||||
where: { id },
|
|
||||||
include: MEMBERS_INCLUDE,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
async findByName(name: string): Promise<GroupWithMembers | null> {
|
|
||||||
return this.prisma.group.findUnique({
|
|
||||||
where: { name },
|
|
||||||
include: MEMBERS_INCLUDE,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
async create(data: { name: string; description?: string }): Promise<Group> {
|
|
||||||
const createData: Record<string, unknown> = { name: data.name };
|
|
||||||
if (data.description !== undefined) createData['description'] = data.description;
|
|
||||||
return this.prisma.group.create({
|
|
||||||
data: createData as Parameters<PrismaClient['group']['create']>[0]['data'],
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
async update(id: string, data: { description?: string }): Promise<Group> {
|
|
||||||
const updateData: Record<string, unknown> = {};
|
|
||||||
if (data.description !== undefined) updateData['description'] = data.description;
|
|
||||||
return this.prisma.group.update({ where: { id }, data: updateData });
|
|
||||||
}
|
|
||||||
|
|
||||||
async delete(id: string): Promise<void> {
|
|
||||||
await this.prisma.group.delete({ where: { id } });
|
|
||||||
}
|
|
||||||
|
|
||||||
async setMembers(groupId: string, userIds: string[]): Promise<void> {
|
|
||||||
await this.prisma.$transaction(async (tx) => {
|
|
||||||
await tx.groupMember.deleteMany({ where: { groupId } });
|
|
||||||
if (userIds.length > 0) {
|
|
||||||
await tx.groupMember.createMany({
|
|
||||||
data: userIds.map((userId) => ({ groupId, userId })),
|
|
||||||
});
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
async findGroupsForUser(userId: string): Promise<Array<{ id: string; name: string }>> {
|
|
||||||
const memberships = await this.prisma.groupMember.findMany({
|
|
||||||
where: { userId },
|
|
||||||
select: {
|
|
||||||
group: {
|
|
||||||
select: { id: true, name: true },
|
|
||||||
},
|
|
||||||
},
|
|
||||||
});
|
|
||||||
return memberships.map((m) => m.group);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,15 +1,5 @@
|
|||||||
export type { IMcpServerRepository, IMcpInstanceRepository, ISecretRepository, IAuditLogRepository, AuditLogFilter } from './interfaces.js';
|
export type { IMcpServerRepository, IMcpProfileRepository } from './interfaces.js';
|
||||||
export { McpServerRepository } from './mcp-server.repository.js';
|
export { McpServerRepository } from './mcp-server.repository.js';
|
||||||
export { SecretRepository } from './secret.repository.js';
|
export { McpProfileRepository } from './mcp-profile.repository.js';
|
||||||
export type { IProjectRepository, ProjectWithRelations } from './project.repository.js';
|
export type { IProjectRepository } from './project.repository.js';
|
||||||
export { ProjectRepository } from './project.repository.js';
|
export { ProjectRepository } from './project.repository.js';
|
||||||
export { McpInstanceRepository } from './mcp-instance.repository.js';
|
|
||||||
export { AuditLogRepository } from './audit-log.repository.js';
|
|
||||||
export type { ITemplateRepository } from './template.repository.js';
|
|
||||||
export { TemplateRepository } from './template.repository.js';
|
|
||||||
export type { IRbacDefinitionRepository } from './rbac-definition.repository.js';
|
|
||||||
export { RbacDefinitionRepository } from './rbac-definition.repository.js';
|
|
||||||
export type { IUserRepository, SafeUser } from './user.repository.js';
|
|
||||||
export { UserRepository } from './user.repository.js';
|
|
||||||
export type { IGroupRepository, GroupWithMembers } from './group.repository.js';
|
|
||||||
export { GroupRepository } from './group.repository.js';
|
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
import type { McpServer, McpInstance, AuditLog, Secret, InstanceStatus } from '@prisma/client';
|
import type { McpServer, McpProfile } from '@prisma/client';
|
||||||
import type { CreateMcpServerInput, UpdateMcpServerInput } from '../validation/mcp-server.schema.js';
|
import type { CreateMcpServerInput, UpdateMcpServerInput } from '../validation/mcp-server.schema.js';
|
||||||
import type { CreateSecretInput, UpdateSecretInput } from '../validation/secret.schema.js';
|
import type { CreateMcpProfileInput, UpdateMcpProfileInput } from '../validation/mcp-profile.schema.js';
|
||||||
|
|
||||||
export interface IMcpServerRepository {
|
export interface IMcpServerRepository {
|
||||||
findAll(): Promise<McpServer[]>;
|
findAll(): Promise<McpServer[]>;
|
||||||
@@ -11,39 +11,11 @@ export interface IMcpServerRepository {
|
|||||||
delete(id: string): Promise<void>;
|
delete(id: string): Promise<void>;
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface IMcpInstanceRepository {
|
export interface IMcpProfileRepository {
|
||||||
findAll(serverId?: string): Promise<McpInstance[]>;
|
findAll(serverId?: string): Promise<McpProfile[]>;
|
||||||
findById(id: string): Promise<McpInstance | null>;
|
findById(id: string): Promise<McpProfile | null>;
|
||||||
findByContainerId(containerId: string): Promise<McpInstance | null>;
|
findByServerAndName(serverId: string, name: string): Promise<McpProfile | null>;
|
||||||
create(data: { serverId: string; containerId?: string; status?: InstanceStatus; port?: number; metadata?: Record<string, unknown> }): Promise<McpInstance>;
|
create(data: CreateMcpProfileInput): Promise<McpProfile>;
|
||||||
updateStatus(id: string, status: InstanceStatus, fields?: { containerId?: string; port?: number; metadata?: Record<string, unknown>; healthStatus?: string; lastHealthCheck?: Date; events?: unknown[] }): Promise<McpInstance>;
|
update(id: string, data: UpdateMcpProfileInput): Promise<McpProfile>;
|
||||||
delete(id: string): Promise<void>;
|
delete(id: string): Promise<void>;
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface ISecretRepository {
|
|
||||||
findAll(): Promise<Secret[]>;
|
|
||||||
findById(id: string): Promise<Secret | null>;
|
|
||||||
findByName(name: string): Promise<Secret | null>;
|
|
||||||
create(data: CreateSecretInput): Promise<Secret>;
|
|
||||||
update(id: string, data: UpdateSecretInput): Promise<Secret>;
|
|
||||||
delete(id: string): Promise<void>;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface AuditLogFilter {
|
|
||||||
userId?: string;
|
|
||||||
action?: string;
|
|
||||||
resource?: string;
|
|
||||||
resourceId?: string;
|
|
||||||
since?: Date;
|
|
||||||
until?: Date;
|
|
||||||
limit?: number;
|
|
||||||
offset?: number;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface IAuditLogRepository {
|
|
||||||
findAll(filter?: AuditLogFilter): Promise<AuditLog[]>;
|
|
||||||
findById(id: string): Promise<AuditLog | null>;
|
|
||||||
create(data: { userId: string; action: string; resource: string; resourceId?: string; details?: Record<string, unknown> }): Promise<AuditLog>;
|
|
||||||
count(filter?: AuditLogFilter): Promise<number>;
|
|
||||||
deleteOlderThan(date: Date): Promise<number>;
|
|
||||||
}
|
|
||||||
|
|||||||
@@ -1,84 +0,0 @@
|
|||||||
import type { PrismaClient, McpInstance, InstanceStatus, Prisma } from '@prisma/client';
|
|
||||||
import type { IMcpInstanceRepository } from './interfaces.js';
|
|
||||||
|
|
||||||
export class McpInstanceRepository implements IMcpInstanceRepository {
|
|
||||||
constructor(private prisma: PrismaClient) {}
|
|
||||||
|
|
||||||
async findAll(serverId?: string): Promise<McpInstance[]> {
|
|
||||||
const where: Prisma.McpInstanceWhereInput = {};
|
|
||||||
if (serverId) {
|
|
||||||
where.serverId = serverId;
|
|
||||||
}
|
|
||||||
return this.prisma.mcpInstance.findMany({
|
|
||||||
where,
|
|
||||||
include: { server: { select: { name: true } } },
|
|
||||||
orderBy: { createdAt: 'desc' },
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
async findById(id: string): Promise<McpInstance | null> {
|
|
||||||
return this.prisma.mcpInstance.findUnique({
|
|
||||||
where: { id },
|
|
||||||
include: { server: { select: { name: true } } },
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
async findByContainerId(containerId: string): Promise<McpInstance | null> {
|
|
||||||
return this.prisma.mcpInstance.findFirst({ where: { containerId } });
|
|
||||||
}
|
|
||||||
|
|
||||||
async create(data: {
|
|
||||||
serverId: string;
|
|
||||||
containerId?: string;
|
|
||||||
status?: InstanceStatus;
|
|
||||||
port?: number;
|
|
||||||
metadata?: Record<string, unknown>;
|
|
||||||
}): Promise<McpInstance> {
|
|
||||||
return this.prisma.mcpInstance.create({
|
|
||||||
data: {
|
|
||||||
serverId: data.serverId,
|
|
||||||
containerId: data.containerId ?? null,
|
|
||||||
status: data.status ?? 'STOPPED',
|
|
||||||
port: data.port ?? null,
|
|
||||||
metadata: (data.metadata ?? {}) as Prisma.InputJsonValue,
|
|
||||||
},
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
async updateStatus(
|
|
||||||
id: string,
|
|
||||||
status: InstanceStatus,
|
|
||||||
fields?: { containerId?: string; port?: number; metadata?: Record<string, unknown>; healthStatus?: string; lastHealthCheck?: Date; events?: unknown[] },
|
|
||||||
): Promise<McpInstance> {
|
|
||||||
const updateData: Prisma.McpInstanceUpdateInput = {
|
|
||||||
status,
|
|
||||||
version: { increment: 1 },
|
|
||||||
};
|
|
||||||
if (fields?.containerId !== undefined) {
|
|
||||||
updateData.containerId = fields.containerId;
|
|
||||||
}
|
|
||||||
if (fields?.port !== undefined) {
|
|
||||||
updateData.port = fields.port;
|
|
||||||
}
|
|
||||||
if (fields?.metadata !== undefined) {
|
|
||||||
updateData.metadata = fields.metadata as Prisma.InputJsonValue;
|
|
||||||
}
|
|
||||||
if (fields?.healthStatus !== undefined) {
|
|
||||||
updateData.healthStatus = fields.healthStatus;
|
|
||||||
}
|
|
||||||
if (fields?.lastHealthCheck !== undefined) {
|
|
||||||
updateData.lastHealthCheck = fields.lastHealthCheck;
|
|
||||||
}
|
|
||||||
if (fields?.events !== undefined) {
|
|
||||||
updateData.events = fields.events as unknown as Prisma.InputJsonValue;
|
|
||||||
}
|
|
||||||
return this.prisma.mcpInstance.update({
|
|
||||||
where: { id },
|
|
||||||
data: updateData,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
async delete(id: string): Promise<void> {
|
|
||||||
await this.prisma.mcpInstance.delete({ where: { id } });
|
|
||||||
}
|
|
||||||
}
|
|
||||||
46
src/mcpd/src/repositories/mcp-profile.repository.ts
Normal file
46
src/mcpd/src/repositories/mcp-profile.repository.ts
Normal file
@@ -0,0 +1,46 @@
|
|||||||
|
import type { PrismaClient, McpProfile } from '@prisma/client';
|
||||||
|
import type { IMcpProfileRepository } from './interfaces.js';
|
||||||
|
import type { CreateMcpProfileInput, UpdateMcpProfileInput } from '../validation/mcp-profile.schema.js';
|
||||||
|
|
||||||
|
export class McpProfileRepository implements IMcpProfileRepository {
|
||||||
|
constructor(private readonly prisma: PrismaClient) {}
|
||||||
|
|
||||||
|
async findAll(serverId?: string): Promise<McpProfile[]> {
|
||||||
|
const where = serverId !== undefined ? { serverId } : {};
|
||||||
|
return this.prisma.mcpProfile.findMany({ where, orderBy: { name: 'asc' } });
|
||||||
|
}
|
||||||
|
|
||||||
|
async findById(id: string): Promise<McpProfile | null> {
|
||||||
|
return this.prisma.mcpProfile.findUnique({ where: { id } });
|
||||||
|
}
|
||||||
|
|
||||||
|
async findByServerAndName(serverId: string, name: string): Promise<McpProfile | null> {
|
||||||
|
return this.prisma.mcpProfile.findUnique({
|
||||||
|
where: { name_serverId: { name, serverId } },
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
async create(data: CreateMcpProfileInput): Promise<McpProfile> {
|
||||||
|
return this.prisma.mcpProfile.create({
|
||||||
|
data: {
|
||||||
|
name: data.name,
|
||||||
|
serverId: data.serverId,
|
||||||
|
permissions: data.permissions,
|
||||||
|
envOverrides: data.envOverrides,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
async update(id: string, data: UpdateMcpProfileInput): Promise<McpProfile> {
|
||||||
|
const updateData: Record<string, unknown> = {};
|
||||||
|
if (data.name !== undefined) updateData['name'] = data.name;
|
||||||
|
if (data.permissions !== undefined) updateData['permissions'] = data.permissions;
|
||||||
|
if (data.envOverrides !== undefined) updateData['envOverrides'] = data.envOverrides;
|
||||||
|
|
||||||
|
return this.prisma.mcpProfile.update({ where: { id }, data: updateData });
|
||||||
|
}
|
||||||
|
|
||||||
|
async delete(id: string): Promise<void> {
|
||||||
|
await this.prisma.mcpProfile.delete({ where: { id } });
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,4 +1,4 @@
|
|||||||
import { type PrismaClient, type McpServer, Prisma } from '@prisma/client';
|
import type { PrismaClient, McpServer } from '@prisma/client';
|
||||||
import type { IMcpServerRepository } from './interfaces.js';
|
import type { IMcpServerRepository } from './interfaces.js';
|
||||||
import type { CreateMcpServerInput, UpdateMcpServerInput } from '../validation/mcp-server.schema.js';
|
import type { CreateMcpServerInput, UpdateMcpServerInput } from '../validation/mcp-server.schema.js';
|
||||||
|
|
||||||
@@ -26,12 +26,7 @@ export class McpServerRepository implements IMcpServerRepository {
|
|||||||
dockerImage: data.dockerImage ?? null,
|
dockerImage: data.dockerImage ?? null,
|
||||||
transport: data.transport,
|
transport: data.transport,
|
||||||
repositoryUrl: data.repositoryUrl ?? null,
|
repositoryUrl: data.repositoryUrl ?? null,
|
||||||
externalUrl: data.externalUrl ?? null,
|
envTemplate: data.envTemplate,
|
||||||
command: data.command ?? Prisma.DbNull,
|
|
||||||
containerPort: data.containerPort ?? null,
|
|
||||||
replicas: data.replicas,
|
|
||||||
env: data.env,
|
|
||||||
healthCheck: (data.healthCheck ?? Prisma.JsonNull) as Prisma.InputJsonValue,
|
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
@@ -43,12 +38,7 @@ export class McpServerRepository implements IMcpServerRepository {
|
|||||||
if (data.dockerImage !== undefined) updateData['dockerImage'] = data.dockerImage;
|
if (data.dockerImage !== undefined) updateData['dockerImage'] = data.dockerImage;
|
||||||
if (data.transport !== undefined) updateData['transport'] = data.transport;
|
if (data.transport !== undefined) updateData['transport'] = data.transport;
|
||||||
if (data.repositoryUrl !== undefined) updateData['repositoryUrl'] = data.repositoryUrl;
|
if (data.repositoryUrl !== undefined) updateData['repositoryUrl'] = data.repositoryUrl;
|
||||||
if (data.externalUrl !== undefined) updateData['externalUrl'] = data.externalUrl;
|
if (data.envTemplate !== undefined) updateData['envTemplate'] = data.envTemplate;
|
||||||
if (data.command !== undefined) updateData['command'] = data.command;
|
|
||||||
if (data.containerPort !== undefined) updateData['containerPort'] = data.containerPort;
|
|
||||||
if (data.replicas !== undefined) updateData['replicas'] = data.replicas;
|
|
||||||
if (data.env !== undefined) updateData['env'] = data.env;
|
|
||||||
if (data.healthCheck !== undefined) updateData['healthCheck'] = (data.healthCheck ?? Prisma.JsonNull) as Prisma.InputJsonValue;
|
|
||||||
|
|
||||||
return this.prisma.mcpServer.update({ where: { id }, data: updateData });
|
return this.prisma.mcpServer.update({ where: { id }, data: updateData });
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,91 +1,69 @@
|
|||||||
import type { PrismaClient, Project } from '@prisma/client';
|
import type { PrismaClient, Project } from '@prisma/client';
|
||||||
|
import type { CreateProjectInput, UpdateProjectInput } from '../validation/project.schema.js';
|
||||||
export interface ProjectWithRelations extends Project {
|
|
||||||
servers: Array<{ id: string; server: { id: string; name: string } }>;
|
|
||||||
}
|
|
||||||
|
|
||||||
const PROJECT_INCLUDE = {
|
|
||||||
servers: { include: { server: { select: { id: true, name: true } } } },
|
|
||||||
} as const;
|
|
||||||
|
|
||||||
export interface IProjectRepository {
|
export interface IProjectRepository {
|
||||||
findAll(ownerId?: string): Promise<ProjectWithRelations[]>;
|
findAll(ownerId?: string): Promise<Project[]>;
|
||||||
findById(id: string): Promise<ProjectWithRelations | null>;
|
findById(id: string): Promise<Project | null>;
|
||||||
findByName(name: string): Promise<ProjectWithRelations | null>;
|
findByName(name: string): Promise<Project | null>;
|
||||||
create(data: { name: string; description: string; ownerId: string; proxyMode: string; llmProvider?: string; llmModel?: string }): Promise<ProjectWithRelations>;
|
create(data: CreateProjectInput & { ownerId: string }): Promise<Project>;
|
||||||
update(id: string, data: Record<string, unknown>): Promise<ProjectWithRelations>;
|
update(id: string, data: UpdateProjectInput): Promise<Project>;
|
||||||
delete(id: string): Promise<void>;
|
delete(id: string): Promise<void>;
|
||||||
setServers(projectId: string, serverIds: string[]): Promise<void>;
|
setProfiles(projectId: string, profileIds: string[]): Promise<void>;
|
||||||
addServer(projectId: string, serverId: string): Promise<void>;
|
getProfileIds(projectId: string): Promise<string[]>;
|
||||||
removeServer(projectId: string, serverId: string): Promise<void>;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
export class ProjectRepository implements IProjectRepository {
|
export class ProjectRepository implements IProjectRepository {
|
||||||
constructor(private readonly prisma: PrismaClient) {}
|
constructor(private readonly prisma: PrismaClient) {}
|
||||||
|
|
||||||
async findAll(ownerId?: string): Promise<ProjectWithRelations[]> {
|
async findAll(ownerId?: string): Promise<Project[]> {
|
||||||
const where = ownerId !== undefined ? { ownerId } : {};
|
const where = ownerId !== undefined ? { ownerId } : {};
|
||||||
return this.prisma.project.findMany({ where, orderBy: { name: 'asc' }, include: PROJECT_INCLUDE }) as unknown as Promise<ProjectWithRelations[]>;
|
return this.prisma.project.findMany({ where, orderBy: { name: 'asc' } });
|
||||||
}
|
}
|
||||||
|
|
||||||
async findById(id: string): Promise<ProjectWithRelations | null> {
|
async findById(id: string): Promise<Project | null> {
|
||||||
return this.prisma.project.findUnique({ where: { id }, include: PROJECT_INCLUDE }) as unknown as Promise<ProjectWithRelations | null>;
|
return this.prisma.project.findUnique({ where: { id } });
|
||||||
}
|
}
|
||||||
|
|
||||||
async findByName(name: string): Promise<ProjectWithRelations | null> {
|
async findByName(name: string): Promise<Project | null> {
|
||||||
return this.prisma.project.findUnique({ where: { name }, include: PROJECT_INCLUDE }) as unknown as Promise<ProjectWithRelations | null>;
|
return this.prisma.project.findUnique({ where: { name } });
|
||||||
}
|
}
|
||||||
|
|
||||||
async create(data: { name: string; description: string; ownerId: string; proxyMode: string; llmProvider?: string; llmModel?: string }): Promise<ProjectWithRelations> {
|
async create(data: CreateProjectInput & { ownerId: string }): Promise<Project> {
|
||||||
const createData: Record<string, unknown> = {
|
|
||||||
name: data.name,
|
|
||||||
description: data.description,
|
|
||||||
ownerId: data.ownerId,
|
|
||||||
proxyMode: data.proxyMode,
|
|
||||||
};
|
|
||||||
if (data.llmProvider !== undefined) createData['llmProvider'] = data.llmProvider;
|
|
||||||
if (data.llmModel !== undefined) createData['llmModel'] = data.llmModel;
|
|
||||||
|
|
||||||
return this.prisma.project.create({
|
return this.prisma.project.create({
|
||||||
data: createData as Parameters<PrismaClient['project']['create']>[0]['data'],
|
data: {
|
||||||
include: PROJECT_INCLUDE,
|
name: data.name,
|
||||||
}) as unknown as Promise<ProjectWithRelations>;
|
description: data.description,
|
||||||
|
ownerId: data.ownerId,
|
||||||
|
},
|
||||||
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
async update(id: string, data: Record<string, unknown>): Promise<ProjectWithRelations> {
|
async update(id: string, data: UpdateProjectInput): Promise<Project> {
|
||||||
return this.prisma.project.update({
|
const updateData: Record<string, unknown> = {};
|
||||||
where: { id },
|
if (data.description !== undefined) updateData['description'] = data.description;
|
||||||
data,
|
return this.prisma.project.update({ where: { id }, data: updateData });
|
||||||
include: PROJECT_INCLUDE,
|
|
||||||
}) as unknown as Promise<ProjectWithRelations>;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
async delete(id: string): Promise<void> {
|
async delete(id: string): Promise<void> {
|
||||||
await this.prisma.project.delete({ where: { id } });
|
await this.prisma.project.delete({ where: { id } });
|
||||||
}
|
}
|
||||||
|
|
||||||
async setServers(projectId: string, serverIds: string[]): Promise<void> {
|
async setProfiles(projectId: string, profileIds: string[]): Promise<void> {
|
||||||
await this.prisma.$transaction(async (tx) => {
|
await this.prisma.$transaction([
|
||||||
await tx.projectServer.deleteMany({ where: { projectId } });
|
this.prisma.projectMcpProfile.deleteMany({ where: { projectId } }),
|
||||||
if (serverIds.length > 0) {
|
...profileIds.map((profileId) =>
|
||||||
await tx.projectServer.createMany({
|
this.prisma.projectMcpProfile.create({
|
||||||
data: serverIds.map((serverId) => ({ projectId, serverId })),
|
data: { projectId, profileId },
|
||||||
});
|
}),
|
||||||
}
|
),
|
||||||
});
|
]);
|
||||||
}
|
}
|
||||||
|
|
||||||
async addServer(projectId: string, serverId: string): Promise<void> {
|
async getProfileIds(projectId: string): Promise<string[]> {
|
||||||
await this.prisma.projectServer.upsert({
|
const links = await this.prisma.projectMcpProfile.findMany({
|
||||||
where: { projectId_serverId: { projectId, serverId } },
|
where: { projectId },
|
||||||
create: { projectId, serverId },
|
select: { profileId: true },
|
||||||
update: {},
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
async removeServer(projectId: string, serverId: string): Promise<void> {
|
|
||||||
await this.prisma.projectServer.deleteMany({
|
|
||||||
where: { projectId, serverId },
|
|
||||||
});
|
});
|
||||||
|
return links.map((l) => l.profileId);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,48 +0,0 @@
|
|||||||
import type { PrismaClient, RbacDefinition } from '@prisma/client';
|
|
||||||
import type { CreateRbacDefinitionInput, UpdateRbacDefinitionInput } from '../validation/rbac-definition.schema.js';
|
|
||||||
|
|
||||||
export interface IRbacDefinitionRepository {
|
|
||||||
findAll(): Promise<RbacDefinition[]>;
|
|
||||||
findById(id: string): Promise<RbacDefinition | null>;
|
|
||||||
findByName(name: string): Promise<RbacDefinition | null>;
|
|
||||||
create(data: CreateRbacDefinitionInput): Promise<RbacDefinition>;
|
|
||||||
update(id: string, data: UpdateRbacDefinitionInput): Promise<RbacDefinition>;
|
|
||||||
delete(id: string): Promise<void>;
|
|
||||||
}
|
|
||||||
|
|
||||||
export class RbacDefinitionRepository implements IRbacDefinitionRepository {
|
|
||||||
constructor(private readonly prisma: PrismaClient) {}
|
|
||||||
|
|
||||||
async findAll(): Promise<RbacDefinition[]> {
|
|
||||||
return this.prisma.rbacDefinition.findMany({ orderBy: { name: 'asc' } });
|
|
||||||
}
|
|
||||||
|
|
||||||
async findById(id: string): Promise<RbacDefinition | null> {
|
|
||||||
return this.prisma.rbacDefinition.findUnique({ where: { id } });
|
|
||||||
}
|
|
||||||
|
|
||||||
async findByName(name: string): Promise<RbacDefinition | null> {
|
|
||||||
return this.prisma.rbacDefinition.findUnique({ where: { name } });
|
|
||||||
}
|
|
||||||
|
|
||||||
async create(data: CreateRbacDefinitionInput): Promise<RbacDefinition> {
|
|
||||||
return this.prisma.rbacDefinition.create({
|
|
||||||
data: {
|
|
||||||
name: data.name,
|
|
||||||
subjects: data.subjects,
|
|
||||||
roleBindings: data.roleBindings,
|
|
||||||
},
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
async update(id: string, data: UpdateRbacDefinitionInput): Promise<RbacDefinition> {
|
|
||||||
const updateData: Record<string, unknown> = {};
|
|
||||||
if (data.subjects !== undefined) updateData['subjects'] = data.subjects;
|
|
||||||
if (data.roleBindings !== undefined) updateData['roleBindings'] = data.roleBindings;
|
|
||||||
return this.prisma.rbacDefinition.update({ where: { id }, data: updateData });
|
|
||||||
}
|
|
||||||
|
|
||||||
async delete(id: string): Promise<void> {
|
|
||||||
await this.prisma.rbacDefinition.delete({ where: { id } });
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user