Compare commits
66 Commits
feat/mcp-r
...
fix/db-tes
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
3a6e58274c | ||
|
|
c819b65175 | ||
|
|
c3ef5a664f | ||
|
|
4c2927a16e | ||
| 79dd6e723d | |||
|
|
cde1c59fd6 | ||
| daa5860ed2 | |||
|
|
ecbf48dd49 | ||
| d38b5aac60 | |||
|
|
d07d4d11dd | ||
| fa58c1b5ed | |||
|
|
dd1dfc629d | ||
| 7b3dab142e | |||
|
|
4c127a7dc3 | ||
| c1e3e4aed6 | |||
|
|
e45c6079c1 | ||
| e4aef3acf1 | |||
|
|
a2cda38850 | ||
| 081e90de0f | |||
|
|
4e3d896ef6 | ||
| 0823e965bf | |||
|
|
c97219f85e | ||
| 93adcd4be7 | |||
|
|
d58e6e153f | ||
|
|
1e8847bb63 | ||
|
|
2a0deaa225 | ||
| 4eef6e38a2 | |||
|
|
ca02340a4c | ||
|
|
02254f2aac | ||
|
|
540dd6fd63 | ||
| a05a4c4816 | |||
|
|
97ade470df | ||
|
|
b25ff98374 | ||
|
|
22fe9c3435 | ||
| 72643fceda | |||
|
|
467357c2c6 | ||
| d6a80fc03d | |||
|
|
c07da826a0 | ||
|
|
0482944056 | ||
| 46e07e4515 | |||
|
|
b8c5cf718a | ||
|
|
a4fe5fdbe2 | ||
|
|
e1ed585e2a | ||
|
|
48fce7fe45 | ||
|
|
89b2b1b13d | ||
|
|
6da4ae495c | ||
|
|
9a67e51307 | ||
|
|
9e660140b3 | ||
|
|
d0a224e839 | ||
|
|
6161686441 | ||
|
|
3ee0dbe58e | ||
|
|
a520b9ff47 | ||
|
|
9c08faa8d2 | ||
|
|
dbb2fe63cd | ||
|
|
4d796e2aa7 | ||
|
|
7c07749580 | ||
|
|
09675f020f | ||
|
|
4b67a9cc15 | ||
|
|
1b8b886995 | ||
|
|
d1390313a3 | ||
|
|
0ff5c85cf6 | ||
|
|
3fa2bc5ffa | ||
|
|
47f10f62c7 | ||
|
|
247b4967e5 | ||
|
|
dc45f5981b | ||
| f5fae2936a |
15
.dockerignore
Normal file
15
.dockerignore
Normal file
@@ -0,0 +1,15 @@
|
||||
node_modules
|
||||
*/node_modules
|
||||
**/node_modules
|
||||
dist
|
||||
**/dist
|
||||
.git
|
||||
.taskmaster
|
||||
.claude
|
||||
*.md
|
||||
!pnpm-workspace.yaml
|
||||
.env
|
||||
.env.*
|
||||
deploy/docker-compose.yml
|
||||
src/cli
|
||||
src/mcplocal
|
||||
142
.gitea/workflows/ci.yml
Normal file
142
.gitea/workflows/ci.yml
Normal file
@@ -0,0 +1,142 @@
|
||||
name: CI
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main]
|
||||
pull_request:
|
||||
branches: [main]
|
||||
|
||||
jobs:
|
||||
lint:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: pnpm/action-setup@v4
|
||||
with:
|
||||
version: 9
|
||||
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
cache: pnpm
|
||||
|
||||
- run: pnpm install --frozen-lockfile
|
||||
|
||||
- name: Lint
|
||||
run: pnpm lint
|
||||
|
||||
typecheck:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: pnpm/action-setup@v4
|
||||
with:
|
||||
version: 9
|
||||
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
cache: pnpm
|
||||
|
||||
- run: pnpm install --frozen-lockfile
|
||||
|
||||
- name: Generate Prisma client
|
||||
run: pnpm --filter @mcpctl/db exec prisma generate
|
||||
|
||||
- name: Typecheck
|
||||
run: pnpm typecheck
|
||||
|
||||
test:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: pnpm/action-setup@v4
|
||||
with:
|
||||
version: 9
|
||||
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
cache: pnpm
|
||||
|
||||
- run: pnpm install --frozen-lockfile
|
||||
|
||||
- name: Generate Prisma client
|
||||
run: pnpm --filter @mcpctl/db exec prisma generate
|
||||
|
||||
- name: Run tests
|
||||
run: pnpm test:run
|
||||
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
needs: [lint, typecheck, test]
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: pnpm/action-setup@v4
|
||||
with:
|
||||
version: 9
|
||||
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
cache: pnpm
|
||||
|
||||
- run: pnpm install --frozen-lockfile
|
||||
|
||||
- name: Generate Prisma client
|
||||
run: pnpm --filter @mcpctl/db exec prisma generate
|
||||
|
||||
- name: Build all packages
|
||||
run: pnpm build
|
||||
|
||||
package:
|
||||
runs-on: ubuntu-latest
|
||||
needs: [build]
|
||||
if: github.ref == 'refs/heads/main' && github.event_name == 'push'
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: pnpm/action-setup@v4
|
||||
with:
|
||||
version: 9
|
||||
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 20
|
||||
cache: pnpm
|
||||
|
||||
- run: pnpm install --frozen-lockfile
|
||||
|
||||
- name: Generate Prisma client
|
||||
run: pnpm --filter @mcpctl/db exec prisma generate
|
||||
|
||||
- name: Build TypeScript
|
||||
run: pnpm build
|
||||
|
||||
- name: Install bun
|
||||
uses: oven-sh/setup-bun@v2
|
||||
|
||||
- name: Install nfpm
|
||||
run: |
|
||||
curl -sL -o /tmp/nfpm.tar.gz "https://github.com/goreleaser/nfpm/releases/download/v2.45.0/nfpm_2.45.0_Linux_x86_64.tar.gz"
|
||||
tar xzf /tmp/nfpm.tar.gz -C /usr/local/bin nfpm
|
||||
|
||||
- name: Bundle standalone binary
|
||||
run: bun build src/cli/src/index.ts --compile --outfile dist/mcpctl
|
||||
|
||||
- name: Build RPM
|
||||
run: nfpm pkg --packager rpm --target dist/
|
||||
|
||||
- name: Publish to Gitea packages
|
||||
env:
|
||||
GITEA_TOKEN: ${{ secrets.GITEA_TOKEN }}
|
||||
run: |
|
||||
RPM_FILE=$(ls dist/mcpctl-*.rpm | head -1)
|
||||
curl --fail -X PUT \
|
||||
-H "Authorization: token ${GITEA_TOKEN}" \
|
||||
--upload-file "$RPM_FILE" \
|
||||
"${{ github.server_url }}/api/packages/${{ github.repository_owner }}/rpm/upload"
|
||||
3
.gitignore
vendored
3
.gitignore
vendored
@@ -9,6 +9,8 @@ dist/
|
||||
.env
|
||||
.env.local
|
||||
.env.*.local
|
||||
stack/.env
|
||||
.portainer_password
|
||||
|
||||
# Logs
|
||||
logs/
|
||||
@@ -35,3 +37,4 @@ pgdata/
|
||||
|
||||
# Prisma
|
||||
src/db/prisma/migrations/*.sql.backup
|
||||
logs.sh
|
||||
|
||||
272
.taskmaster/docs/prd-v2-architecture.md
Normal file
272
.taskmaster/docs/prd-v2-architecture.md
Normal file
@@ -0,0 +1,272 @@
|
||||
# mcpctl v2 - Corrected 3-Tier Architecture PRD
|
||||
|
||||
## Overview
|
||||
|
||||
mcpctl is a kubectl-inspired system for managing MCP (Model Context Protocol) servers. It consists of 4 components arranged in a 3-tier architecture:
|
||||
|
||||
```
|
||||
Claude Code
|
||||
|
|
||||
v (stdio - MCP protocol)
|
||||
mcplocal (Local Daemon - runs on developer machine)
|
||||
|
|
||||
v (HTTP REST)
|
||||
mcpd (External Daemon - runs on server/NAS)
|
||||
|
|
||||
v (Docker API / K8s API)
|
||||
mcp_servers (MCP server containers)
|
||||
```
|
||||
|
||||
## Components
|
||||
|
||||
### 1. mcpctl (CLI Tool)
|
||||
- **Package**: `src/cli/` (`@mcpctl/cli`)
|
||||
- **What it is**: kubectl-like CLI for managing the entire system
|
||||
- **Talks to**: mcplocal (local daemon) via HTTP REST
|
||||
- **Key point**: mcpctl does NOT talk to mcpd directly. It always goes through mcplocal.
|
||||
- **Distributed as**: RPM package via Gitea registry (bun compile + nfpm)
|
||||
- **Commands**: get, describe, apply, setup, instance, claude, project, backup, restore, config, status
|
||||
|
||||
### 2. mcplocal (Local Daemon)
|
||||
- **Package**: `src/local-proxy/` (rename to `src/mcplocal/`)
|
||||
- **What it is**: Local daemon running on the developer's machine
|
||||
- **Talks to**: mcpd (external daemon) via HTTP REST
|
||||
- **Exposes to Claude**: MCP protocol via stdio (tools, resources, prompts)
|
||||
- **Exposes to mcpctl**: HTTP REST API for management commands
|
||||
|
||||
**Core responsibility: LLM Pre-processing**
|
||||
|
||||
This is the intelligence layer. When Claude asks for data from MCP servers, mcplocal:
|
||||
|
||||
1. Receives Claude's request (e.g., "get Slack messages about security")
|
||||
2. Uses a local/cheap LLM (Gemini CLI binary, Ollama, vLLM, DeepSeek API) to interpret what Claude actually wants
|
||||
3. Sends narrow, filtered requests to mcpd which forwards to the actual MCP servers
|
||||
4. Receives raw results from MCP servers (via mcpd)
|
||||
5. Uses the local LLM again to filter/summarize results - extracting only what's relevant
|
||||
6. Returns the smallest, most comprehensive response to Claude
|
||||
|
||||
**Why**: Claude Code tokens are expensive. Instead of dumping 500 Slack messages into Claude's context window, mcplocal uses a cheap LLM to pre-filter to the 12 relevant ones.
|
||||
|
||||
**LLM Provider Strategy** (already partially exists):
|
||||
- Gemini CLI binary (local, free)
|
||||
- Ollama (local, free)
|
||||
- vLLM (local, free)
|
||||
- DeepSeek API (cheap)
|
||||
- OpenAI API (fallback)
|
||||
- Anthropic API (fallback)
|
||||
|
||||
**Additional mcplocal responsibilities**:
|
||||
- MCP protocol routing (namespace tools: `slack/send_message`, `jira/create_issue`)
|
||||
- Connection health monitoring for upstream MCP servers
|
||||
- Caching frequently requested data
|
||||
- Proxying mcpctl management commands to mcpd
|
||||
|
||||
### 3. mcpd (External Daemon)
|
||||
- **Package**: `src/mcpd/` (`@mcpctl/mcpd`)
|
||||
- **What it is**: Server-side daemon that runs on centralized infrastructure (Synology NAS, cloud server, etc.)
|
||||
- **Deployed via**: Docker Compose (Dockerfile + docker-compose.yml)
|
||||
- **Database**: PostgreSQL for state, audit logs, access control
|
||||
|
||||
**Core responsibilities**:
|
||||
- **Deploy and run MCP server containers** (Docker now, Kubernetes later)
|
||||
- **Instance lifecycle management**: start, stop, restart, logs, inspect
|
||||
- **MCP server registry**: Store server definitions, configuration templates, profiles
|
||||
- **Project management**: Group MCP profiles into projects for Claude sessions
|
||||
- **Auditing**: Log every operation - who ran what, when, with what result
|
||||
- **Access management**: Users, sessions, permissions - who can access which MCP servers
|
||||
- **Credential storage**: MCP servers often need API tokens (Slack, Jira, GitHub) - stored securely on server side, never exposed to local machine
|
||||
- **Backup/restore**: Export and import configuration
|
||||
|
||||
**Key point**: mcpd holds the credentials. When mcplocal asks mcpd to query Slack, mcpd runs the Slack MCP server container with the proper SLACK_TOKEN injected - mcplocal never sees the token.
|
||||
|
||||
### 4. mcp_servers (MCP Server Containers)
|
||||
- **What they are**: The actual MCP server processes (Slack, Jira, GitHub, Terraform, filesystem, postgres, etc.)
|
||||
- **Managed by**: mcpd via Docker/Podman API
|
||||
- **Network**: Isolated network, only accessible by mcpd
|
||||
- **Credentials**: Injected by mcpd as environment variables
|
||||
- **Communication**: MCP protocol (stdio or SSE/HTTP) between mcpd and the containers
|
||||
|
||||
## Data Flow Examples
|
||||
|
||||
### Example 1: Claude asks for Slack messages
|
||||
```
|
||||
Claude: "Get messages about security incidents from the last week"
|
||||
|
|
||||
v (MCP tools/call: slack/search_messages)
|
||||
mcplocal:
|
||||
1. Intercepts the tool call
|
||||
2. Calls local Gemini: "User wants security incident messages from last week.
|
||||
Generate optimal Slack search query and date filters."
|
||||
3. Gemini returns: query="security incident OR vulnerability OR CVE", after="2024-01-15"
|
||||
4. Sends filtered request to mcpd
|
||||
|
|
||||
v (HTTP POST /api/v1/mcp/proxy)
|
||||
mcpd:
|
||||
1. Looks up Slack MCP instance (injects SLACK_TOKEN)
|
||||
2. Forwards narrowed query to Slack MCP server container
|
||||
3. Returns raw results (200 messages)
|
||||
|
|
||||
v (response)
|
||||
mcplocal:
|
||||
1. Receives 200 messages
|
||||
2. Calls local Gemini: "Filter these 200 Slack messages. Keep only those
|
||||
directly about security incidents. Return message IDs and 1-line summaries."
|
||||
3. Gemini returns: 15 relevant messages with summaries
|
||||
4. Returns filtered result to Claude
|
||||
|
|
||||
v (MCP response: 15 messages instead of 200)
|
||||
Claude: processes only the relevant 15 messages
|
||||
```
|
||||
|
||||
### Example 2: mcpctl management command
|
||||
```
|
||||
$ mcpctl get servers
|
||||
|
|
||||
v (HTTP GET)
|
||||
mcplocal:
|
||||
1. Recognizes this is a management command (not MCP data)
|
||||
2. Proxies directly to mcpd (no LLM processing needed)
|
||||
|
|
||||
v (HTTP GET /api/v1/servers)
|
||||
mcpd:
|
||||
1. Queries PostgreSQL for server definitions
|
||||
2. Returns list
|
||||
|
|
||||
v (proxied response)
|
||||
mcplocal -> mcpctl -> formatted table output
|
||||
```
|
||||
|
||||
### Example 3: mcpctl instance management
|
||||
```
|
||||
$ mcpctl instance start slack
|
||||
|
|
||||
v
|
||||
mcplocal -> mcpd:
|
||||
1. Creates Docker container for Slack MCP server
|
||||
2. Injects SLACK_TOKEN from secure storage
|
||||
3. Connects to isolated mcp-servers network
|
||||
4. Logs audit entry: "user X started slack instance"
|
||||
5. Returns instance status
|
||||
```
|
||||
|
||||
## What Already Exists (completed work)
|
||||
|
||||
### Done and reusable as-is:
|
||||
- Project structure: pnpm monorepo, TypeScript strict mode, Vitest, ESLint
|
||||
- Database schema: Prisma + PostgreSQL (User, McpServer, McpProfile, Project, McpInstance, AuditLog)
|
||||
- mcpd server framework: Fastify 5, routes, services, repositories, middleware
|
||||
- mcpd MCP server CRUD: registration, profiles, projects
|
||||
- mcpd Docker container management: dockerode, instance lifecycle
|
||||
- mcpd audit logging, health monitoring, metrics, backup/restore
|
||||
- mcpctl CLI framework: Commander.js, commands, config, API client, formatters
|
||||
- mcpctl RPM distribution: bun compile, nfpm, Gitea publishing, shell completions
|
||||
- MCP protocol routing in local-proxy: namespace tools, resources, prompts
|
||||
- LLM provider abstractions: OpenAI, Anthropic, Ollama adapters (defined but unused)
|
||||
- Shared types and profile templates
|
||||
|
||||
### Needs rework:
|
||||
- mcpctl currently talks to mcpd directly -> must talk to mcplocal instead
|
||||
- local-proxy is just a dumb router -> needs LLM pre-processing intelligence
|
||||
- local-proxy has no HTTP API for mcpctl -> needs REST endpoints for management proxying
|
||||
- mcpd has no MCP proxy endpoint -> needs endpoint that mcplocal can call to execute MCP tool calls on managed instances
|
||||
- No integration between LLM providers and MCP request/response pipeline
|
||||
|
||||
## New Tasks Needed
|
||||
|
||||
### Phase 1: Rename and restructure local-proxy -> mcplocal
|
||||
- Rename `src/local-proxy/` to `src/mcplocal/`
|
||||
- Update all package references and imports
|
||||
- Add HTTP REST server (Fastify) alongside existing stdio server
|
||||
- mcplocal needs TWO interfaces: stdio for Claude, HTTP for mcpctl
|
||||
|
||||
### Phase 2: mcplocal management proxy
|
||||
- Add REST endpoints that mirror mcpd's API (get servers, instances, projects, etc.)
|
||||
- mcpctl config changes: `daemonUrl` now points to mcplocal (e.g., localhost:3200) instead of mcpd
|
||||
- mcplocal proxies management requests to mcpd (configurable `mcpdUrl` e.g., http://nas:3100)
|
||||
- Pass-through with no LLM processing for management commands
|
||||
|
||||
### Phase 3: mcpd MCP proxy endpoint
|
||||
- Add `/api/v1/mcp/proxy` endpoint to mcpd
|
||||
- Accepts: `{ serverId, method, params }` - execute an MCP tool call on a managed instance
|
||||
- mcpd looks up the instance, connects to the container, executes the MCP call, returns result
|
||||
- This is how mcplocal talks to MCP servers without needing direct Docker access
|
||||
|
||||
### Phase 4: LLM pre-processing pipeline in mcplocal
|
||||
- Create request interceptor in mcplocal's MCP router
|
||||
- Before forwarding `tools/call` to mcpd, run the request through LLM for interpretation
|
||||
- After receiving response from mcpd, run through LLM for filtering/summarization
|
||||
- LLM provider selection based on config (prefer local/cheap models)
|
||||
- Configurable: enable/disable pre-processing per server or per tool
|
||||
- Bypass for simple operations (list, create, delete - no filtering needed)
|
||||
|
||||
### Phase 5: Smart context optimization
|
||||
- Token counting: estimate how many tokens the raw response would consume
|
||||
- Decision logic: if raw response < threshold, skip LLM filtering (not worth the latency)
|
||||
- If raw response > threshold, filter with LLM
|
||||
- Cache LLM filtering decisions for repeated similar queries
|
||||
- Metrics: track tokens saved, latency added by filtering
|
||||
|
||||
### Phase 6: mcpctl -> mcplocal migration
|
||||
- Update mcpctl's default daemonUrl to point to mcplocal (localhost:3200)
|
||||
- Update all CLI commands to work through mcplocal proxy
|
||||
- Add `mcpctl config set mcpd-url <url>` for configuring upstream mcpd
|
||||
- Add `mcpctl config set mcplocal-url <url>` for configuring local daemon
|
||||
- Health check: `mcpctl status` shows both mcplocal and mcpd connectivity
|
||||
- Shell completions update if needed
|
||||
|
||||
### Phase 7: End-to-end integration testing
|
||||
- Test full flow: mcpctl -> mcplocal -> mcpd -> mcp_server -> response -> LLM filter -> Claude
|
||||
- Test management commands pass through correctly
|
||||
- Test LLM pre-processing reduces context window size
|
||||
- Test credential isolation (mcplocal never sees MCP server credentials)
|
||||
- Test health monitoring across all tiers
|
||||
|
||||
## Authentication & Authorization
|
||||
|
||||
### Database ownership
|
||||
- **mcpd owns the database** (PostgreSQL). It is the only component that talks to the DB.
|
||||
- mcplocal has NO database. It is stateless (config file only).
|
||||
- mcpctl has NO database. It stores user credentials locally in `~/.mcpctl/config.yaml`.
|
||||
|
||||
### Auth flow
|
||||
```
|
||||
mcpctl login
|
||||
|
|
||||
v (user enters mcpd URL + credentials)
|
||||
mcpctl stores API token in ~/.mcpctl/config.yaml
|
||||
|
|
||||
v (passes token to mcplocal config)
|
||||
mcplocal authenticates to mcpd using Bearer token on every request
|
||||
|
|
||||
v (Authorization: Bearer <token>)
|
||||
mcpd validates token against Session table in PostgreSQL
|
||||
|
|
||||
v (authenticated request proceeds)
|
||||
```
|
||||
|
||||
### mcpctl responsibilities
|
||||
- `mcpctl login` command: prompts user for mcpd URL and credentials (username/password or API token)
|
||||
- `mcpctl login` calls mcpd's auth endpoint to get a session token
|
||||
- Stores the token in `~/.mcpctl/config.yaml` (or `~/.mcpctl/credentials` with restricted permissions)
|
||||
- Passes the token to mcplocal (either via config or as startup argument)
|
||||
- `mcpctl logout` command: invalidates the session token
|
||||
|
||||
### mcplocal responsibilities
|
||||
- Reads auth token from its config (set by mcpctl)
|
||||
- Attaches `Authorization: Bearer <token>` header to ALL requests to mcpd
|
||||
- If mcpd returns 401, mcplocal returns appropriate error to mcpctl/Claude
|
||||
- Does NOT store credentials itself - they come from mcpctl's config
|
||||
|
||||
### mcpd responsibilities
|
||||
- Owns User and Session tables
|
||||
- Provides auth endpoints: `POST /api/v1/auth/login`, `POST /api/v1/auth/logout`
|
||||
- Validates Bearer tokens on every request via auth middleware (already exists)
|
||||
- Returns 401 for invalid/expired tokens
|
||||
- Audit logs include the authenticated user
|
||||
|
||||
## Non-functional Requirements
|
||||
- mcplocal must start fast (developer's machine, runs per-session or as daemon)
|
||||
- LLM pre-processing must not add more than 2-3 seconds latency
|
||||
- If local LLM is unavailable, fall back to passing data through unfiltered
|
||||
- All components must be independently deployable and testable
|
||||
- mcpd must remain stateless (outside of DB) and horizontally scalable
|
||||
File diff suppressed because one or more lines are too long
69
cli-buildrelease.sh
Executable file
69
cli-buildrelease.sh
Executable file
@@ -0,0 +1,69 @@
|
||||
#!/bin/bash
|
||||
set -e
|
||||
|
||||
cd "$(dirname "$0")"
|
||||
|
||||
# Load .env if present
|
||||
if [ -f .env ]; then
|
||||
set -a; source .env; set +a
|
||||
fi
|
||||
|
||||
# Ensure tools are on PATH
|
||||
export PATH="$HOME/.npm-global/bin:$HOME/.bun/bin:$HOME/.local/bin:$PATH"
|
||||
|
||||
echo "=== mcpctl CLI build & release ==="
|
||||
echo ""
|
||||
|
||||
# 1. Build TypeScript
|
||||
echo "==> Building TypeScript..."
|
||||
pnpm build
|
||||
|
||||
# 2. Bundle standalone binary
|
||||
echo "==> Bundling standalone binary..."
|
||||
mkdir -p dist
|
||||
rm -f dist/mcpctl dist/mcpctl-*.rpm
|
||||
bun build src/cli/src/index.ts --compile --outfile dist/mcpctl
|
||||
echo " Binary: $(du -h dist/mcpctl | cut -f1)"
|
||||
|
||||
# 3. Package RPM
|
||||
echo "==> Packaging RPM..."
|
||||
nfpm pkg --packager rpm --target dist/
|
||||
RPM_FILE=$(ls dist/mcpctl-*.rpm 2>/dev/null | head -1)
|
||||
RPM_VERSION=$(rpm -qp --queryformat '%{VERSION}-%{RELEASE}' "$RPM_FILE")
|
||||
echo " RPM: $RPM_FILE ($(du -h "$RPM_FILE" | cut -f1))"
|
||||
|
||||
# 4. Publish to Gitea
|
||||
GITEA_URL="${GITEA_URL:-http://10.0.0.194:3012}"
|
||||
GITEA_OWNER="${GITEA_OWNER:-michal}"
|
||||
|
||||
if [ -z "$GITEA_TOKEN" ]; then
|
||||
echo ""
|
||||
echo "WARNING: GITEA_TOKEN not set, skipping publish. Add it to .env"
|
||||
echo ""
|
||||
else
|
||||
echo "==> Publishing to ${GITEA_URL}..."
|
||||
EXISTING=$(curl -s -o /dev/null -w "%{http_code}" \
|
||||
-H "Authorization: token ${GITEA_TOKEN}" \
|
||||
"${GITEA_URL}/api/v1/packages/${GITEA_OWNER}/rpm/mcpctl/${RPM_VERSION}")
|
||||
|
||||
if [ "$EXISTING" = "200" ]; then
|
||||
echo " Replacing existing version $RPM_VERSION..."
|
||||
curl -s -o /dev/null -X DELETE \
|
||||
-H "Authorization: token ${GITEA_TOKEN}" \
|
||||
"${GITEA_URL}/api/v1/packages/${GITEA_OWNER}/rpm/mcpctl/${RPM_VERSION}"
|
||||
fi
|
||||
|
||||
curl --fail -s -X PUT \
|
||||
-H "Authorization: token ${GITEA_TOKEN}" \
|
||||
--upload-file "$RPM_FILE" \
|
||||
"${GITEA_URL}/api/packages/${GITEA_OWNER}/rpm/upload"
|
||||
echo " Published!"
|
||||
fi
|
||||
|
||||
# 5. Install locally
|
||||
echo "==> Installing..."
|
||||
sudo rpm -U --force "$RPM_FILE"
|
||||
|
||||
echo ""
|
||||
echo "=== Done ==="
|
||||
mcpctl --version
|
||||
93
completions/mcpctl.bash
Normal file
93
completions/mcpctl.bash
Normal file
@@ -0,0 +1,93 @@
|
||||
_mcpctl() {
|
||||
local cur prev words cword
|
||||
_init_completion || return
|
||||
|
||||
local commands="config status get describe instance instances apply setup claude project projects backup restore help"
|
||||
local global_opts="-v --version -o --output --daemon-url -h --help"
|
||||
local resources="servers profiles projects instances"
|
||||
|
||||
case "${words[1]}" in
|
||||
config)
|
||||
COMPREPLY=($(compgen -W "view set path reset help" -- "$cur"))
|
||||
return ;;
|
||||
status)
|
||||
COMPREPLY=($(compgen -W "--daemon-url -h --help" -- "$cur"))
|
||||
return ;;
|
||||
get)
|
||||
if [[ $cword -eq 2 ]]; then
|
||||
COMPREPLY=($(compgen -W "$resources" -- "$cur"))
|
||||
else
|
||||
COMPREPLY=($(compgen -W "-o --output --daemon-url -h --help" -- "$cur"))
|
||||
fi
|
||||
return ;;
|
||||
describe)
|
||||
if [[ $cword -eq 2 ]]; then
|
||||
COMPREPLY=($(compgen -W "$resources" -- "$cur"))
|
||||
else
|
||||
COMPREPLY=($(compgen -W "-o --output --daemon-url -h --help" -- "$cur"))
|
||||
fi
|
||||
return ;;
|
||||
instance|instances)
|
||||
if [[ $cword -eq 2 ]]; then
|
||||
COMPREPLY=($(compgen -W "list ls start stop restart remove rm logs inspect help" -- "$cur"))
|
||||
else
|
||||
case "${words[2]}" in
|
||||
logs)
|
||||
COMPREPLY=($(compgen -W "--tail --since -h --help" -- "$cur"))
|
||||
;;
|
||||
start)
|
||||
COMPREPLY=($(compgen -W "--env --image -h --help" -- "$cur"))
|
||||
;;
|
||||
list|ls)
|
||||
COMPREPLY=($(compgen -W "--server-id -o --output -h --help" -- "$cur"))
|
||||
;;
|
||||
esac
|
||||
fi
|
||||
return ;;
|
||||
claude)
|
||||
if [[ $cword -eq 2 ]]; then
|
||||
COMPREPLY=($(compgen -W "generate show add remove help" -- "$cur"))
|
||||
else
|
||||
case "${words[2]}" in
|
||||
generate|show|add|remove)
|
||||
COMPREPLY=($(compgen -W "--path -p -h --help" -- "$cur"))
|
||||
;;
|
||||
esac
|
||||
fi
|
||||
return ;;
|
||||
project|projects)
|
||||
if [[ $cword -eq 2 ]]; then
|
||||
COMPREPLY=($(compgen -W "list ls create delete rm show profiles set-profiles help" -- "$cur"))
|
||||
else
|
||||
case "${words[2]}" in
|
||||
create)
|
||||
COMPREPLY=($(compgen -W "--description -d -h --help" -- "$cur"))
|
||||
;;
|
||||
list|ls)
|
||||
COMPREPLY=($(compgen -W "-o --output -h --help" -- "$cur"))
|
||||
;;
|
||||
esac
|
||||
fi
|
||||
return ;;
|
||||
apply)
|
||||
COMPREPLY=($(compgen -f -- "$cur"))
|
||||
return ;;
|
||||
backup)
|
||||
COMPREPLY=($(compgen -W "-o --output -p --password -r --resources -h --help" -- "$cur"))
|
||||
return ;;
|
||||
restore)
|
||||
COMPREPLY=($(compgen -W "-i --input -p --password -c --conflict -h --help" -- "$cur"))
|
||||
return ;;
|
||||
setup)
|
||||
return ;;
|
||||
help)
|
||||
COMPREPLY=($(compgen -W "$commands" -- "$cur"))
|
||||
return ;;
|
||||
esac
|
||||
|
||||
if [[ $cword -eq 1 ]]; then
|
||||
COMPREPLY=($(compgen -W "$commands $global_opts" -- "$cur"))
|
||||
fi
|
||||
}
|
||||
|
||||
complete -F _mcpctl mcpctl
|
||||
81
completions/mcpctl.fish
Normal file
81
completions/mcpctl.fish
Normal file
@@ -0,0 +1,81 @@
|
||||
# mcpctl fish completions
|
||||
|
||||
set -l commands config status get describe instance instances apply setup claude project projects backup restore help
|
||||
|
||||
# Disable file completions by default
|
||||
complete -c mcpctl -f
|
||||
|
||||
# Global options
|
||||
complete -c mcpctl -s v -l version -d 'Show version'
|
||||
complete -c mcpctl -s o -l output -d 'Output format' -xa 'table json yaml'
|
||||
complete -c mcpctl -l daemon-url -d 'mcpd daemon URL' -x
|
||||
complete -c mcpctl -s h -l help -d 'Show help'
|
||||
|
||||
# Top-level commands
|
||||
complete -c mcpctl -n "not __fish_seen_subcommand_from $commands" -a config -d 'Manage configuration'
|
||||
complete -c mcpctl -n "not __fish_seen_subcommand_from $commands" -a status -d 'Show status and connectivity'
|
||||
complete -c mcpctl -n "not __fish_seen_subcommand_from $commands" -a get -d 'List resources'
|
||||
complete -c mcpctl -n "not __fish_seen_subcommand_from $commands" -a describe -d 'Show resource details'
|
||||
complete -c mcpctl -n "not __fish_seen_subcommand_from $commands" -a instance -d 'Manage instances'
|
||||
complete -c mcpctl -n "not __fish_seen_subcommand_from $commands" -a apply -d 'Apply configuration from file'
|
||||
complete -c mcpctl -n "not __fish_seen_subcommand_from $commands" -a setup -d 'Interactive setup wizard'
|
||||
complete -c mcpctl -n "not __fish_seen_subcommand_from $commands" -a claude -d 'Manage Claude .mcp.json'
|
||||
complete -c mcpctl -n "not __fish_seen_subcommand_from $commands" -a project -d 'Manage projects'
|
||||
complete -c mcpctl -n "not __fish_seen_subcommand_from $commands" -a backup -d 'Backup configuration'
|
||||
complete -c mcpctl -n "not __fish_seen_subcommand_from $commands" -a restore -d 'Restore from backup'
|
||||
complete -c mcpctl -n "not __fish_seen_subcommand_from $commands" -a help -d 'Show help'
|
||||
|
||||
# get/describe resources
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from get describe" -a 'servers profiles projects instances' -d 'Resource type'
|
||||
|
||||
# config subcommands
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from config; and not __fish_seen_subcommand_from view set path reset" -a view -d 'Show configuration'
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from config; and not __fish_seen_subcommand_from view set path reset" -a set -d 'Set a config value'
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from config; and not __fish_seen_subcommand_from view set path reset" -a path -d 'Show config file path'
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from config; and not __fish_seen_subcommand_from view set path reset" -a reset -d 'Reset to defaults'
|
||||
|
||||
# instance subcommands
|
||||
set -l instance_cmds list ls start stop restart remove rm logs inspect
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from instance instances; and not __fish_seen_subcommand_from $instance_cmds" -a list -d 'List instances'
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from instance instances; and not __fish_seen_subcommand_from $instance_cmds" -a start -d 'Start instance'
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from instance instances; and not __fish_seen_subcommand_from $instance_cmds" -a stop -d 'Stop instance'
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from instance instances; and not __fish_seen_subcommand_from $instance_cmds" -a restart -d 'Restart instance'
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from instance instances; and not __fish_seen_subcommand_from $instance_cmds" -a remove -d 'Remove instance'
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from instance instances; and not __fish_seen_subcommand_from $instance_cmds" -a logs -d 'Get logs'
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from instance instances; and not __fish_seen_subcommand_from $instance_cmds" -a inspect -d 'Inspect container'
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from instance instances; and __fish_seen_subcommand_from logs" -l tail -d 'Number of lines' -x
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from instance instances; and __fish_seen_subcommand_from logs" -l since -d 'Since timestamp' -x
|
||||
|
||||
# claude subcommands
|
||||
set -l claude_cmds generate show add remove
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from claude; and not __fish_seen_subcommand_from $claude_cmds" -a generate -d 'Generate .mcp.json'
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from claude; and not __fish_seen_subcommand_from $claude_cmds" -a show -d 'Show .mcp.json'
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from claude; and not __fish_seen_subcommand_from $claude_cmds" -a add -d 'Add server entry'
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from claude; and not __fish_seen_subcommand_from $claude_cmds" -a remove -d 'Remove server entry'
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from claude; and __fish_seen_subcommand_from $claude_cmds" -s p -l path -d 'Path to .mcp.json' -rF
|
||||
|
||||
# project subcommands
|
||||
set -l project_cmds list ls create delete rm show profiles set-profiles
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from project projects; and not __fish_seen_subcommand_from $project_cmds" -a list -d 'List projects'
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from project projects; and not __fish_seen_subcommand_from $project_cmds" -a create -d 'Create project'
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from project projects; and not __fish_seen_subcommand_from $project_cmds" -a delete -d 'Delete project'
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from project projects; and not __fish_seen_subcommand_from $project_cmds" -a show -d 'Show project'
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from project projects; and not __fish_seen_subcommand_from $project_cmds" -a profiles -d 'List profiles'
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from project projects; and not __fish_seen_subcommand_from $project_cmds" -a set-profiles -d 'Set profiles'
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from project projects; and __fish_seen_subcommand_from create" -s d -l description -d 'Description' -x
|
||||
|
||||
# backup options
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from backup" -s o -l output -d 'Output file' -rF
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from backup" -s p -l password -d 'Encryption password' -x
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from backup" -s r -l resources -d 'Resources to backup' -xa 'servers profiles projects'
|
||||
|
||||
# restore options
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from restore" -s i -l input -d 'Input file' -rF
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from restore" -s p -l password -d 'Decryption password' -x
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from restore" -s c -l conflict -d 'Conflict strategy' -xa 'skip overwrite fail'
|
||||
|
||||
# apply takes a file
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from apply" -F
|
||||
|
||||
# help completions
|
||||
complete -c mcpctl -n "__fish_seen_subcommand_from help" -a "$commands"
|
||||
398
deploy.sh
Executable file
398
deploy.sh
Executable file
@@ -0,0 +1,398 @@
|
||||
#!/bin/bash
|
||||
# Deploy mcpctl stack to Portainer
|
||||
# Usage: ./deploy.sh [--dry-run]
|
||||
|
||||
set -e
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
STACK_DIR="$SCRIPT_DIR/stack"
|
||||
COMPOSE_FILE="$STACK_DIR/docker-compose.yml"
|
||||
ENV_FILE="$STACK_DIR/.env"
|
||||
|
||||
# Portainer configuration
|
||||
PORTAINER_URL="${PORTAINER_URL:-http://10.0.0.194:9000}"
|
||||
PORTAINER_USER="${PORTAINER_USER:-michal}"
|
||||
STACK_NAME="mcpctl"
|
||||
ENDPOINT_ID="2"
|
||||
|
||||
# Colors for output
|
||||
RED='\033[0;31m'
|
||||
GREEN='\033[0;32m'
|
||||
YELLOW='\033[1;33m'
|
||||
NC='\033[0m'
|
||||
|
||||
log_info() { echo -e "${GREEN}[INFO]${NC} $1" >&2; }
|
||||
log_warn() { echo -e "${YELLOW}[WARN]${NC} $1" >&2; }
|
||||
log_error() { echo -e "${RED}[ERROR]${NC} $1" >&2; }
|
||||
|
||||
# Abort early unless both required input files exist; log what was found.
check_files() {
    [[ -f "$COMPOSE_FILE" ]] || { log_error "Compose file not found: $COMPOSE_FILE"; exit 1; }
    [[ -f "$ENV_FILE" ]] || { log_error "Environment file not found: $ENV_FILE"; exit 1; }
    log_info "Found compose file: $COMPOSE_FILE"
    log_info "Found env file: $ENV_FILE"
}
|
||||
|
||||
# Resolve the Portainer password: $PORTAINER_PASSWORD, then a password file
# next to the script, then one in $HOME, finally an interactive prompt.
# The password goes to stdout; the prompt newline goes to stderr so command
# substitution stays clean.
get_password() {
    if [[ -n "$PORTAINER_PASSWORD" ]]; then
        echo "$PORTAINER_PASSWORD"
        return
    fi

    local candidate
    for candidate in "$SCRIPT_DIR/.portainer_password" "$HOME/.portainer_password"; do
        if [[ -f "$candidate" ]]; then
            cat "$candidate"
            return
        fi
    done

    read -s -p "Enter Portainer password for $PORTAINER_USER: " password
    echo >&2
    echo "$password"
}
|
||||
|
||||
# Authenticate against the Portainer API and print a JWT token to stdout.
#   $1 = password
# Exits the script when authentication fails.
get_jwt_token() {
    local password="$1"
    log_info "Authenticating to Portainer..."

    # Build the auth payload with jq so BOTH username and password are
    # JSON-escaped. The original only escaped the password, so a username
    # containing quotes/backslashes produced an invalid request body.
    local payload
    payload=$(jq -n --arg user "$PORTAINER_USER" --arg pass "$password" \
        '{Username: $user, Password: $pass}')

    local response
    response=$(curl -s -X POST "$PORTAINER_URL/api/auth" \
        -H "Content-Type: application/json" \
        -d "$payload")

    local token
    token=$(echo "$response" | jq -r '.jwt // empty')

    if [[ -z "$token" ]]; then
        log_error "Authentication failed: $(echo "$response" | jq -r '.message // "Unknown error"')"
        exit 1
    fi
    echo "$token"
}
|
||||
|
||||
# Convert a KEY=VALUE env file into a JSON array of {name, value} objects.
#   $1 = path to the env file
# Comment lines (optionally indented) and blank lines are skipped, as are
# lines without an '='. Escaping is delegated to jq --arg, which handles
# quotes, backslashes, tabs, and other control characters — the original
# sed-based escaping only covered backslash and double quote.
parse_env_to_json() {
    local env_file="$1"
    local entries=()

    while IFS= read -r line || [[ -n "$line" ]]; do
        # Skip comments (allow leading whitespace) and empty lines
        [[ "$line" =~ ^[[:space:]]*# ]] && continue
        [[ -z "$line" ]] && continue

        # Split on the FIRST '='; everything after it is the value
        local name="${line%%=*}"
        local value="${line#*=}"
        # No '=' present — not a variable assignment, skip it
        [[ "$name" == "$line" ]] && continue

        entries+=("$(jq -n --arg name "$name" --arg value "$value" \
            '{name: $name, value: $value}')")
    done < "$env_file"

    if [[ ${#entries[@]} -eq 0 ]]; then
        echo "[]"
    else
        # Slurp the individual objects into a single JSON array
        printf '%s\n' "${entries[@]}" | jq -s '.'
    fi
}
|
||||
|
||||
# Find existing stack by name
|
||||
# Look up an existing stack ID by $STACK_NAME; prints nothing when absent.
#   $1 = JWT token
find_stack_id() {
    local token="$1"
    local response
    response=$(curl -s -X GET "$PORTAINER_URL/api/stacks" \
        -H "Authorization: Bearer $token")

    # first() guarantees at most ONE id even if several stacks share the
    # name — the original `.[] | select(...)` could print multiple ids,
    # which would then be interpolated into a single request URL.
    echo "$response" | jq -r --arg name "$STACK_NAME" \
        'first(.[] | select(.Name == $name) | .Id) // empty'
}
|
||||
|
||||
# Fetch the JSON description of a single stack and print it to stdout.
#   $1 = JWT token, $2 = stack ID
get_stack_info() {
    local token="$1" stack_id="$2"
    curl -s -X GET "$PORTAINER_URL/api/stacks/$stack_id" \
        -H "Authorization: Bearer $token" \
        -H "Content-Type: application/json"
}
|
||||
|
||||
# Print the compose file content currently stored for a stack.
# Falls back to a placeholder comment when the API response has no
# StackFileContent field.
#   $1 = JWT token, $2 = stack ID
get_stack_file() {
    local token="$1" stack_id="$2"

    local response
    response=$(curl -s -X GET "$PORTAINER_URL/api/stacks/$stack_id/file" \
        -H "Authorization: Bearer $token" \
        -H "Content-Type: application/json")

    # jq -e exits non-zero when the field is null/missing
    if ! echo "$response" | jq -e '.StackFileContent' > /dev/null 2>&1; then
        echo "# Could not retrieve current compose file"
        return
    fi
    echo "$response" | jq -r '.StackFileContent'
}
|
||||
|
||||
# Render a unified diff file with ANSI colors. Header lines (---/+++/@@)
# are yellow, removals red, additions green. Header prefixes must be
# matched BEFORE the bare -/+ prefixes.
#   $1 = path to a diff file
_print_colored_diff() {
    local diff_file="$1"
    while IFS= read -r line; do
        if [[ "$line" == ---* ]] || [[ "$line" == +++* ]] || [[ "$line" == @@* ]]; then
            echo -e "${YELLOW}$line${NC}"
        elif [[ "$line" == -* ]]; then
            echo -e "${RED}$line${NC}"
        elif [[ "$line" == +* ]]; then
            echo -e "${GREEN}$line${NC}"
        else
            echo "$line"
        fi
    done < "$diff_file"
}

# Show a colored diff between what Portainer currently runs (compose file +
# env vars) and what a deploy would push. Used by --dry-run.
#   $1 = JWT token, $2 = stack ID, $3 = new env vars as JSON array
show_diff() {
    local token="$1"
    local stack_id="$2"
    local env_json="$3"

    log_info "Fetching current state from Portainer..."

    local current_compose
    current_compose=$(get_stack_file "$token" "$stack_id")

    # Current env vars, normalized to sorted KEY=VALUE lines for diffing
    local stack_info
    stack_info=$(get_stack_info "$token" "$stack_id")
    local current_env
    current_env=$(echo "$stack_info" | jq -r 'if .Env then .Env[] | "\(.name)=\(.value)" else empty end' 2>/dev/null | sort)

    local new_env
    new_env=$(echo "$env_json" | jq -r '.[] | "\(.name)=\(.value)"' | sort)

    local tmp_dir
    tmp_dir=$(mktemp -d)

    echo "$current_compose" > "$tmp_dir/current_compose.yml"
    cat "$COMPOSE_FILE" > "$tmp_dir/new_compose.yml"
    echo "$current_env" > "$tmp_dir/current_env.txt"
    echo "$new_env" > "$tmp_dir/new_env.txt"

    echo ""
    echo "=== ENVIRONMENT VARIABLES DIFF ==="
    echo ""

    # diff exits 0 when files are identical; the redirect captures the diff
    # for colorizing when they differ
    if diff -u "$tmp_dir/current_env.txt" "$tmp_dir/new_env.txt" > "$tmp_dir/env_diff.txt" 2>&1; then
        echo -e "${GREEN}No changes in environment variables${NC}"
    else
        _print_colored_diff "$tmp_dir/env_diff.txt"
    fi

    echo ""
    echo "=== COMPOSE FILE DIFF ==="
    echo ""

    if diff -u "$tmp_dir/current_compose.yml" "$tmp_dir/new_compose.yml" > "$tmp_dir/compose_diff.txt" 2>&1; then
        echo -e "${GREEN}No changes in compose file${NC}"
    else
        _print_colored_diff "$tmp_dir/compose_diff.txt"
    fi

    rm -rf "$tmp_dir"
}
|
||||
|
||||
# Create a brand-new Portainer stack from $COMPOSE_FILE.
#   $1 = JWT token, $2 = env vars as a JSON array of {name, value} objects
# Exits the script when the API reports an error.
create_stack() {
    local token="$1"
    local env_json="$2"

    local compose_content
    compose_content=$(cat "$COMPOSE_FILE")

    # JSON-encode the entire compose file as a single string value
    local compose_escaped
    compose_escaped=$(echo "$compose_content" | jq -Rs .)

    log_info "Creating new stack '$STACK_NAME'..."

    # Assemble the request body with jq so every field is properly escaped
    local payload
    payload=$(jq -n \
        --arg name "$STACK_NAME" \
        --argjson env "$env_json" \
        --argjson stackFileContent "$compose_escaped" \
        '{
            "name": $name,
            "env": $env,
            "stackFileContent": $stackFileContent
        }')

    # type=2&method=string — presumably "compose stack from inline string
    # content"; confirm against the Portainer API reference.
    local response
    response=$(curl -s -X POST "$PORTAINER_URL/api/stacks?type=2&method=string&endpointId=$ENDPOINT_ID" \
        -H "Authorization: Bearer $token" \
        -H "Content-Type: application/json" \
        -d "$payload")

    # Portainer signals failure via a "message" field in the response body
    local error_msg
    error_msg=$(echo "$response" | jq -r '.message // empty')

    if [[ -n "$error_msg" ]]; then
        log_error "Stack creation failed: $error_msg"
        echo "$response" | jq .
        exit 1
    fi

    local new_id
    new_id=$(echo "$response" | jq -r '.Id')
    log_info "Stack created successfully! (ID: $new_id)"
    echo "$response" | jq '{Id, Name, Status, CreationDate}'
}
|
||||
|
||||
# Update an existing stack in place, or preview the change when dry-running.
#   $1 = JWT token, $2 = stack ID, $3 = "true" to only show a diff
# Exits the script when the API reports an error.
update_stack() {
    local token="$1"
    local stack_id="$2"
    local dry_run="$3"

    local compose_content
    compose_content=$(cat "$COMPOSE_FILE")

    local env_json
    env_json=$(parse_env_to_json "$ENV_FILE")

    # Dry run: show what WOULD change against Portainer's current state, stop.
    if [[ "$dry_run" == "true" ]]; then
        log_warn "DRY RUN - Not actually deploying"
        show_diff "$token" "$stack_id" "$env_json"
        echo ""
        log_warn "DRY RUN complete - no changes made"
        log_info "Run without --dry-run to apply these changes"
        return 0
    fi

    local env_count
    env_count=$(echo "$env_json" | jq 'length')
    log_info "Deploying $env_count environment variables"
    log_info "Updating stack '$STACK_NAME' (ID: $stack_id)..."

    # JSON-encode the entire compose file as a single string value
    local compose_escaped
    compose_escaped=$(echo "$compose_content" | jq -Rs .)

    # prune/pullImage: presumably "remove orphaned services" and "pull newer
    # images on redeploy" — confirm against the Portainer API reference.
    local payload
    payload=$(jq -n \
        --argjson env "$env_json" \
        --argjson stackFileContent "$compose_escaped" \
        '{
            "env": $env,
            "stackFileContent": $stackFileContent,
            "prune": true,
            "pullImage": true
        }')

    local response
    response=$(curl -s -X PUT "$PORTAINER_URL/api/stacks/$stack_id?endpointId=$ENDPOINT_ID" \
        -H "Authorization: Bearer $token" \
        -H "Content-Type: application/json" \
        -d "$payload")

    # Portainer signals failure via a "message" field in the response body
    local error_msg
    error_msg=$(echo "$response" | jq -r '.message // empty')

    if [[ -n "$error_msg" ]]; then
        log_error "Deployment failed: $error_msg"
        echo "$response" | jq .
        exit 1
    fi

    log_info "Stack updated successfully!"
    echo "$response" | jq '{Id, Name, Status, CreationDate, UpdateDate}'
}
|
||||
|
||||
# Entry point: parse flags, authenticate, then create or update the stack.
# Supports --dry-run (diff only) and --help.
main() {
    local dry_run=false

    # Minimal flag parsing; anything unrecognized is a hard error.
    while [[ $# -gt 0 ]]; do
        case $1 in
            --dry-run)
                dry_run=true
                shift
                ;;
            --help|-h)
                echo "Usage: $0 [--dry-run]"
                echo ""
                echo "Deploy mcpctl stack to Portainer"
                echo ""
                echo "Options:"
                echo " --dry-run Show what would be deployed without actually deploying"
                echo " --help Show this help message"
                echo ""
                echo "Environment variables:"
                echo " PORTAINER_URL Portainer URL (default: http://10.0.0.194:9000)"
                echo " PORTAINER_USER Portainer username (default: michal)"
                echo " PORTAINER_PASSWORD Portainer password (or store in ~/.portainer_password)"
                exit 0
                ;;
            *)
                log_error "Unknown option: $1"
                exit 1
                ;;
        esac
    done

    echo "========================================"
    echo " mcpctl Stack Deployment"
    echo "========================================"
    echo ""

    check_files

    local password
    password=$(get_password)

    local token
    token=$(get_jwt_token "$password")
    log_info "Authentication successful"

    # Find or create stack
    local stack_id
    stack_id=$(find_stack_id "$token")

    if [[ -z "$stack_id" ]]; then
        # Stack does not exist yet — a dry run only reports, a real run creates.
        if [[ "$dry_run" == "true" ]]; then
            log_warn "Stack '$STACK_NAME' does not exist yet"
            log_info "A real deploy would create it"
            return 0
        fi

        log_info "Stack '$STACK_NAME' not found, creating..."
        local env_json
        env_json=$(parse_env_to_json "$ENV_FILE")
        create_stack "$token" "$env_json"
    else
        # Stack exists — report its status, then update (or diff in dry-run).
        local stack_info
        stack_info=$(get_stack_info "$token" "$stack_id")
        local status_code
        status_code=$(echo "$stack_info" | jq -r '.Status // 0')
        # Status codes 1/2 map to Active/Inactive — presumably per the
        # Portainer API; anything else is reported as Unknown.
        local status_text="Unknown"
        case "$status_code" in
            1) status_text="Active" ;;
            2) status_text="Inactive" ;;
        esac
        log_info "Current stack status: $status_text (ID: $stack_id, Env vars: $(echo "$stack_info" | jq '.Env | length'))"

        echo ""
        update_stack "$token" "$stack_id" "$dry_run"
    fi

    echo ""
    log_info "Done!"

    if [[ "$dry_run" == "false" ]]; then
        log_info "Check Portainer UI to verify containers are running"
        log_info "URL: $PORTAINER_URL/#!/$ENDPOINT_ID/docker/stacks/$STACK_NAME"
    fi
}

main "$@"
|
||||
64
deploy/Dockerfile.mcpd
Normal file
64
deploy/Dockerfile.mcpd
Normal file
@@ -0,0 +1,64 @@
|
||||
# Stage 1: Build TypeScript
|
||||
FROM node:20-alpine AS builder
|
||||
|
||||
RUN corepack enable && corepack prepare pnpm@9.15.0 --activate
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
# Copy workspace config and package manifests
|
||||
COPY pnpm-workspace.yaml pnpm-lock.yaml package.json tsconfig.base.json ./
|
||||
COPY src/mcpd/package.json src/mcpd/tsconfig.json src/mcpd/
|
||||
COPY src/db/package.json src/db/tsconfig.json src/db/
|
||||
COPY src/shared/package.json src/shared/tsconfig.json src/shared/
|
||||
|
||||
# Install all dependencies
|
||||
RUN pnpm install --frozen-lockfile
|
||||
|
||||
# Copy source code
|
||||
COPY src/mcpd/src/ src/mcpd/src/
|
||||
COPY src/db/src/ src/db/src/
|
||||
COPY src/db/prisma/ src/db/prisma/
|
||||
COPY src/shared/src/ src/shared/src/
|
||||
|
||||
# Generate Prisma client and build TypeScript
|
||||
RUN pnpm -F @mcpctl/db db:generate
|
||||
RUN pnpm -F @mcpctl/shared build && pnpm -F @mcpctl/db build && pnpm -F @mcpctl/mcpd build
|
||||
|
||||
# Stage 2: Production runtime
|
||||
FROM node:20-alpine
|
||||
|
||||
RUN corepack enable && corepack prepare pnpm@9.15.0 --activate
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
# Copy workspace config, manifests, and lockfile
|
||||
COPY pnpm-workspace.yaml pnpm-lock.yaml package.json ./
|
||||
COPY src/mcpd/package.json src/mcpd/
|
||||
COPY src/db/package.json src/db/
|
||||
COPY src/shared/package.json src/shared/
|
||||
|
||||
# Install all deps (prisma CLI needed at runtime for db push)
|
||||
RUN pnpm install --frozen-lockfile
|
||||
|
||||
# Copy prisma schema and generate client
|
||||
COPY src/db/prisma/ src/db/prisma/
|
||||
RUN pnpm -F @mcpctl/db db:generate
|
||||
|
||||
# Copy built output from builder
|
||||
COPY --from=builder /app/src/shared/dist/ src/shared/dist/
|
||||
COPY --from=builder /app/src/db/dist/ src/db/dist/
|
||||
COPY --from=builder /app/src/mcpd/dist/ src/mcpd/dist/
|
||||
|
||||
# Copy templates for seeding
|
||||
COPY templates/ templates/
|
||||
|
||||
# Copy entrypoint
|
||||
COPY deploy/entrypoint.sh /entrypoint.sh
|
||||
RUN chmod +x /entrypoint.sh
|
||||
|
||||
EXPOSE 3100
|
||||
|
||||
HEALTHCHECK --interval=10s --timeout=5s --retries=3 --start-period=10s \
|
||||
CMD wget -q --spider http://localhost:3100/healthz || exit 1
|
||||
|
||||
ENTRYPOINT ["/entrypoint.sh"]
|
||||
13
deploy/Dockerfile.node-runner
Normal file
13
deploy/Dockerfile.node-runner
Normal file
@@ -0,0 +1,13 @@
|
||||
# Base container for npm-based MCP servers (STDIO transport).
|
||||
# mcpd uses this image to run `npx -y <packageName>` when a server
|
||||
# has packageName but no dockerImage.
|
||||
# Using slim (Debian) instead of alpine for better npm package compatibility.
|
||||
FROM node:20-slim
|
||||
|
||||
WORKDIR /mcp
|
||||
|
||||
# Pre-warm npx cache directory
|
||||
RUN mkdir -p /root/.npm
|
||||
|
||||
# Default entrypoint — overridden by mcpd via container command
|
||||
ENTRYPOINT ["npx", "-y"]
|
||||
@@ -15,6 +15,50 @@ services:
|
||||
interval: 5s
|
||||
timeout: 5s
|
||||
retries: 5
|
||||
networks:
|
||||
- mcpctl
|
||||
|
||||
mcpd:
|
||||
build:
|
||||
context: ..
|
||||
dockerfile: deploy/Dockerfile.mcpd
|
||||
container_name: mcpctl-mcpd
|
||||
ports:
|
||||
- "3100:3100"
|
||||
environment:
|
||||
DATABASE_URL: postgresql://mcpctl:mcpctl_dev@postgres:5432/mcpctl
|
||||
MCPD_PORT: "3100"
|
||||
MCPD_HOST: "0.0.0.0"
|
||||
MCPD_LOG_LEVEL: info
|
||||
MCPD_NODE_RUNNER_IMAGE: mcpctl-node-runner:latest
|
||||
MCPD_MCP_NETWORK: mcp-servers
|
||||
depends_on:
|
||||
postgres:
|
||||
condition: service_healthy
|
||||
volumes:
|
||||
# Mount container runtime socket (Docker or Podman)
|
||||
# For Docker: /var/run/docker.sock
|
||||
# For Podman: /run/user/<UID>/podman/podman.sock
|
||||
- ${CONTAINER_SOCK:-/var/run/docker.sock}:/var/run/docker.sock
|
||||
networks:
|
||||
- mcpctl
|
||||
- mcp-servers
|
||||
healthcheck:
|
||||
test: ["CMD-SHELL", "wget -q --spider http://localhost:3100/healthz || exit 1"]
|
||||
interval: 10s
|
||||
timeout: 5s
|
||||
retries: 3
|
||||
start_period: 10s
|
||||
|
||||
# Base image for npm-based MCP servers (built once, used by mcpd)
|
||||
node-runner:
|
||||
build:
|
||||
context: ..
|
||||
dockerfile: deploy/Dockerfile.node-runner
|
||||
image: mcpctl-node-runner:latest
|
||||
profiles:
|
||||
- build
|
||||
entrypoint: ["echo", "Image built successfully"]
|
||||
|
||||
postgres-test:
|
||||
image: postgres:16-alpine
|
||||
@@ -32,6 +76,18 @@ services:
|
||||
interval: 5s
|
||||
timeout: 5s
|
||||
retries: 5
|
||||
profiles:
|
||||
- test
|
||||
|
||||
networks:
|
||||
mcpctl:
|
||||
driver: bridge
|
||||
mcp-servers:
|
||||
name: mcp-servers
|
||||
driver: bridge
|
||||
# Not internal — MCP servers need outbound access to reach external APIs
|
||||
# (e.g., Grafana, Home Assistant). Isolation is enforced by not binding
|
||||
# host ports on MCP server containers; only mcpd can reach them.
|
||||
|
||||
volumes:
|
||||
mcpctl-pgdata:
|
||||
|
||||
11
deploy/entrypoint.sh
Executable file
11
deploy/entrypoint.sh
Executable file
@@ -0,0 +1,11 @@
|
||||
#!/bin/sh
# mcpd container entrypoint: sync schema, seed templates, start the server.
set -e

echo "mcpd: pushing database schema..."
# NOTE(review): --accept-data-loss lets `prisma db push` drop columns/tables
# on schema drift — confirm this is acceptable outside dev environments.
pnpm -F @mcpctl/db exec prisma db push --schema=prisma/schema.prisma --accept-data-loss 2>&1

echo "mcpd: seeding templates..."
TEMPLATES_DIR=templates node src/mcpd/dist/seed-runner.js

echo "mcpd: starting server..."
# exec replaces the shell so node becomes PID 1 and receives signals directly
exec node src/mcpd/dist/main.js
|
||||
15
deploy/mcplocal.service
Normal file
15
deploy/mcplocal.service
Normal file
@@ -0,0 +1,15 @@
|
||||
[Unit]
|
||||
Description=mcpctl local MCP proxy
|
||||
After=network.target
|
||||
|
||||
[Service]
|
||||
Type=simple
|
||||
ExecStart=/usr/bin/mcpctl-local
|
||||
Restart=on-failure
|
||||
RestartSec=5
|
||||
Environment=MCPLOCAL_MCPD_URL=http://10.0.0.194:3100
|
||||
Environment=MCPLOCAL_HTTP_PORT=3200
|
||||
Environment=MCPLOCAL_HTTP_HOST=127.0.0.1
|
||||
|
||||
[Install]
|
||||
WantedBy=default.target
|
||||
149
docs/architecture.md
Normal file
149
docs/architecture.md
Normal file
@@ -0,0 +1,149 @@
|
||||
# mcpctl Architecture
|
||||
|
||||
## Overview
|
||||
|
||||
mcpctl is a kubectl-like management tool for MCP (Model Context Protocol) servers. It consists of a CLI, a daemon server, a database layer, a local proxy, and shared utilities.
|
||||
|
||||
## Package Structure
|
||||
|
||||
```
|
||||
src/
|
||||
├── cli/ @mcpctl/cli - Command-line interface
|
||||
├── mcpd/ @mcpctl/mcpd - Daemon server (REST API)
|
||||
├── db/ @mcpctl/db - Database layer (Prisma + PostgreSQL)
|
||||
├── local-proxy/ @mcpctl/local-proxy - MCP protocol proxy
|
||||
└── shared/ @mcpctl/shared - Shared constants and utilities
|
||||
```
|
||||
|
||||
## Component Diagram
|
||||
|
||||
```
|
||||
┌─────────────────┐ HTTP ┌──────────────┐ Prisma ┌────────────┐
|
||||
│ mcpctl CLI │ ──────────────│ mcpd │ ──────────────│ PostgreSQL │
|
||||
│ (Commander.js) │ │ (Fastify 5) │ │ │
|
||||
└─────────────────┘ └──────┬───────┘ └────────────┘
|
||||
│
|
||||
│ Docker/Podman API
|
||||
▼
|
||||
┌──────────────┐
|
||||
│ Containers │
|
||||
│ (MCP servers)│
|
||||
└──────────────┘
|
||||
|
||||
┌─────────────────┐ STDIO ┌──────────────┐ STDIO/HTTP ┌────────────┐
|
||||
│ Claude / LLM │ ────────────│ local-proxy │ ──────────────│ MCP Servers│
|
||||
│ │ │ (McpRouter) │ │ │
|
||||
└─────────────────┘ └──────────────┘ └────────────┘
|
||||
```
|
||||
|
||||
## CLI (`@mcpctl/cli`)
|
||||
|
||||
The CLI is built with Commander.js and communicates with mcpd via HTTP REST.
|
||||
|
||||
### Commands
|
||||
|
||||
| Command | Description |
|
||||
|---------|-------------|
|
||||
| `mcpctl get <resource>` | List resources (servers, profiles, projects, instances) |
|
||||
| `mcpctl describe <resource> <id>` | Show detailed resource info |
|
||||
| `mcpctl apply <file>` | Apply declarative YAML/JSON configuration |
|
||||
| `mcpctl setup [name]` | Interactive server setup wizard |
|
||||
| `mcpctl instance list/start/stop/restart/remove/logs/inspect` | Manage instances |
|
||||
| `mcpctl claude generate/show/add/remove` | Manage .mcp.json files |
|
||||
| `mcpctl project list/create/delete/show/profiles/set-profiles` | Manage projects |
|
||||
| `mcpctl config get/set/path` | Manage CLI configuration |
|
||||
| `mcpctl status` | Check daemon connectivity |
|
||||
|
||||
### Configuration
|
||||
|
||||
CLI config is stored at `~/.config/mcpctl/config.json` with:
|
||||
- `daemonUrl`: mcpd server URL (default: `http://localhost:4444`)
|
||||
|
||||
## Daemon (`@mcpctl/mcpd`)
|
||||
|
||||
Fastify 5-based REST API server that manages MCP server lifecycle.
|
||||
|
||||
### Layers
|
||||
|
||||
1. **Routes** - HTTP handlers, parameter extraction
|
||||
2. **Services** - Business logic, validation (Zod schemas), error handling
|
||||
3. **Repositories** - Data access via Prisma (interface-based for testability)
|
||||
|
||||
### API Endpoints
|
||||
|
||||
| Endpoint | Methods | Description |
|
||||
|----------|---------|-------------|
|
||||
| `/api/v1/servers` | GET, POST | MCP server definitions |
|
||||
| `/api/v1/servers/:id` | GET, PUT, DELETE | Single server operations |
|
||||
| `/api/v1/profiles` | GET, POST | Server configuration profiles |
|
||||
| `/api/v1/profiles/:id` | GET, PUT, DELETE | Single profile operations |
|
||||
| `/api/v1/projects` | GET, POST | Project management |
|
||||
| `/api/v1/projects/:id` | GET, PUT, DELETE | Single project operations |
|
||||
| `/api/v1/projects/:id/profiles` | GET, PUT | Project profile assignments |
|
||||
| `/api/v1/projects/:id/mcp-config` | GET | Generate .mcp.json |
|
||||
| `/api/v1/instances` | GET, POST | Instance lifecycle |
|
||||
| `/api/v1/instances/:id` | GET, DELETE | Instance operations |
|
||||
| `/api/v1/instances/:id/stop` | POST | Stop instance |
|
||||
| `/api/v1/instances/:id/restart` | POST | Restart instance |
|
||||
| `/api/v1/instances/:id/inspect` | GET | Container inspection |
|
||||
| `/api/v1/instances/:id/logs` | GET | Container logs |
|
||||
| `/api/v1/audit-logs` | GET | Query audit logs |
|
||||
| `/api/v1/audit-logs/:id` | GET | Single audit log |
|
||||
| `/api/v1/audit-logs/purge` | POST | Purge expired logs |
|
||||
| `/health` | GET | Health check (detailed) |
|
||||
| `/healthz` | GET | Liveness probe |
|
||||
|
||||
### Container Orchestration
|
||||
|
||||
The `McpOrchestrator` interface abstracts container management:
|
||||
- `DockerContainerManager` - Docker/Podman implementation via dockerode
|
||||
- Future: `KubernetesOrchestrator` for k8s deployments
|
||||
|
||||
## Local Proxy (`@mcpctl/local-proxy`)
|
||||
|
||||
Aggregates multiple MCP servers behind a single STDIO endpoint.
|
||||
|
||||
### Features
|
||||
|
||||
- **Tool namespacing**: `servername/toolname` routing
|
||||
- **Resource forwarding**: `resources/list` and `resources/read`
|
||||
- **Prompt forwarding**: `prompts/list` and `prompts/get`
|
||||
- **Notification pass-through**: Upstream notifications forwarded to client
|
||||
- **Health monitoring**: Periodic health checks with state tracking
|
||||
- **Transport support**: STDIO (child process) and HTTP (SSE/Streamable HTTP)
|
||||
|
||||
### Usage
|
||||
|
||||
```bash
|
||||
# Via config file
|
||||
mcpctl-proxy --config proxy.json
|
||||
|
||||
# Via CLI flags
|
||||
mcpctl-proxy --upstream "slack:npx -y @anthropic/slack-mcp" \
|
||||
--upstream "github:npx -y @anthropic/github-mcp"
|
||||
```
|
||||
|
||||
## Database (`@mcpctl/db`)
|
||||
|
||||
Prisma ORM with PostgreSQL. Key models:
|
||||
|
||||
- **User** / **Session** - Authentication
|
||||
- **McpServer** - Server definitions (name, transport, package, docker image)
|
||||
- **McpProfile** - Per-server configurations (env overrides, permissions)
|
||||
- **Project** - Grouping of profiles for a workspace
|
||||
- **McpInstance** - Running container instances with lifecycle state
|
||||
- **AuditLog** - Immutable operation audit trail
|
||||
|
||||
## Shared (`@mcpctl/shared`)
|
||||
|
||||
Constants and utilities shared across packages:
|
||||
- `APP_NAME`, `APP_VERSION`
|
||||
- Common type definitions
|
||||
|
||||
## Design Principles
|
||||
|
||||
1. **Interface-based repositories** - All data access through interfaces for testability
|
||||
2. **Dependency injection** - Services receive dependencies via constructor
|
||||
3. **Zod validation** - All user input validated with Zod schemas
|
||||
4. **Namespaced errors** - Custom error classes with HTTP status codes
|
||||
5. **TypeScript strict mode** - `exactOptionalPropertyTypes`, `noUncheckedIndexedAccess`
|
||||
157
docs/getting-started.md
Normal file
157
docs/getting-started.md
Normal file
@@ -0,0 +1,157 @@
|
||||
# Getting Started with mcpctl
|
||||
|
||||
## Prerequisites
|
||||
|
||||
- Node.js >= 20.0.0
|
||||
- pnpm >= 9.0.0
|
||||
- PostgreSQL (for mcpd)
|
||||
- Docker or Podman (for container management)
|
||||
|
||||
## Installation
|
||||
|
||||
```bash
|
||||
# Clone the repository
|
||||
git clone <repo-url>
|
||||
cd mcpctl
|
||||
|
||||
# Install dependencies
|
||||
pnpm install
|
||||
|
||||
# Generate Prisma client
|
||||
pnpm --filter @mcpctl/db exec prisma generate
|
||||
|
||||
# Build all packages
|
||||
pnpm build
|
||||
```
|
||||
|
||||
## Quick Start
|
||||
|
||||
### 1. Start the Database
|
||||
|
||||
```bash
|
||||
# Start PostgreSQL via Docker Compose
|
||||
pnpm db:up
|
||||
|
||||
# Run database migrations
|
||||
pnpm --filter @mcpctl/db exec prisma db push
|
||||
```
|
||||
|
||||
### 2. Start the Daemon
|
||||
|
||||
```bash
|
||||
cd src/mcpd
|
||||
pnpm dev
|
||||
```
|
||||
|
||||
The daemon starts on `http://localhost:4444` by default.
|
||||
|
||||
### 3. Use the CLI
|
||||
|
||||
```bash
|
||||
# Check daemon status
|
||||
mcpctl status
|
||||
|
||||
# Register an MCP server
|
||||
mcpctl apply config.yaml
|
||||
|
||||
# Or use the interactive wizard
|
||||
mcpctl setup my-server
|
||||
|
||||
# List registered servers
|
||||
mcpctl get servers
|
||||
|
||||
# Start an instance
|
||||
mcpctl instance start <server-id>
|
||||
|
||||
# Check instance status
|
||||
mcpctl instance list
|
||||
|
||||
# View instance logs
|
||||
mcpctl instance logs <instance-id>
|
||||
```
|
||||
|
||||
### 4. Generate .mcp.json for Claude
|
||||
|
||||
```bash
|
||||
# Create a project
|
||||
mcpctl project create my-workspace
|
||||
|
||||
# Assign profiles to project
|
||||
mcpctl project set-profiles <project-id> <profile-id-1> <profile-id-2>
|
||||
|
||||
# Generate .mcp.json
|
||||
mcpctl claude generate <project-id>
|
||||
|
||||
# Or manually add servers
|
||||
mcpctl claude add my-server -c npx -a -y @my/mcp-server
|
||||
```
|
||||
|
||||
## Example Configuration
|
||||
|
||||
Create a `config.yaml` file:
|
||||
|
||||
```yaml
|
||||
servers:
|
||||
- name: slack
|
||||
description: Slack MCP server
|
||||
transport: STDIO
|
||||
packageName: "@anthropic/slack-mcp"
|
||||
env:
|
||||
- name: SLACK_TOKEN
|
||||
valueFrom:
|
||||
secretRef:
|
||||
name: slack-secrets
|
||||
key: token
|
||||
|
||||
- name: github
|
||||
description: GitHub MCP server
|
||||
transport: STDIO
|
||||
packageName: "@anthropic/github-mcp"
|
||||
|
||||
profiles:
|
||||
- name: default
|
||||
server: slack
|
||||
envOverrides:
|
||||
SLACK_TOKEN: "xoxb-your-token"
|
||||
|
||||
projects:
|
||||
- name: dev-workspace
|
||||
description: Development workspace
|
||||
```
|
||||
|
||||
Apply it:
|
||||
|
||||
```bash
|
||||
mcpctl apply config.yaml
|
||||
```
|
||||
|
||||
## Running Tests
|
||||
|
||||
```bash
|
||||
# Run all tests
|
||||
pnpm test:run
|
||||
|
||||
# Run tests for a specific package
|
||||
pnpm --filter @mcpctl/cli test:run
|
||||
pnpm --filter @mcpctl/mcpd test:run
|
||||
pnpm --filter @mcpctl/local-proxy test:run
|
||||
|
||||
# Run tests with coverage
|
||||
pnpm test:coverage
|
||||
|
||||
# Typecheck
|
||||
pnpm typecheck
|
||||
|
||||
# Lint
|
||||
pnpm lint
|
||||
```
|
||||
|
||||
## Development
|
||||
|
||||
```bash
|
||||
# Watch mode for tests
|
||||
pnpm test
|
||||
|
||||
# Build in watch mode
|
||||
cd src/cli && pnpm dev
|
||||
```
|
||||
28
examples/ha-mcp.yaml
Normal file
28
examples/ha-mcp.yaml
Normal file
@@ -0,0 +1,28 @@
|
||||
servers:
|
||||
- name: ha-mcp
|
||||
description: "Home Assistant MCP - smart home control via MCP"
|
||||
dockerImage: "ghcr.io/homeassistant-ai/ha-mcp:2.4"
|
||||
transport: STREAMABLE_HTTP
|
||||
containerPort: 3000
|
||||
# For mcpd-managed containers:
|
||||
command:
|
||||
- python
|
||||
- "-c"
|
||||
- "from ha_mcp.server import HomeAssistantSmartMCPServer; s = HomeAssistantSmartMCPServer(); s.mcp.run(transport='sse', host='0.0.0.0', port=3000)"
|
||||
# For connecting to an already-running instance (host.containers.internal for container-to-host):
|
||||
externalUrl: "http://host.containers.internal:8086/mcp"
|
||||
env:
|
||||
- name: HOMEASSISTANT_URL
|
||||
value: ""
|
||||
- name: HOMEASSISTANT_TOKEN
|
||||
valueFrom:
|
||||
secretRef:
|
||||
name: ha-secrets
|
||||
key: token
|
||||
|
||||
profiles:
|
||||
- name: production
|
||||
server: ha-mcp
|
||||
envOverrides:
|
||||
HOMEASSISTANT_URL: "https://ha.itaz.eu"
|
||||
      # SECURITY: a real long-lived Home Assistant token was committed on this
      # line — rotate it immediately and inject the replacement via an
      # environment variable or secret store instead of hard-coding it.
      HOMEASSISTANT_TOKEN: "${HOMEASSISTANT_TOKEN}"
|
||||
26
installlocal.sh
Executable file
26
installlocal.sh
Executable file
@@ -0,0 +1,26 @@
|
||||
#!/bin/bash
# Build (if needed) and install mcpctl RPM locally
set -e

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
cd "$SCRIPT_DIR"

# First matching RPM in dist/ (glob order, not necessarily the newest build)
RPM_FILE=$(ls dist/mcpctl-*.rpm 2>/dev/null | head -1)

# Build if no RPM exists or if source is newer than the RPM
# (find prints a path only when at least one .ts file is newer → truthy test)
if [[ -z "$RPM_FILE" ]] || [[ $(find src/ -name '*.ts' -newer "$RPM_FILE" 2>/dev/null | head -1) ]]; then
    echo "==> Building RPM..."
    bash scripts/build-rpm.sh
    RPM_FILE=$(ls dist/mcpctl-*.rpm 2>/dev/null | head -1)
else
    echo "==> RPM is up to date: $RPM_FILE"
fi

echo "==> Installing $RPM_FILE..."
# --force allows reinstalling the same version over itself
sudo rpm -Uvh --force "$RPM_FILE"

echo "==> Reloading systemd user units..."
systemctl --user daemon-reload

echo "==> Done!"
echo " Enable mcplocal: systemctl --user enable --now mcplocal"
|
||||
28
nfpm.yaml
Normal file
28
nfpm.yaml
Normal file
@@ -0,0 +1,28 @@
|
||||
name: mcpctl
|
||||
arch: amd64
|
||||
version: 0.1.0
|
||||
release: "1"
|
||||
maintainer: michal
|
||||
description: kubectl-like CLI for managing MCP servers
|
||||
license: MIT
|
||||
contents:
|
||||
- src: ./dist/mcpctl
|
||||
dst: /usr/bin/mcpctl
|
||||
file_info:
|
||||
mode: 0755
|
||||
- src: ./dist/mcpctl-local
|
||||
dst: /usr/bin/mcpctl-local
|
||||
file_info:
|
||||
mode: 0755
|
||||
- src: ./deploy/mcplocal.service
|
||||
dst: /usr/lib/systemd/user/mcplocal.service
|
||||
file_info:
|
||||
mode: 0644
|
||||
- src: ./completions/mcpctl.bash
|
||||
dst: /usr/share/bash-completion/completions/mcpctl
|
||||
file_info:
|
||||
mode: 0644
|
||||
- src: ./completions/mcpctl.fish
|
||||
dst: /usr/share/fish/vendor_completions.d/mcpctl.fish
|
||||
file_info:
|
||||
mode: 0644
|
||||
@@ -15,7 +15,14 @@
|
||||
"clean": "pnpm -r run clean && rimraf node_modules",
|
||||
"db:up": "docker compose -f deploy/docker-compose.yml up -d",
|
||||
"db:down": "docker compose -f deploy/docker-compose.yml down",
|
||||
"typecheck": "tsc --build"
|
||||
"typecheck": "tsc --build",
|
||||
"rpm:build": "bash scripts/build-rpm.sh",
|
||||
"rpm:publish": "bash scripts/publish-rpm.sh",
|
||||
"release": "bash scripts/release.sh",
|
||||
"mcpd:build": "bash scripts/build-mcpd.sh",
|
||||
"mcpd:deploy": "bash deploy.sh",
|
||||
"mcpd:deploy-dry": "bash deploy.sh --dry-run",
|
||||
"mcpd:logs": "bash logs.sh"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=20.0.0",
|
||||
|
||||
825
pnpm-lock.yaml
generated
825
pnpm-lock.yaml
generated
File diff suppressed because it is too large
Load Diff
32
scripts/build-mcpd.sh
Executable file
32
scripts/build-mcpd.sh
Executable file
@@ -0,0 +1,32 @@
|
||||
#!/bin/bash
|
||||
# Build mcpd Docker image and push to Gitea container registry
|
||||
set -e
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
|
||||
PROJECT_ROOT="$(dirname "$SCRIPT_DIR")"
|
||||
cd "$PROJECT_ROOT"
|
||||
|
||||
# Load .env for GITEA_TOKEN
|
||||
if [ -f .env ]; then
|
||||
set -a; source .env; set +a
|
||||
fi
|
||||
|
||||
# Push directly to internal address (external proxy has body size limit)
|
||||
REGISTRY="10.0.0.194:3012"
|
||||
IMAGE="mcpd"
|
||||
TAG="${1:-latest}"
|
||||
|
||||
echo "==> Building mcpd image..."
|
||||
podman build -t "$IMAGE:$TAG" -f deploy/Dockerfile.mcpd .
|
||||
|
||||
echo "==> Tagging as $REGISTRY/michal/$IMAGE:$TAG..."
|
||||
podman tag "$IMAGE:$TAG" "$REGISTRY/michal/$IMAGE:$TAG"
|
||||
|
||||
echo "==> Logging in to $REGISTRY..."
|
||||
podman login --tls-verify=false -u michal -p "$GITEA_TOKEN" "$REGISTRY"
|
||||
|
||||
echo "==> Pushing to $REGISTRY/michal/$IMAGE:$TAG..."
|
||||
podman push --tls-verify=false "$REGISTRY/michal/$IMAGE:$TAG"
|
||||
|
||||
echo "==> Done!"
|
||||
echo " Image: $REGISTRY/michal/$IMAGE:$TAG"
|
||||
31
scripts/build-rpm.sh
Executable file
31
scripts/build-rpm.sh
Executable file
@@ -0,0 +1,31 @@
|
||||
#!/bin/bash
|
||||
set -e
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
|
||||
PROJECT_ROOT="$(dirname "$SCRIPT_DIR")"
|
||||
cd "$PROJECT_ROOT"
|
||||
|
||||
# Load .env if present
|
||||
if [ -f .env ]; then
|
||||
set -a; source .env; set +a
|
||||
fi
|
||||
|
||||
# Ensure tools are on PATH
|
||||
export PATH="$HOME/.npm-global/bin:$HOME/.bun/bin:$HOME/.local/bin:$PATH"
|
||||
|
||||
echo "==> Building TypeScript..."
|
||||
pnpm build
|
||||
|
||||
echo "==> Bundling standalone binaries..."
|
||||
mkdir -p dist
|
||||
rm -f dist/mcpctl dist/mcpctl-local dist/mcpctl-*.rpm
|
||||
bun build src/cli/src/index.ts --compile --outfile dist/mcpctl
|
||||
bun build src/mcplocal/src/main.ts --compile --outfile dist/mcpctl-local
|
||||
|
||||
echo "==> Packaging RPM..."
|
||||
nfpm pkg --packager rpm --target dist/
|
||||
|
||||
RPM_FILE=$(ls dist/mcpctl-*.rpm 2>/dev/null | head -1)
|
||||
echo "==> Built: $RPM_FILE"
|
||||
echo " Size: $(du -h "$RPM_FILE" | cut -f1)"
|
||||
rpm -qpi "$RPM_FILE"
|
||||
55
scripts/publish-rpm.sh
Executable file
55
scripts/publish-rpm.sh
Executable file
@@ -0,0 +1,55 @@
|
||||
#!/bin/bash
|
||||
set -e
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
|
||||
PROJECT_ROOT="$(dirname "$SCRIPT_DIR")"
|
||||
cd "$PROJECT_ROOT"
|
||||
|
||||
# Load .env if present
|
||||
if [ -f .env ]; then
|
||||
set -a; source .env; set +a
|
||||
fi
|
||||
|
||||
GITEA_URL="${GITEA_URL:-http://10.0.0.194:3012}"
|
||||
GITEA_OWNER="${GITEA_OWNER:-michal}"
|
||||
|
||||
if [ -z "$GITEA_TOKEN" ]; then
|
||||
echo "Error: GITEA_TOKEN not set. Add it to .env or export it."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
RPM_FILE=$(ls dist/mcpctl-*.rpm 2>/dev/null | head -1)
|
||||
if [ -z "$RPM_FILE" ]; then
|
||||
echo "Error: No RPM found in dist/. Run scripts/build-rpm.sh first."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Get version string as it appears in Gitea (e.g. "0.1.0-1")
|
||||
RPM_VERSION=$(rpm -qp --queryformat '%{VERSION}-%{RELEASE}' "$RPM_FILE")
|
||||
|
||||
echo "==> Publishing $RPM_FILE (version $RPM_VERSION) to ${GITEA_URL}..."
|
||||
|
||||
# Check if version already exists and delete it first
|
||||
EXISTING=$(curl -s -o /dev/null -w "%{http_code}" \
|
||||
-H "Authorization: token ${GITEA_TOKEN}" \
|
||||
"${GITEA_URL}/api/v1/packages/${GITEA_OWNER}/rpm/mcpctl/${RPM_VERSION}")
|
||||
|
||||
if [ "$EXISTING" = "200" ]; then
|
||||
echo "==> Version $RPM_VERSION already exists, replacing..."
|
||||
curl -s -o /dev/null -X DELETE \
|
||||
-H "Authorization: token ${GITEA_TOKEN}" \
|
||||
"${GITEA_URL}/api/v1/packages/${GITEA_OWNER}/rpm/mcpctl/${RPM_VERSION}"
|
||||
fi
|
||||
|
||||
# Upload
|
||||
curl --fail -s -X PUT \
|
||||
-H "Authorization: token ${GITEA_TOKEN}" \
|
||||
--upload-file "$RPM_FILE" \
|
||||
"${GITEA_URL}/api/packages/${GITEA_OWNER}/rpm/upload"
|
||||
|
||||
echo ""
|
||||
echo "==> Published successfully!"
|
||||
echo ""
|
||||
echo "Install with:"
|
||||
echo " sudo dnf config-manager --add-repo ${GITEA_URL}/api/packages/${GITEA_OWNER}/rpm.repo"
|
||||
echo " sudo dnf install mcpctl"
|
||||
41
scripts/release.sh
Executable file
41
scripts/release.sh
Executable file
@@ -0,0 +1,41 @@
|
||||
#!/bin/bash
|
||||
set -e
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
|
||||
PROJECT_ROOT="$(dirname "$SCRIPT_DIR")"
|
||||
cd "$PROJECT_ROOT"
|
||||
|
||||
# Load .env if present
|
||||
if [ -f .env ]; then
|
||||
set -a; source .env; set +a
|
||||
fi
|
||||
|
||||
echo "=== mcpctl release ==="
|
||||
echo ""
|
||||
|
||||
# Build
|
||||
bash scripts/build-rpm.sh
|
||||
|
||||
echo ""
|
||||
|
||||
# Publish
|
||||
bash scripts/publish-rpm.sh
|
||||
|
||||
echo ""
|
||||
|
||||
# Install locally
|
||||
echo "==> Installing locally..."
|
||||
RPM_FILE=$(ls dist/mcpctl-*.rpm 2>/dev/null | head -1)
|
||||
sudo rpm -U --force "$RPM_FILE"
|
||||
|
||||
echo ""
|
||||
echo "==> Installed:"
|
||||
mcpctl --version
|
||||
echo ""
|
||||
|
||||
GITEA_URL="${GITEA_URL:-http://10.0.0.194:3012}"
|
||||
GITEA_OWNER="${GITEA_OWNER:-michal}"
|
||||
echo "=== Done! ==="
|
||||
echo "Others can install with:"
|
||||
echo " sudo dnf config-manager --add-repo ${GITEA_URL}/api/packages/${GITEA_OWNER}/rpm.repo"
|
||||
echo " sudo dnf install mcpctl"
|
||||
@@ -23,5 +23,9 @@
|
||||
"inquirer": "^12.0.0",
|
||||
"js-yaml": "^4.1.0",
|
||||
"zod": "^3.24.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/js-yaml": "^4.0.9",
|
||||
"@types/node": "^25.3.0"
|
||||
}
|
||||
}
|
||||
|
||||
98
src/cli/src/api-client.ts
Normal file
98
src/cli/src/api-client.ts
Normal file
@@ -0,0 +1,98 @@
|
||||
import http from 'node:http';
|
||||
|
||||
export interface ApiClientOptions {
|
||||
baseUrl: string;
|
||||
timeout?: number | undefined;
|
||||
token?: string | undefined;
|
||||
}
|
||||
|
||||
export interface ApiResponse<T = unknown> {
|
||||
status: number;
|
||||
data: T;
|
||||
}
|
||||
|
||||
export class ApiError extends Error {
|
||||
constructor(
|
||||
public readonly status: number,
|
||||
public readonly body: string,
|
||||
) {
|
||||
super(`API error ${status}: ${body}`);
|
||||
this.name = 'ApiError';
|
||||
}
|
||||
}
|
||||
|
||||
function request<T>(method: string, url: string, timeout: number, body?: unknown, token?: string): Promise<ApiResponse<T>> {
|
||||
return new Promise((resolve, reject) => {
|
||||
const parsed = new URL(url);
|
||||
const headers: Record<string, string> = { 'Content-Type': 'application/json' };
|
||||
if (token) {
|
||||
headers['Authorization'] = `Bearer ${token}`;
|
||||
}
|
||||
const opts: http.RequestOptions = {
|
||||
hostname: parsed.hostname,
|
||||
port: parsed.port,
|
||||
path: parsed.pathname + parsed.search,
|
||||
method,
|
||||
timeout,
|
||||
headers,
|
||||
};
|
||||
|
||||
const req = http.request(opts, (res) => {
|
||||
const chunks: Buffer[] = [];
|
||||
res.on('data', (chunk: Buffer) => chunks.push(chunk));
|
||||
res.on('end', () => {
|
||||
const raw = Buffer.concat(chunks).toString('utf-8');
|
||||
const status = res.statusCode ?? 0;
|
||||
if (status >= 400) {
|
||||
reject(new ApiError(status, raw));
|
||||
return;
|
||||
}
|
||||
try {
|
||||
resolve({ status, data: JSON.parse(raw) as T });
|
||||
} catch {
|
||||
resolve({ status, data: raw as unknown as T });
|
||||
}
|
||||
});
|
||||
});
|
||||
req.on('error', reject);
|
||||
req.on('timeout', () => {
|
||||
req.destroy();
|
||||
reject(new Error(`Request to ${url} timed out`));
|
||||
});
|
||||
if (body !== undefined) {
|
||||
req.write(JSON.stringify(body));
|
||||
}
|
||||
req.end();
|
||||
});
|
||||
}
|
||||
|
||||
export class ApiClient {
|
||||
private baseUrl: string;
|
||||
private timeout: number;
|
||||
private token?: string | undefined;
|
||||
|
||||
constructor(opts: ApiClientOptions) {
|
||||
this.baseUrl = opts.baseUrl.replace(/\/$/, '');
|
||||
this.timeout = opts.timeout ?? 10000;
|
||||
this.token = opts.token;
|
||||
}
|
||||
|
||||
async get<T = unknown>(path: string): Promise<T> {
|
||||
const res = await request<T>('GET', `${this.baseUrl}${path}`, this.timeout, undefined, this.token);
|
||||
return res.data;
|
||||
}
|
||||
|
||||
async post<T = unknown>(path: string, body?: unknown): Promise<T> {
|
||||
const res = await request<T>('POST', `${this.baseUrl}${path}`, this.timeout, body, this.token);
|
||||
return res.data;
|
||||
}
|
||||
|
||||
async put<T = unknown>(path: string, body?: unknown): Promise<T> {
|
||||
const res = await request<T>('PUT', `${this.baseUrl}${path}`, this.timeout, body, this.token);
|
||||
return res.data;
|
||||
}
|
||||
|
||||
async delete(path: string): Promise<void> {
|
||||
await request('DELETE', `${this.baseUrl}${path}`, this.timeout, undefined, this.token);
|
||||
}
|
||||
}
|
||||
50
src/cli/src/auth/credentials.ts
Normal file
50
src/cli/src/auth/credentials.ts
Normal file
@@ -0,0 +1,50 @@
|
||||
import { existsSync, mkdirSync, readFileSync, writeFileSync, unlinkSync, chmodSync } from 'node:fs';
|
||||
import { join } from 'node:path';
|
||||
import { homedir } from 'node:os';
|
||||
|
||||
export interface StoredCredentials {
|
||||
token: string;
|
||||
mcpdUrl: string;
|
||||
user: string;
|
||||
expiresAt?: string;
|
||||
}
|
||||
|
||||
export interface CredentialsDeps {
|
||||
configDir: string;
|
||||
}
|
||||
|
||||
function defaultConfigDir(): string {
|
||||
return join(homedir(), '.mcpctl');
|
||||
}
|
||||
|
||||
function credentialsPath(deps?: Partial<CredentialsDeps>): string {
|
||||
return join(deps?.configDir ?? defaultConfigDir(), 'credentials');
|
||||
}
|
||||
|
||||
export function saveCredentials(creds: StoredCredentials, deps?: Partial<CredentialsDeps>): void {
|
||||
const dir = deps?.configDir ?? defaultConfigDir();
|
||||
if (!existsSync(dir)) {
|
||||
mkdirSync(dir, { recursive: true });
|
||||
}
|
||||
const path = credentialsPath(deps);
|
||||
writeFileSync(path, JSON.stringify(creds, null, 2) + '\n', 'utf-8');
|
||||
chmodSync(path, 0o600);
|
||||
}
|
||||
|
||||
export function loadCredentials(deps?: Partial<CredentialsDeps>): StoredCredentials | null {
|
||||
const path = credentialsPath(deps);
|
||||
if (!existsSync(path)) {
|
||||
return null;
|
||||
}
|
||||
const raw = readFileSync(path, 'utf-8');
|
||||
return JSON.parse(raw) as StoredCredentials;
|
||||
}
|
||||
|
||||
export function deleteCredentials(deps?: Partial<CredentialsDeps>): boolean {
|
||||
const path = credentialsPath(deps);
|
||||
if (!existsSync(path)) {
|
||||
return false;
|
||||
}
|
||||
unlinkSync(path);
|
||||
return true;
|
||||
}
|
||||
2
src/cli/src/auth/index.ts
Normal file
2
src/cli/src/auth/index.ts
Normal file
@@ -0,0 +1,2 @@
|
||||
export { saveCredentials, loadCredentials, deleteCredentials } from './credentials.js';
|
||||
export type { StoredCredentials, CredentialsDeps } from './credentials.js';
|
||||
202
src/cli/src/commands/apply.ts
Normal file
202
src/cli/src/commands/apply.ts
Normal file
@@ -0,0 +1,202 @@
|
||||
import { Command } from 'commander';
|
||||
import { readFileSync } from 'node:fs';
|
||||
import yaml from 'js-yaml';
|
||||
import { z } from 'zod';
|
||||
import type { ApiClient } from '../api-client.js';
|
||||
|
||||
const HealthCheckSchema = z.object({
|
||||
tool: z.string().min(1),
|
||||
arguments: z.record(z.unknown()).default({}),
|
||||
intervalSeconds: z.number().int().min(5).max(3600).default(60),
|
||||
timeoutSeconds: z.number().int().min(1).max(120).default(10),
|
||||
failureThreshold: z.number().int().min(1).max(20).default(3),
|
||||
});
|
||||
|
||||
const ServerEnvEntrySchema = z.object({
|
||||
name: z.string().min(1),
|
||||
value: z.string().optional(),
|
||||
valueFrom: z.object({
|
||||
secretRef: z.object({ name: z.string(), key: z.string() }),
|
||||
}).optional(),
|
||||
});
|
||||
|
||||
const ServerSpecSchema = z.object({
|
||||
name: z.string().min(1),
|
||||
description: z.string().default(''),
|
||||
packageName: z.string().optional(),
|
||||
dockerImage: z.string().optional(),
|
||||
transport: z.enum(['STDIO', 'SSE', 'STREAMABLE_HTTP']).default('STDIO'),
|
||||
repositoryUrl: z.string().url().optional(),
|
||||
externalUrl: z.string().url().optional(),
|
||||
command: z.array(z.string()).optional(),
|
||||
containerPort: z.number().int().min(1).max(65535).optional(),
|
||||
replicas: z.number().int().min(0).max(10).default(1),
|
||||
env: z.array(ServerEnvEntrySchema).default([]),
|
||||
healthCheck: HealthCheckSchema.optional(),
|
||||
});
|
||||
|
||||
const SecretSpecSchema = z.object({
|
||||
name: z.string().min(1),
|
||||
data: z.record(z.string()).default({}),
|
||||
});
|
||||
|
||||
const TemplateEnvEntrySchema = z.object({
|
||||
name: z.string().min(1),
|
||||
description: z.string().optional(),
|
||||
required: z.boolean().optional(),
|
||||
defaultValue: z.string().optional(),
|
||||
});
|
||||
|
||||
const TemplateSpecSchema = z.object({
|
||||
name: z.string().min(1),
|
||||
version: z.string().default('1.0.0'),
|
||||
description: z.string().default(''),
|
||||
packageName: z.string().optional(),
|
||||
dockerImage: z.string().optional(),
|
||||
transport: z.enum(['STDIO', 'SSE', 'STREAMABLE_HTTP']).default('STDIO'),
|
||||
repositoryUrl: z.string().optional(),
|
||||
externalUrl: z.string().optional(),
|
||||
command: z.array(z.string()).optional(),
|
||||
containerPort: z.number().int().min(1).max(65535).optional(),
|
||||
replicas: z.number().int().min(0).max(10).default(1),
|
||||
env: z.array(TemplateEnvEntrySchema).default([]),
|
||||
healthCheck: HealthCheckSchema.optional(),
|
||||
});
|
||||
|
||||
const ProjectSpecSchema = z.object({
|
||||
name: z.string().min(1),
|
||||
description: z.string().default(''),
|
||||
});
|
||||
|
||||
const ApplyConfigSchema = z.object({
|
||||
servers: z.array(ServerSpecSchema).default([]),
|
||||
secrets: z.array(SecretSpecSchema).default([]),
|
||||
projects: z.array(ProjectSpecSchema).default([]),
|
||||
templates: z.array(TemplateSpecSchema).default([]),
|
||||
});
|
||||
|
||||
export type ApplyConfig = z.infer<typeof ApplyConfigSchema>;
|
||||
|
||||
export interface ApplyCommandDeps {
|
||||
client: ApiClient;
|
||||
log: (...args: unknown[]) => void;
|
||||
}
|
||||
|
||||
export function createApplyCommand(deps: ApplyCommandDeps): Command {
|
||||
const { client, log } = deps;
|
||||
|
||||
return new Command('apply')
|
||||
.description('Apply declarative configuration from a YAML or JSON file')
|
||||
.argument('<file>', 'Path to config file (.yaml, .yml, or .json)')
|
||||
.option('--dry-run', 'Validate and show changes without applying')
|
||||
.action(async (file: string, opts: { dryRun?: boolean }) => {
|
||||
const config = loadConfigFile(file);
|
||||
|
||||
if (opts.dryRun) {
|
||||
log('Dry run - would apply:');
|
||||
if (config.servers.length > 0) log(` ${config.servers.length} server(s)`);
|
||||
if (config.secrets.length > 0) log(` ${config.secrets.length} secret(s)`);
|
||||
if (config.projects.length > 0) log(` ${config.projects.length} project(s)`);
|
||||
if (config.templates.length > 0) log(` ${config.templates.length} template(s)`);
|
||||
return;
|
||||
}
|
||||
|
||||
await applyConfig(client, config, log);
|
||||
});
|
||||
}
|
||||
|
||||
function loadConfigFile(path: string): ApplyConfig {
|
||||
const raw = readFileSync(path, 'utf-8');
|
||||
let parsed: unknown;
|
||||
|
||||
if (path.endsWith('.json')) {
|
||||
parsed = JSON.parse(raw);
|
||||
} else {
|
||||
parsed = yaml.load(raw);
|
||||
}
|
||||
|
||||
return ApplyConfigSchema.parse(parsed);
|
||||
}
|
||||
|
||||
async function applyConfig(client: ApiClient, config: ApplyConfig, log: (...args: unknown[]) => void): Promise<void> {
|
||||
// Apply servers first
|
||||
for (const server of config.servers) {
|
||||
try {
|
||||
const existing = await findByName(client, 'servers', server.name);
|
||||
if (existing) {
|
||||
await client.put(`/api/v1/servers/${(existing as { id: string }).id}`, server);
|
||||
log(`Updated server: ${server.name}`);
|
||||
} else {
|
||||
await client.post('/api/v1/servers', server);
|
||||
log(`Created server: ${server.name}`);
|
||||
}
|
||||
} catch (err) {
|
||||
log(`Error applying server '${server.name}': ${err instanceof Error ? err.message : err}`);
|
||||
}
|
||||
}
|
||||
|
||||
// Apply secrets
|
||||
for (const secret of config.secrets) {
|
||||
try {
|
||||
const existing = await findByName(client, 'secrets', secret.name);
|
||||
if (existing) {
|
||||
await client.put(`/api/v1/secrets/${(existing as { id: string }).id}`, { data: secret.data });
|
||||
log(`Updated secret: ${secret.name}`);
|
||||
} else {
|
||||
await client.post('/api/v1/secrets', secret);
|
||||
log(`Created secret: ${secret.name}`);
|
||||
}
|
||||
} catch (err) {
|
||||
log(`Error applying secret '${secret.name}': ${err instanceof Error ? err.message : err}`);
|
||||
}
|
||||
}
|
||||
|
||||
// Apply projects
|
||||
for (const project of config.projects) {
|
||||
try {
|
||||
const existing = await findByName(client, 'projects', project.name);
|
||||
if (existing) {
|
||||
await client.put(`/api/v1/projects/${(existing as { id: string }).id}`, {
|
||||
description: project.description,
|
||||
});
|
||||
log(`Updated project: ${project.name}`);
|
||||
} else {
|
||||
await client.post('/api/v1/projects', {
|
||||
name: project.name,
|
||||
description: project.description,
|
||||
});
|
||||
log(`Created project: ${project.name}`);
|
||||
}
|
||||
} catch (err) {
|
||||
log(`Error applying project '${project.name}': ${err instanceof Error ? err.message : err}`);
|
||||
}
|
||||
}
|
||||
|
||||
// Apply templates
|
||||
for (const template of config.templates) {
|
||||
try {
|
||||
const existing = await findByName(client, 'templates', template.name);
|
||||
if (existing) {
|
||||
await client.put(`/api/v1/templates/${(existing as { id: string }).id}`, template);
|
||||
log(`Updated template: ${template.name}`);
|
||||
} else {
|
||||
await client.post('/api/v1/templates', template);
|
||||
log(`Created template: ${template.name}`);
|
||||
}
|
||||
} catch (err) {
|
||||
log(`Error applying template '${template.name}': ${err instanceof Error ? err.message : err}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async function findByName(client: ApiClient, resource: string, name: string): Promise<unknown | null> {
|
||||
try {
|
||||
const items = await client.get<Array<{ name: string }>>(`/api/v1/${resource}`);
|
||||
return items.find((item) => item.name === name) ?? null;
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
// Export for testing
|
||||
export { loadConfigFile, applyConfig };
|
||||
148
src/cli/src/commands/auth.ts
Normal file
148
src/cli/src/commands/auth.ts
Normal file
@@ -0,0 +1,148 @@
|
||||
import { Command } from 'commander';
|
||||
import http from 'node:http';
|
||||
import { loadConfig } from '../config/index.js';
|
||||
import type { ConfigLoaderDeps } from '../config/index.js';
|
||||
import { saveCredentials, loadCredentials, deleteCredentials } from '../auth/index.js';
|
||||
import type { CredentialsDeps } from '../auth/index.js';
|
||||
|
||||
export interface PromptDeps {
|
||||
input(message: string): Promise<string>;
|
||||
password(message: string): Promise<string>;
|
||||
}
|
||||
|
||||
export interface AuthCommandDeps {
|
||||
configDeps: Partial<ConfigLoaderDeps>;
|
||||
credentialsDeps: Partial<CredentialsDeps>;
|
||||
prompt: PromptDeps;
|
||||
log: (...args: string[]) => void;
|
||||
loginRequest: (mcpdUrl: string, email: string, password: string) => Promise<LoginResponse>;
|
||||
logoutRequest: (mcpdUrl: string, token: string) => Promise<void>;
|
||||
}
|
||||
|
||||
interface LoginResponse {
|
||||
token: string;
|
||||
user: { email: string };
|
||||
}
|
||||
|
||||
function defaultLoginRequest(mcpdUrl: string, email: string, password: string): Promise<LoginResponse> {
|
||||
return new Promise((resolve, reject) => {
|
||||
const url = new URL('/api/v1/auth/login', mcpdUrl);
|
||||
const body = JSON.stringify({ email, password });
|
||||
const opts: http.RequestOptions = {
|
||||
hostname: url.hostname,
|
||||
port: url.port,
|
||||
path: url.pathname,
|
||||
method: 'POST',
|
||||
timeout: 10000,
|
||||
headers: { 'Content-Type': 'application/json', 'Content-Length': Buffer.byteLength(body) },
|
||||
};
|
||||
const req = http.request(opts, (res) => {
|
||||
const chunks: Buffer[] = [];
|
||||
res.on('data', (chunk: Buffer) => chunks.push(chunk));
|
||||
res.on('end', () => {
|
||||
const raw = Buffer.concat(chunks).toString('utf-8');
|
||||
if (res.statusCode === 401) {
|
||||
reject(new Error('Invalid credentials'));
|
||||
return;
|
||||
}
|
||||
if ((res.statusCode ?? 0) >= 400) {
|
||||
reject(new Error(`Login failed (${res.statusCode}): ${raw}`));
|
||||
return;
|
||||
}
|
||||
resolve(JSON.parse(raw) as LoginResponse);
|
||||
});
|
||||
});
|
||||
req.on('error', (err) => reject(new Error(`Cannot reach mcpd: ${err.message}`)));
|
||||
req.on('timeout', () => { req.destroy(); reject(new Error('Login request timed out')); });
|
||||
req.write(body);
|
||||
req.end();
|
||||
});
|
||||
}
|
||||
|
||||
function defaultLogoutRequest(mcpdUrl: string, token: string): Promise<void> {
|
||||
return new Promise((resolve) => {
|
||||
const url = new URL('/api/v1/auth/logout', mcpdUrl);
|
||||
const opts: http.RequestOptions = {
|
||||
hostname: url.hostname,
|
||||
port: url.port,
|
||||
path: url.pathname,
|
||||
method: 'POST',
|
||||
timeout: 10000,
|
||||
headers: { 'Authorization': `Bearer ${token}` },
|
||||
};
|
||||
const req = http.request(opts, (res) => {
|
||||
res.resume();
|
||||
res.on('end', () => resolve());
|
||||
});
|
||||
req.on('error', () => resolve()); // Don't fail logout on network errors
|
||||
req.on('timeout', () => { req.destroy(); resolve(); });
|
||||
req.end();
|
||||
});
|
||||
}
|
||||
|
||||
async function defaultInput(message: string): Promise<string> {
|
||||
const { default: inquirer } = await import('inquirer');
|
||||
const { answer } = await inquirer.prompt([{ type: 'input', name: 'answer', message }]);
|
||||
return answer as string;
|
||||
}
|
||||
|
||||
async function defaultPassword(message: string): Promise<string> {
|
||||
const { default: inquirer } = await import('inquirer');
|
||||
const { answer } = await inquirer.prompt([{ type: 'password', name: 'answer', message }]);
|
||||
return answer as string;
|
||||
}
|
||||
|
||||
const defaultDeps: AuthCommandDeps = {
|
||||
configDeps: {},
|
||||
credentialsDeps: {},
|
||||
prompt: { input: defaultInput, password: defaultPassword },
|
||||
log: (...args) => console.log(...args),
|
||||
loginRequest: defaultLoginRequest,
|
||||
logoutRequest: defaultLogoutRequest,
|
||||
};
|
||||
|
||||
export function createLoginCommand(deps?: Partial<AuthCommandDeps>): Command {
|
||||
const { configDeps, credentialsDeps, prompt, log, loginRequest } = { ...defaultDeps, ...deps };
|
||||
|
||||
return new Command('login')
|
||||
.description('Authenticate with mcpd')
|
||||
.option('--mcpd-url <url>', 'mcpd URL to authenticate against')
|
||||
.action(async (opts: { mcpdUrl?: string }) => {
|
||||
const config = loadConfig(configDeps);
|
||||
const mcpdUrl = opts.mcpdUrl ?? config.mcpdUrl;
|
||||
|
||||
const email = await prompt.input('Email:');
|
||||
const password = await prompt.password('Password:');
|
||||
|
||||
try {
|
||||
const result = await loginRequest(mcpdUrl, email, password);
|
||||
saveCredentials({
|
||||
token: result.token,
|
||||
mcpdUrl,
|
||||
user: result.user.email,
|
||||
}, credentialsDeps);
|
||||
log(`Logged in as ${result.user.email}`);
|
||||
} catch (err) {
|
||||
log(`Login failed: ${(err as Error).message}`);
|
||||
process.exitCode = 1;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
export function createLogoutCommand(deps?: Partial<AuthCommandDeps>): Command {
|
||||
const { credentialsDeps, log, logoutRequest } = { ...defaultDeps, ...deps };
|
||||
|
||||
return new Command('logout')
|
||||
.description('Log out and remove stored credentials')
|
||||
.action(async () => {
|
||||
const creds = loadCredentials(credentialsDeps);
|
||||
if (!creds) {
|
||||
log('Not logged in');
|
||||
return;
|
||||
}
|
||||
|
||||
await logoutRequest(creds.mcpdUrl, creds.token);
|
||||
deleteCredentials(credentialsDeps);
|
||||
log('Logged out successfully');
|
||||
});
|
||||
}
|
||||
80
src/cli/src/commands/backup.ts
Normal file
80
src/cli/src/commands/backup.ts
Normal file
@@ -0,0 +1,80 @@
|
||||
import { Command } from 'commander';
|
||||
import fs from 'node:fs';
|
||||
import type { ApiClient } from '../api-client.js';
|
||||
|
||||
export interface BackupDeps {
|
||||
client: ApiClient;
|
||||
log: (...args: unknown[]) => void;
|
||||
}
|
||||
|
||||
export function createBackupCommand(deps: BackupDeps): Command {
|
||||
const cmd = new Command('backup')
|
||||
.description('Backup mcpctl configuration to a JSON file')
|
||||
.option('-o, --output <path>', 'output file path', 'mcpctl-backup.json')
|
||||
.option('-p, --password <password>', 'encrypt sensitive values with password')
|
||||
.option('-r, --resources <types>', 'resource types to backup (comma-separated: servers,profiles,projects)')
|
||||
.action(async (options: { output: string; password?: string; resources?: string }) => {
|
||||
const body: Record<string, unknown> = {};
|
||||
if (options.password) {
|
||||
body.password = options.password;
|
||||
}
|
||||
if (options.resources) {
|
||||
body.resources = options.resources.split(',').map((s) => s.trim());
|
||||
}
|
||||
|
||||
const bundle = await deps.client.post('/api/v1/backup', body);
|
||||
fs.writeFileSync(options.output, JSON.stringify(bundle, null, 2), 'utf-8');
|
||||
deps.log(`Backup saved to ${options.output}`);
|
||||
});
|
||||
|
||||
return cmd;
|
||||
}
|
||||
|
||||
export function createRestoreCommand(deps: BackupDeps): Command {
|
||||
const cmd = new Command('restore')
|
||||
.description('Restore mcpctl configuration from a backup file')
|
||||
.option('-i, --input <path>', 'backup file path', 'mcpctl-backup.json')
|
||||
.option('-p, --password <password>', 'decryption password for encrypted backups')
|
||||
.option('-c, --conflict <strategy>', 'conflict resolution: skip, overwrite, fail', 'skip')
|
||||
.action(async (options: { input: string; password?: string; conflict: string }) => {
|
||||
if (!fs.existsSync(options.input)) {
|
||||
deps.log(`Error: File not found: ${options.input}`);
|
||||
return;
|
||||
}
|
||||
|
||||
const raw = fs.readFileSync(options.input, 'utf-8');
|
||||
const bundle = JSON.parse(raw) as unknown;
|
||||
|
||||
const body: Record<string, unknown> = {
|
||||
bundle,
|
||||
conflictStrategy: options.conflict,
|
||||
};
|
||||
if (options.password) {
|
||||
body.password = options.password;
|
||||
}
|
||||
|
||||
const result = await deps.client.post<{
|
||||
serversCreated: number;
|
||||
serversSkipped: number;
|
||||
profilesCreated: number;
|
||||
profilesSkipped: number;
|
||||
projectsCreated: number;
|
||||
projectsSkipped: number;
|
||||
errors: string[];
|
||||
}>('/api/v1/restore', body);
|
||||
|
||||
deps.log('Restore complete:');
|
||||
deps.log(` Servers: ${result.serversCreated} created, ${result.serversSkipped} skipped`);
|
||||
deps.log(` Profiles: ${result.profilesCreated} created, ${result.profilesSkipped} skipped`);
|
||||
deps.log(` Projects: ${result.projectsCreated} created, ${result.projectsSkipped} skipped`);
|
||||
|
||||
if (result.errors.length > 0) {
|
||||
deps.log(` Errors:`);
|
||||
for (const err of result.errors) {
|
||||
deps.log(` - ${err}`);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
return cmd;
|
||||
}
|
||||
155
src/cli/src/commands/claude.ts
Normal file
155
src/cli/src/commands/claude.ts
Normal file
@@ -0,0 +1,155 @@
|
||||
import { Command } from 'commander';
|
||||
import { writeFileSync, readFileSync, existsSync } from 'node:fs';
|
||||
import { resolve } from 'node:path';
|
||||
import type { ApiClient } from '../api-client.js';
|
||||
|
||||
interface McpConfig {
|
||||
mcpServers: Record<string, { command: string; args: string[]; env?: Record<string, string> }>;
|
||||
}
|
||||
|
||||
export interface ClaudeCommandDeps {
|
||||
client: ApiClient;
|
||||
log: (...args: unknown[]) => void;
|
||||
}
|
||||
|
||||
export function createClaudeCommand(deps: ClaudeCommandDeps): Command {
|
||||
const { client, log } = deps;
|
||||
|
||||
const cmd = new Command('claude')
|
||||
.description('Manage Claude MCP configuration (.mcp.json)');
|
||||
|
||||
cmd
|
||||
.command('generate <projectId>')
|
||||
.description('Generate .mcp.json from a project configuration')
|
||||
.option('-o, --output <path>', 'Output file path', '.mcp.json')
|
||||
.option('--merge', 'Merge with existing .mcp.json instead of overwriting')
|
||||
.option('--stdout', 'Print to stdout instead of writing a file')
|
||||
.action(async (projectId: string, opts: { output: string; merge?: boolean; stdout?: boolean }) => {
|
||||
const config = await client.get<McpConfig>(`/api/v1/projects/${projectId}/mcp-config`);
|
||||
|
||||
if (opts.stdout) {
|
||||
log(JSON.stringify(config, null, 2));
|
||||
return;
|
||||
}
|
||||
|
||||
const outputPath = resolve(opts.output);
|
||||
let finalConfig = config;
|
||||
|
||||
if (opts.merge && existsSync(outputPath)) {
|
||||
try {
|
||||
const existing = JSON.parse(readFileSync(outputPath, 'utf-8')) as McpConfig;
|
||||
finalConfig = {
|
||||
mcpServers: {
|
||||
...existing.mcpServers,
|
||||
...config.mcpServers,
|
||||
},
|
||||
};
|
||||
} catch {
|
||||
// If existing file is invalid, just overwrite
|
||||
}
|
||||
}
|
||||
|
||||
writeFileSync(outputPath, JSON.stringify(finalConfig, null, 2) + '\n');
|
||||
const serverCount = Object.keys(finalConfig.mcpServers).length;
|
||||
log(`Wrote ${outputPath} (${serverCount} server(s))`);
|
||||
});
|
||||
|
||||
cmd
|
||||
.command('show')
|
||||
.description('Show current .mcp.json configuration')
|
||||
.option('-p, --path <path>', 'Path to .mcp.json', '.mcp.json')
|
||||
.action((opts: { path: string }) => {
|
||||
const filePath = resolve(opts.path);
|
||||
if (!existsSync(filePath)) {
|
||||
log(`No .mcp.json found at ${filePath}`);
|
||||
return;
|
||||
}
|
||||
const content = readFileSync(filePath, 'utf-8');
|
||||
try {
|
||||
const config = JSON.parse(content) as McpConfig;
|
||||
const servers = Object.entries(config.mcpServers ?? {});
|
||||
if (servers.length === 0) {
|
||||
log('No MCP servers configured.');
|
||||
return;
|
||||
}
|
||||
log(`MCP servers in ${filePath}:\n`);
|
||||
for (const [name, server] of servers) {
|
||||
log(` ${name}`);
|
||||
log(` command: ${server.command} ${server.args.join(' ')}`);
|
||||
if (server.env) {
|
||||
const envKeys = Object.keys(server.env);
|
||||
log(` env: ${envKeys.join(', ')}`);
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
log(`Invalid JSON in ${filePath}`);
|
||||
}
|
||||
});
|
||||
|
||||
cmd
|
||||
.command('add <name>')
|
||||
.description('Add an MCP server entry to .mcp.json')
|
||||
.requiredOption('-c, --command <cmd>', 'Command to run')
|
||||
.option('-a, --args <args...>', 'Command arguments')
|
||||
.option('-e, --env <key=value...>', 'Environment variables')
|
||||
.option('-p, --path <path>', 'Path to .mcp.json', '.mcp.json')
|
||||
.action((name: string, opts: { command: string; args?: string[]; env?: string[]; path: string }) => {
|
||||
const filePath = resolve(opts.path);
|
||||
let config: McpConfig = { mcpServers: {} };
|
||||
|
||||
if (existsSync(filePath)) {
|
||||
try {
|
||||
config = JSON.parse(readFileSync(filePath, 'utf-8')) as McpConfig;
|
||||
} catch {
|
||||
// Start fresh
|
||||
}
|
||||
}
|
||||
|
||||
const entry: { command: string; args: string[]; env?: Record<string, string> } = {
|
||||
command: opts.command,
|
||||
args: opts.args ?? [],
|
||||
};
|
||||
|
||||
if (opts.env && opts.env.length > 0) {
|
||||
const env: Record<string, string> = {};
|
||||
for (const pair of opts.env) {
|
||||
const eqIdx = pair.indexOf('=');
|
||||
if (eqIdx > 0) {
|
||||
env[pair.slice(0, eqIdx)] = pair.slice(eqIdx + 1);
|
||||
}
|
||||
}
|
||||
entry.env = env;
|
||||
}
|
||||
|
||||
config.mcpServers[name] = entry;
|
||||
writeFileSync(filePath, JSON.stringify(config, null, 2) + '\n');
|
||||
log(`Added '${name}' to ${filePath}`);
|
||||
});
|
||||
|
||||
cmd
|
||||
.command('remove <name>')
|
||||
.description('Remove an MCP server entry from .mcp.json')
|
||||
.option('-p, --path <path>', 'Path to .mcp.json', '.mcp.json')
|
||||
.action((name: string, opts: { path: string }) => {
|
||||
const filePath = resolve(opts.path);
|
||||
if (!existsSync(filePath)) {
|
||||
log(`No .mcp.json found at ${filePath}`);
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const config = JSON.parse(readFileSync(filePath, 'utf-8')) as McpConfig;
|
||||
if (!(name in config.mcpServers)) {
|
||||
log(`Server '${name}' not found in ${filePath}`);
|
||||
return;
|
||||
}
|
||||
delete config.mcpServers[name];
|
||||
writeFileSync(filePath, JSON.stringify(config, null, 2) + '\n');
|
||||
log(`Removed '${name}' from ${filePath}`);
|
||||
} catch {
|
||||
log(`Invalid JSON in ${filePath}`);
|
||||
}
|
||||
});
|
||||
|
||||
return cmd;
|
||||
}
|
||||
72
src/cli/src/commands/config.ts
Normal file
72
src/cli/src/commands/config.ts
Normal file
@@ -0,0 +1,72 @@
|
||||
import { Command } from 'commander';
|
||||
import { loadConfig, saveConfig, mergeConfig, getConfigPath, DEFAULT_CONFIG } from '../config/index.js';
|
||||
import type { McpctlConfig, ConfigLoaderDeps } from '../config/index.js';
|
||||
import { formatJson, formatYaml } from '../formatters/index.js';
|
||||
|
||||
/** Injectable dependencies for the `config` command group (overridable in tests). */
export interface ConfigCommandDeps {
  /** Overrides forwarded to loadConfig/saveConfig/getConfigPath (e.g. a custom config dir). */
  configDeps: Partial<ConfigLoaderDeps>;
  /** Output sink for command results; defaults to console.log. */
  log: (...args: string[]) => void;
}
|
||||
|
||||
// Production defaults: no config-loader overrides, output via console.log.
const defaultDeps: ConfigCommandDeps = {
  configDeps: {},
  log: (...args) => console.log(...args),
};
|
||||
|
||||
export function createConfigCommand(deps?: Partial<ConfigCommandDeps>): Command {
|
||||
const { configDeps, log } = { ...defaultDeps, ...deps };
|
||||
|
||||
const config = new Command('config').description('Manage mcpctl configuration');
|
||||
|
||||
config
|
||||
.command('view')
|
||||
.description('Show current configuration')
|
||||
.option('-o, --output <format>', 'output format (json, yaml)', 'json')
|
||||
.action((opts: { output: string }) => {
|
||||
const cfg = loadConfig(configDeps);
|
||||
const out = opts.output === 'yaml' ? formatYaml(cfg) : formatJson(cfg);
|
||||
log(out);
|
||||
});
|
||||
|
||||
config
|
||||
.command('set')
|
||||
.description('Set a configuration value')
|
||||
.argument('<key>', 'configuration key (e.g., daemonUrl, outputFormat)')
|
||||
.argument('<value>', 'value to set')
|
||||
.action((key: string, value: string) => {
|
||||
const updates: Record<string, unknown> = {};
|
||||
|
||||
// Handle typed conversions
|
||||
if (key === 'cacheTTLMs') {
|
||||
updates[key] = parseInt(value, 10);
|
||||
} else if (key === 'registries') {
|
||||
updates[key] = value.split(',').map((s) => s.trim());
|
||||
} else if (key === 'daemonUrl') {
|
||||
// Backward compat: map daemonUrl to mcplocalUrl
|
||||
updates['mcplocalUrl'] = value;
|
||||
} else {
|
||||
updates[key] = value;
|
||||
}
|
||||
|
||||
const updated = mergeConfig(updates as Partial<McpctlConfig>, configDeps);
|
||||
saveConfig(updated, configDeps);
|
||||
log(`Set ${key} = ${value}`);
|
||||
});
|
||||
|
||||
config
|
||||
.command('path')
|
||||
.description('Show configuration file path')
|
||||
.action(() => {
|
||||
log(getConfigPath(configDeps?.configDir));
|
||||
});
|
||||
|
||||
config
|
||||
.command('reset')
|
||||
.description('Reset configuration to defaults')
|
||||
.action(() => {
|
||||
saveConfig(DEFAULT_CONFIG, configDeps);
|
||||
log('Configuration reset to defaults');
|
||||
});
|
||||
|
||||
return config;
|
||||
}
|
||||
219
src/cli/src/commands/create.ts
Normal file
219
src/cli/src/commands/create.ts
Normal file
@@ -0,0 +1,219 @@
|
||||
import { Command } from 'commander';
|
||||
import { type ApiClient, ApiError } from '../api-client.js';
|
||||
/** Injectable dependencies for the `create` command group. */
export interface CreateCommandDeps {
  /** HTTP client used for all /api/v1 calls. */
  client: ApiClient;
  /** Output sink for success messages. */
  log: (...args: unknown[]) => void;
}
|
||||
|
||||
function collect(value: string, prev: string[]): string[] {
|
||||
return [...prev, value];
|
||||
}
|
||||
|
||||
/** One server environment variable: either an inline value or a secret reference. */
interface ServerEnvEntry {
  name: string;
  /** Inline literal value (mutually exclusive with valueFrom in practice). */
  value?: string;
  /** Indirect value resolved from a named secret's key. */
  valueFrom?: { secretRef: { name: string; key: string } };
}
|
||||
|
||||
function parseServerEnv(entries: string[]): ServerEnvEntry[] {
|
||||
return entries.map((entry) => {
|
||||
const eqIdx = entry.indexOf('=');
|
||||
if (eqIdx === -1) {
|
||||
throw new Error(`Invalid env format '${entry}'. Expected KEY=value or KEY=secretRef:SECRET:KEY`);
|
||||
}
|
||||
const envName = entry.slice(0, eqIdx);
|
||||
const rhs = entry.slice(eqIdx + 1);
|
||||
|
||||
if (rhs.startsWith('secretRef:')) {
|
||||
const parts = rhs.split(':');
|
||||
if (parts.length !== 3) {
|
||||
throw new Error(`Invalid secretRef format '${entry}'. Expected KEY=secretRef:SECRET_NAME:SECRET_KEY`);
|
||||
}
|
||||
return {
|
||||
name: envName,
|
||||
valueFrom: { secretRef: { name: parts[1]!, key: parts[2]! } },
|
||||
};
|
||||
}
|
||||
|
||||
return { name: envName, value: rhs };
|
||||
});
|
||||
}
|
||||
|
||||
function parseEnvEntries(entries: string[]): Record<string, string> {
|
||||
const result: Record<string, string> = {};
|
||||
for (const entry of entries) {
|
||||
const eqIdx = entry.indexOf('=');
|
||||
if (eqIdx === -1) {
|
||||
throw new Error(`Invalid env format '${entry}'. Expected KEY=value`);
|
||||
}
|
||||
result[entry.slice(0, eqIdx)] = entry.slice(eqIdx + 1);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
 * Build the `create` command group with `server`, `secret` and `project`
 * subcommands.
 *
 * All three subcommands share the same `--force` upsert pattern: POST first,
 * and on a 409 conflict resolve the existing resource by name and PUT an
 * update instead.
 *
 * @param deps - API client and log sink.
 * @returns The configured commander `Command`.
 */
export function createCreateCommand(deps: CreateCommandDeps): Command {
  const { client, log } = deps;

  const cmd = new Command('create')
    .description('Create a resource (server, project)');

  // --- create server ---
  cmd.command('server')
    .description('Create an MCP server definition')
    .argument('<name>', 'Server name (lowercase, hyphens allowed)')
    .option('-d, --description <text>', 'Server description')
    .option('--package-name <name>', 'NPM package name')
    .option('--docker-image <image>', 'Docker image')
    .option('--transport <type>', 'Transport type (STDIO, SSE, STREAMABLE_HTTP)')
    .option('--repository-url <url>', 'Source repository URL')
    .option('--external-url <url>', 'External endpoint URL')
    .option('--command <arg>', 'Command argument (repeat for multiple)', collect, [])
    .option('--container-port <port>', 'Container port number')
    .option('--replicas <count>', 'Number of replicas')
    .option('--env <entry>', 'Env var: KEY=value (inline) or KEY=secretRef:SECRET:KEY (secret ref, repeat for multiple)', collect, [])
    .option('--from-template <name>', 'Create from template (name or name:version)')
    .option('--force', 'Update if already exists')
    .action(async (name: string, opts) => {
      let base: Record<string, unknown> = {};

      // If --from-template, fetch template and use as base
      if (opts.fromTemplate) {
        const tplRef = opts.fromTemplate as string;
        // 'name:version' splits at the FIRST colon; a bare name means
        // "latest matching" (first result with that name).
        const [tplName, tplVersion] = tplRef.includes(':')
          ? [tplRef.slice(0, tplRef.indexOf(':')), tplRef.slice(tplRef.indexOf(':') + 1)]
          : [tplRef, undefined];

        const templates = await client.get<Array<Record<string, unknown>>>(`/api/v1/templates?name=${encodeURIComponent(tplName)}`);
        let template: Record<string, unknown> | undefined;
        if (tplVersion) {
          template = templates.find((t) => t.name === tplName && t.version === tplVersion);
          if (!template) throw new Error(`Template '${tplName}' version '${tplVersion}' not found`);
        } else {
          template = templates.find((t) => t.name === tplName);
          if (!template) throw new Error(`Template '${tplName}' not found`);
        }

        // Copy template fields as base (strip template-only, internal, and null fields)
        const { id: _id, createdAt: _c, updatedAt: _u, version: _v, name: _n, ...tplFields } = template;
        base = {};
        for (const [k, v] of Object.entries(tplFields)) {
          if (v !== null && v !== undefined) base[k] = v;
        }

        // Convert template env (description/required) to server env (name/value/valueFrom)
        const tplEnv = template.env as Array<{ name: string; description?: string; required?: boolean; defaultValue?: string }> | undefined;
        if (tplEnv && tplEnv.length > 0) {
          base.env = tplEnv.map((e) => ({ name: e.name, value: e.defaultValue ?? '' }));
        }

        // Track template origin
        base.templateName = tplName;
        base.templateVersion = (template.version as string) ?? '1.0.0';
      }

      // Build body: template base → CLI overrides (last wins)
      const body: Record<string, unknown> = {
        ...base,
        name,
      };
      // Note: description uses an explicit undefined check so that an empty
      // string from -d '' still overrides the template's description.
      if (opts.description !== undefined) body.description = opts.description;
      if (opts.transport) body.transport = opts.transport;
      if (opts.replicas) body.replicas = parseInt(opts.replicas, 10);
      if (opts.packageName) body.packageName = opts.packageName;
      if (opts.dockerImage) body.dockerImage = opts.dockerImage;
      if (opts.repositoryUrl) body.repositoryUrl = opts.repositoryUrl;
      if (opts.externalUrl) body.externalUrl = opts.externalUrl;
      if (opts.command.length > 0) body.command = opts.command;
      if (opts.containerPort) body.containerPort = parseInt(opts.containerPort, 10);
      if (opts.env.length > 0) {
        // Merge: CLI env entries override template env entries by name
        const cliEnv = parseServerEnv(opts.env);
        const existing = (body.env as ServerEnvEntry[] | undefined) ?? [];
        const merged = [...existing];
        for (const entry of cliEnv) {
          const idx = merged.findIndex((e) => e.name === entry.name);
          if (idx >= 0) {
            merged[idx] = entry;
          } else {
            merged.push(entry);
          }
        }
        body.env = merged;
      }

      // Defaults when no template
      if (!opts.fromTemplate) {
        if (body.description === undefined) body.description = '';
        if (!body.transport) body.transport = 'STDIO';
        if (!body.replicas) body.replicas = 1;
      }

      try {
        const server = await client.post<{ id: string; name: string }>('/api/v1/servers', body);
        log(`server '${server.name}' created (id: ${server.id})`);
      } catch (err) {
        // --force upsert: on 409 conflict, resolve the existing server by
        // name and PUT an update (without the immutable name field).
        if (err instanceof ApiError && err.status === 409 && opts.force) {
          const existing = (await client.get<Array<{ id: string; name: string }>>('/api/v1/servers')).find((s) => s.name === name);
          if (!existing) throw err;
          const { name: _n, ...updateBody } = body;
          await client.put(`/api/v1/servers/${existing.id}`, updateBody);
          log(`server '${name}' updated (id: ${existing.id})`);
        } else {
          throw err;
        }
      }
    });

  // --- create secret ---
  cmd.command('secret')
    .description('Create a secret')
    .argument('<name>', 'Secret name (lowercase, hyphens allowed)')
    .option('--data <entry>', 'Secret data KEY=value (repeat for multiple)', collect, [])
    .option('--force', 'Update if already exists')
    .action(async (name: string, opts) => {
      const data = parseEnvEntries(opts.data);
      try {
        const secret = await client.post<{ id: string; name: string }>('/api/v1/secrets', {
          name,
          data,
        });
        log(`secret '${secret.name}' created (id: ${secret.id})`);
      } catch (err) {
        // --force upsert on 409, same pattern as `create server`.
        if (err instanceof ApiError && err.status === 409 && opts.force) {
          const existing = (await client.get<Array<{ id: string; name: string }>>('/api/v1/secrets')).find((s) => s.name === name);
          if (!existing) throw err;
          await client.put(`/api/v1/secrets/${existing.id}`, { data });
          log(`secret '${name}' updated (id: ${existing.id})`);
        } else {
          throw err;
        }
      }
    });

  // --- create project ---
  cmd.command('project')
    .description('Create a project')
    .argument('<name>', 'Project name')
    .option('-d, --description <text>', 'Project description', '')
    .option('--force', 'Update if already exists')
    .action(async (name: string, opts) => {
      try {
        const project = await client.post<{ id: string; name: string }>('/api/v1/projects', {
          name,
          description: opts.description,
        });
        log(`project '${project.name}' created (id: ${project.id})`);
      } catch (err) {
        // --force upsert on 409, same pattern as `create server`.
        if (err instanceof ApiError && err.status === 409 && opts.force) {
          const existing = (await client.get<Array<{ id: string; name: string }>>('/api/v1/projects')).find((p) => p.name === name);
          if (!existing) throw err;
          await client.put(`/api/v1/projects/${existing.id}`, { description: opts.description });
          log(`project '${name}' updated (id: ${existing.id})`);
        } else {
          throw err;
        }
      }
    });

  return cmd;
}
|
||||
33
src/cli/src/commands/delete.ts
Normal file
33
src/cli/src/commands/delete.ts
Normal file
@@ -0,0 +1,33 @@
|
||||
import { Command } from 'commander';
|
||||
import type { ApiClient } from '../api-client.js';
|
||||
import { resolveResource, resolveNameOrId } from './shared.js';
|
||||
|
||||
/** Injectable dependencies for the `delete` command. */
export interface DeleteCommandDeps {
  /** HTTP client used for the DELETE call and name resolution. */
  client: ApiClient;
  /** Output sink for the confirmation message. */
  log: (...args: unknown[]) => void;
}
|
||||
|
||||
export function createDeleteCommand(deps: DeleteCommandDeps): Command {
|
||||
const { client, log } = deps;
|
||||
|
||||
return new Command('delete')
|
||||
.description('Delete a resource (server, instance, profile, project)')
|
||||
.argument('<resource>', 'resource type')
|
||||
.argument('<id>', 'resource ID or name')
|
||||
.action(async (resourceArg: string, idOrName: string) => {
|
||||
const resource = resolveResource(resourceArg);
|
||||
|
||||
// Resolve name → ID for any resource type
|
||||
let id: string;
|
||||
try {
|
||||
id = await resolveNameOrId(client, resource, idOrName);
|
||||
} catch {
|
||||
id = idOrName; // Fall through with original
|
||||
}
|
||||
|
||||
await client.delete(`/api/v1/${resource}/${id}`);
|
||||
|
||||
const singular = resource.replace(/s$/, '');
|
||||
log(`${singular} '${idOrName}' deleted.`);
|
||||
});
|
||||
}
|
||||
349
src/cli/src/commands/describe.ts
Normal file
349
src/cli/src/commands/describe.ts
Normal file
@@ -0,0 +1,349 @@
|
||||
import { Command } from 'commander';
|
||||
import { formatJson, formatYaml } from '../formatters/output.js';
|
||||
import { resolveResource, resolveNameOrId } from './shared.js';
|
||||
import type { ApiClient } from '../api-client.js';
|
||||
|
||||
/** Injectable dependencies for the `describe` command. */
export interface DescribeCommandDeps {
  /** HTTP client used for name resolution and instance lookup. */
  client: ApiClient;
  /** Fetches the resource body to describe. */
  fetchResource: (resource: string, id: string) => Promise<unknown>;
  /** Optional: fetch container inspect data for an instance ID. */
  fetchInspect?: (id: string) => Promise<unknown>;
  /** Output sink for formatted output. */
  log: (...args: string[]) => void;
}
|
||||
|
||||
function pad(label: string, width = 18): string {
|
||||
return label.padEnd(width);
|
||||
}
|
||||
|
||||
/**
 * Render a server definition as sectioned, human-readable text.
 *
 * Sections (each emitted only when the server has matching data):
 * header/basics, Command, Environment (inline values or secret refs),
 * Health Check, and Metadata. Returns the joined lines (no trailing newline).
 */
function formatServerDetail(server: Record<string, unknown>): string {
  const lines: string[] = [];
  lines.push(`=== Server: ${server.name} ===`);
  lines.push(`${pad('Name:')}${server.name}`);
  lines.push(`${pad('Transport:')}${server.transport ?? '-'}`);
  lines.push(`${pad('Replicas:')}${server.replicas ?? 1}`);
  // Optional one-line fields, shown only when present/truthy.
  if (server.dockerImage) lines.push(`${pad('Docker Image:')}${server.dockerImage}`);
  if (server.packageName) lines.push(`${pad('Package:')}${server.packageName}`);
  if (server.externalUrl) lines.push(`${pad('External URL:')}${server.externalUrl}`);
  if (server.repositoryUrl) lines.push(`${pad('Repository:')}${server.repositoryUrl}`);
  if (server.containerPort) lines.push(`${pad('Container Port:')}${server.containerPort}`);
  if (server.description) lines.push(`${pad('Description:')}${server.description}`);

  const command = server.command as string[] | null;
  if (command && command.length > 0) {
    lines.push('');
    lines.push('Command:');
    lines.push(`  ${command.join(' ')}`);
  }

  const env = server.env as Array<{ name: string; value?: string; valueFrom?: { secretRef: { name: string; key: string } } }> | undefined;
  if (env && env.length > 0) {
    lines.push('');
    lines.push('Environment:');
    // Column width sized to the longest env name (min 6), plus 2 spaces gutter.
    const nameW = Math.max(6, ...env.map((e) => e.name.length)) + 2;
    lines.push(`  ${'NAME'.padEnd(nameW)}SOURCE`);
    for (const e of env) {
      if (e.value !== undefined) {
        lines.push(`  ${e.name.padEnd(nameW)}${e.value}`);
      } else if (e.valueFrom?.secretRef) {
        const ref = e.valueFrom.secretRef;
        lines.push(`  ${e.name.padEnd(nameW)}secret:${ref.name}/${ref.key}`);
      }
      // NOTE(review): an entry with neither value nor secretRef is silently
      // skipped — presumably such entries cannot occur; confirm upstream.
    }
  }

  const hc = server.healthCheck as { tool: string; arguments?: Record<string, unknown>; intervalSeconds?: number; timeoutSeconds?: number; failureThreshold?: number } | null;
  if (hc) {
    lines.push('');
    lines.push('Health Check:');
    lines.push(`  ${pad('Tool:', 22)}${hc.tool}`);
    if (hc.arguments && Object.keys(hc.arguments).length > 0) {
      lines.push(`  ${pad('Arguments:', 22)}${JSON.stringify(hc.arguments)}`);
    }
    // Display defaults (60s/10s/3) applied only for rendering missing fields.
    lines.push(`  ${pad('Interval:', 22)}${hc.intervalSeconds ?? 60}s`);
    lines.push(`  ${pad('Timeout:', 22)}${hc.timeoutSeconds ?? 10}s`);
    lines.push(`  ${pad('Failure Threshold:', 22)}${hc.failureThreshold ?? 3}`);
  }

  lines.push('');
  lines.push('Metadata:');
  lines.push(`  ${pad('ID:', 12)}${server.id}`);
  if (server.createdAt) lines.push(`  ${pad('Created:', 12)}${server.createdAt}`);
  if (server.updatedAt) lines.push(`  ${pad('Updated:', 12)}${server.updatedAt}`);

  return lines.join('\n');
}
|
||||
|
||||
/**
 * Render an instance as sectioned, human-readable text.
 *
 * Sections: header/basics, Health, Metadata (instance.metadata map),
 * Container (flattened inspect data, when provided), Events, and trailing
 * ID/timestamps. Returns the joined lines (no trailing newline).
 *
 * @param instance - Raw instance object from the API.
 * @param inspect - Optional container inspect payload to render flat.
 */
function formatInstanceDetail(instance: Record<string, unknown>, inspect?: Record<string, unknown>): string {
  const lines: string[] = [];
  const server = instance.server as { name: string } | undefined;
  // Header prefers the embedded server name, falling back to the instance ID.
  lines.push(`=== Instance: ${server?.name ?? instance.id} ===`);
  lines.push(`${pad('Status:')}${instance.status}`);
  lines.push(`${pad('Server:')}${server?.name ?? String(instance.serverId)}`);
  lines.push(`${pad('Container ID:')}${instance.containerId ?? '-'}`);
  lines.push(`${pad('Port:')}${instance.port ?? '-'}`);

  // Health section
  const healthStatus = instance.healthStatus as string | null;
  const lastHealthCheck = instance.lastHealthCheck as string | null;
  if (healthStatus || lastHealthCheck) {
    lines.push('');
    lines.push('Health:');
    lines.push(`  ${pad('Status:', 16)}${healthStatus ?? 'unknown'}`);
    if (lastHealthCheck) lines.push(`  ${pad('Last Check:', 16)}${lastHealthCheck}`);
  }

  const metadata = instance.metadata as Record<string, unknown> | undefined;
  if (metadata && Object.keys(metadata).length > 0) {
    lines.push('');
    lines.push('Metadata:');
    for (const [key, value] of Object.entries(metadata)) {
      lines.push(`  ${pad(key + ':', 16)}${String(value)}`);
    }
  }

  if (inspect) {
    lines.push('');
    lines.push('Container:');
    for (const [key, value] of Object.entries(inspect)) {
      // Nested objects are rendered as one-line JSON; scalars as padded rows.
      if (typeof value === 'object' && value !== null) {
        lines.push(`  ${key}: ${JSON.stringify(value)}`);
      } else {
        lines.push(`  ${pad(key + ':', 16)}${String(value)}`);
      }
    }
  }

  // Events section (k8s-style)
  const events = instance.events as Array<{ timestamp: string; type: string; message: string }> | undefined;
  if (events && events.length > 0) {
    lines.push('');
    lines.push('Events:');
    const tsW = 26;
    const typeW = 10;
    lines.push(`  ${'TIMESTAMP'.padEnd(tsW)}${'TYPE'.padEnd(typeW)}MESSAGE`);
    for (const ev of events) {
      lines.push(`  ${(ev.timestamp ?? '').padEnd(tsW)}${(ev.type ?? '').padEnd(typeW)}${ev.message ?? ''}`);
    }
  }

  lines.push('');
  lines.push(`  ${pad('ID:', 12)}${instance.id}`);
  if (instance.createdAt) lines.push(`  ${pad('Created:', 12)}${instance.createdAt}`);
  if (instance.updatedAt) lines.push(`  ${pad('Updated:', 12)}${instance.updatedAt}`);

  return lines.join('\n');
}
|
||||
|
||||
function formatProjectDetail(project: Record<string, unknown>): string {
|
||||
const lines: string[] = [];
|
||||
lines.push(`=== Project: ${project.name} ===`);
|
||||
lines.push(`${pad('Name:')}${project.name}`);
|
||||
if (project.description) lines.push(`${pad('Description:')}${project.description}`);
|
||||
if (project.ownerId) lines.push(`${pad('Owner:')}${project.ownerId}`);
|
||||
|
||||
lines.push('');
|
||||
lines.push('Metadata:');
|
||||
lines.push(` ${pad('ID:', 12)}${project.id}`);
|
||||
if (project.createdAt) lines.push(` ${pad('Created:', 12)}${project.createdAt}`);
|
||||
if (project.updatedAt) lines.push(` ${pad('Updated:', 12)}${project.updatedAt}`);
|
||||
|
||||
return lines.join('\n');
|
||||
}
|
||||
|
||||
function formatSecretDetail(secret: Record<string, unknown>, showValues: boolean): string {
|
||||
const lines: string[] = [];
|
||||
lines.push(`=== Secret: ${secret.name} ===`);
|
||||
lines.push(`${pad('Name:')}${secret.name}`);
|
||||
|
||||
const data = secret.data as Record<string, string> | undefined;
|
||||
if (data && Object.keys(data).length > 0) {
|
||||
lines.push('');
|
||||
lines.push('Data:');
|
||||
const keyW = Math.max(4, ...Object.keys(data).map((k) => k.length)) + 2;
|
||||
for (const [key, value] of Object.entries(data)) {
|
||||
const display = showValues ? value : '***';
|
||||
lines.push(` ${key.padEnd(keyW)}${display}`);
|
||||
}
|
||||
if (!showValues) {
|
||||
lines.push('');
|
||||
lines.push(' (use --show-values to reveal)');
|
||||
}
|
||||
} else {
|
||||
lines.push(`${pad('Data:')}(empty)`);
|
||||
}
|
||||
|
||||
lines.push('');
|
||||
lines.push('Metadata:');
|
||||
lines.push(` ${pad('ID:', 12)}${secret.id}`);
|
||||
if (secret.createdAt) lines.push(` ${pad('Created:', 12)}${secret.createdAt}`);
|
||||
if (secret.updatedAt) lines.push(` ${pad('Updated:', 12)}${secret.updatedAt}`);
|
||||
|
||||
return lines.join('\n');
|
||||
}
|
||||
|
||||
/**
 * Render a template as sectioned, human-readable text.
 *
 * Sections: header/basics (with display defaults for version/transport/
 * replicas), Command, Environment Variables (name/required/description
 * table), Health Check, a Usage hint showing the `--from-template` command,
 * and Metadata. Returns the joined lines (no trailing newline).
 */
function formatTemplateDetail(template: Record<string, unknown>): string {
  const lines: string[] = [];
  lines.push(`=== Template: ${template.name} ===`);
  lines.push(`${pad('Name:')}${template.name}`);
  // Display-only fallbacks when the template omits these fields.
  lines.push(`${pad('Version:')}${template.version ?? '1.0.0'}`);
  lines.push(`${pad('Transport:')}${template.transport ?? 'STDIO'}`);
  lines.push(`${pad('Replicas:')}${template.replicas ?? 1}`);
  if (template.dockerImage) lines.push(`${pad('Docker Image:')}${template.dockerImage}`);
  if (template.packageName) lines.push(`${pad('Package:')}${template.packageName}`);
  if (template.externalUrl) lines.push(`${pad('External URL:')}${template.externalUrl}`);
  if (template.repositoryUrl) lines.push(`${pad('Repository:')}${template.repositoryUrl}`);
  if (template.containerPort) lines.push(`${pad('Container Port:')}${template.containerPort}`);
  if (template.description) lines.push(`${pad('Description:')}${template.description}`);

  const command = template.command as string[] | null;
  if (command && command.length > 0) {
    lines.push('');
    lines.push('Command:');
    lines.push(`  ${command.join(' ')}`);
  }

  const env = template.env as Array<{ name: string; description?: string; required?: boolean; defaultValue?: string }> | undefined;
  if (env && env.length > 0) {
    lines.push('');
    lines.push('Environment Variables:');
    // Name column sized to the longest env name (min 6), plus 2 spaces gutter.
    const nameW = Math.max(6, ...env.map((e) => e.name.length)) + 2;
    lines.push(`  ${'NAME'.padEnd(nameW)}${'REQUIRED'.padEnd(10)}DESCRIPTION`);
    for (const e of env) {
      const req = e.required ? 'yes' : 'no';
      const desc = e.description ?? '';
      lines.push(`  ${e.name.padEnd(nameW)}${req.padEnd(10)}${desc}`);
    }
  }

  const hc = template.healthCheck as { tool: string; arguments?: Record<string, unknown>; intervalSeconds?: number; timeoutSeconds?: number; failureThreshold?: number } | null;
  if (hc) {
    lines.push('');
    lines.push('Health Check:');
    lines.push(`  ${pad('Tool:', 22)}${hc.tool}`);
    if (hc.arguments && Object.keys(hc.arguments).length > 0) {
      lines.push(`  ${pad('Arguments:', 22)}${JSON.stringify(hc.arguments)}`);
    }
    // Display defaults (60s/10s/3) applied only for rendering missing fields.
    lines.push(`  ${pad('Interval:', 22)}${hc.intervalSeconds ?? 60}s`);
    lines.push(`  ${pad('Timeout:', 22)}${hc.timeoutSeconds ?? 10}s`);
    lines.push(`  ${pad('Failure Threshold:', 22)}${hc.failureThreshold ?? 3}`);
  }

  lines.push('');
  lines.push('Usage:');
  lines.push(`  mcpctl create server my-${template.name} --from-template=${template.name}`);

  lines.push('');
  lines.push('Metadata:');
  lines.push(`  ${pad('ID:', 12)}${template.id}`);
  if (template.createdAt) lines.push(`  ${pad('Created:', 12)}${template.createdAt}`);
  if (template.updatedAt) lines.push(`  ${pad('Updated:', 12)}${template.updatedAt}`);

  return lines.join('\n');
}
|
||||
|
||||
function formatGenericDetail(obj: Record<string, unknown>): string {
|
||||
const lines: string[] = [];
|
||||
for (const [key, value] of Object.entries(obj)) {
|
||||
if (value === null || value === undefined) {
|
||||
lines.push(`${pad(key + ':')} -`);
|
||||
} else if (Array.isArray(value)) {
|
||||
if (value.length === 0) {
|
||||
lines.push(`${pad(key + ':')} []`);
|
||||
} else {
|
||||
lines.push(`${key}:`);
|
||||
for (const item of value) {
|
||||
lines.push(` - ${typeof item === 'object' ? JSON.stringify(item) : String(item)}`);
|
||||
}
|
||||
}
|
||||
} else if (typeof value === 'object') {
|
||||
lines.push(`${key}:`);
|
||||
for (const [k, v] of Object.entries(value as Record<string, unknown>)) {
|
||||
lines.push(` ${pad(k + ':')}${String(v)}`);
|
||||
}
|
||||
} else {
|
||||
lines.push(`${pad(key + ':')}${String(value)}`);
|
||||
}
|
||||
}
|
||||
return lines.join('\n');
|
||||
}
|
||||
|
||||
/**
 * Build the `describe` command: fetches a resource and renders it as detail
 * text (default), JSON, or YAML.
 *
 * Name resolution: for instances, the argument may be an instance ID or a
 * server name (resolved to the first RUNNING instance, else the first
 * instance); for other resources resolution is best-effort with the raw
 * argument as fallback. Instances with a container ID are additionally
 * enriched with inspect data when `deps.fetchInspect` is provided.
 */
export function createDescribeCommand(deps: DescribeCommandDeps): Command {
  return new Command('describe')
    .description('Show detailed information about a resource')
    .argument('<resource>', 'resource type (server, project, instance)')
    .argument('<id>', 'resource ID or name')
    .option('-o, --output <format>', 'output format (detail, json, yaml)', 'detail')
    .option('--show-values', 'Show secret values (default: masked)')
    .action(async (resourceArg: string, idOrName: string, opts: { output: string; showValues?: boolean }) => {
      const resource = resolveResource(resourceArg);

      // Resolve name → ID
      let id: string;
      if (resource === 'instances') {
        // Instances: accept instance ID or server name (resolve to first running instance)
        try {
          id = await resolveNameOrId(deps.client, resource, idOrName);
        } catch {
          // Not an instance ID — try as server name
          const servers = await deps.client.get<Array<{ id: string; name: string }>>('/api/v1/servers');
          const server = servers.find((s) => s.name === idOrName || s.id === idOrName);
          if (server) {
            const instances = await deps.client.get<Array<{ id: string; status: string }>>(`/api/v1/instances?serverId=${server.id}`);
            // Prefer a RUNNING instance; otherwise fall back to the first one.
            const running = instances.find((i) => i.status === 'RUNNING') ?? instances[0];
            if (running) {
              id = running.id;
            } else {
              throw new Error(`No instances found for server '${idOrName}'`);
            }
          } else {
            id = idOrName;
          }
        }
      } else {
        // Other resources: best-effort resolution, raw argument as fallback.
        try {
          id = await resolveNameOrId(deps.client, resource, idOrName);
        } catch {
          id = idOrName;
        }
      }

      const item = await deps.fetchResource(resource, id) as Record<string, unknown>;

      // Enrich instances with container inspect data
      let inspect: Record<string, unknown> | undefined;
      if (resource === 'instances' && deps.fetchInspect && item.containerId) {
        try {
          inspect = await deps.fetchInspect(id) as Record<string, unknown>;
          item.containerInspect = inspect;
        } catch {
          // Container may not be available
        }
      }

      if (opts.output === 'json') {
        deps.log(formatJson(item));
      } else if (opts.output === 'yaml') {
        deps.log(formatYaml(item));
      } else {
        // Visually clean sectioned output
        switch (resource) {
          case 'servers':
            deps.log(formatServerDetail(item));
            break;
          case 'instances':
            deps.log(formatInstanceDetail(item, inspect));
            break;
          case 'secrets':
            deps.log(formatSecretDetail(item, opts.showValues === true));
            break;
          case 'templates':
            deps.log(formatTemplateDetail(item));
            break;
          case 'projects':
            deps.log(formatProjectDetail(item));
            break;
          default:
            deps.log(formatGenericDetail(item));
        }
      }
    });
}
|
||||
114
src/cli/src/commands/edit.ts
Normal file
114
src/cli/src/commands/edit.ts
Normal file
@@ -0,0 +1,114 @@
|
||||
import { Command } from 'commander';
|
||||
import { writeFileSync, readFileSync, unlinkSync, mkdtempSync } from 'node:fs';
|
||||
import { join } from 'node:path';
|
||||
import { tmpdir } from 'node:os';
|
||||
import { execSync } from 'node:child_process';
|
||||
import yaml from 'js-yaml';
|
||||
import type { ApiClient } from '../api-client.js';
|
||||
import { resolveResource, resolveNameOrId, stripInternalFields } from './shared.js';
|
||||
|
||||
/** Injectable dependencies for the `edit` command (editor hooks are test seams). */
export interface EditCommandDeps {
  /** HTTP client used to fetch and update the resource. */
  client: ApiClient;
  /** Output sink for messages and errors. */
  log: (...args: unknown[]) => void;
  /** Override for testing — return editor binary name. */
  getEditor?: () => string;
  /** Override for testing — simulate opening the editor. */
  openEditor?: (filePath: string, editor: string) => void;
}
|
||||
|
||||
function getEditor(deps: EditCommandDeps): string {
|
||||
if (deps.getEditor) return deps.getEditor();
|
||||
return process.env.VISUAL ?? process.env.EDITOR ?? 'vi';
|
||||
}
|
||||
|
||||
function openEditor(filePath: string, editor: string, deps: EditCommandDeps): void {
|
||||
if (deps.openEditor) {
|
||||
deps.openEditor(filePath, editor);
|
||||
return;
|
||||
}
|
||||
execSync(`${editor} "${filePath}"`, { stdio: 'inherit' });
|
||||
}
|
||||
|
||||
export function createEditCommand(deps: EditCommandDeps): Command {
|
||||
const { client, log } = deps;
|
||||
|
||||
return new Command('edit')
|
||||
.description('Edit a resource in your default editor (server, project)')
|
||||
.argument('<resource>', 'Resource type (server, project)')
|
||||
.argument('<name-or-id>', 'Resource name or ID')
|
||||
.action(async (resourceArg: string, nameOrId: string) => {
|
||||
const resource = resolveResource(resourceArg);
|
||||
|
||||
// Instances are immutable
|
||||
if (resource === 'instances') {
|
||||
log('Error: instances are immutable and cannot be edited.');
|
||||
log('To change an instance, update the server definition and let reconciliation handle it.');
|
||||
process.exitCode = 1;
|
||||
return;
|
||||
}
|
||||
|
||||
const validResources = ['servers', 'secrets', 'projects'];
|
||||
if (!validResources.includes(resource)) {
|
||||
log(`Error: unknown resource type '${resourceArg}'`);
|
||||
process.exitCode = 1;
|
||||
return;
|
||||
}
|
||||
|
||||
// Resolve name → ID
|
||||
const id = await resolveNameOrId(client, resource, nameOrId);
|
||||
|
||||
// Fetch current state
|
||||
const current = await client.get<Record<string, unknown>>(`/api/v1/${resource}/${id}`);
|
||||
|
||||
// Strip read-only fields for editor
|
||||
const editable = stripInternalFields(current);
|
||||
|
||||
// Serialize to YAML
|
||||
const singular = resource.replace(/s$/, '');
|
||||
const header = `# Editing ${singular}: ${nameOrId}\n# Save and close to apply changes. Clear the file to cancel.\n`;
|
||||
const originalYaml = yaml.dump(editable, { lineWidth: 120, noRefs: true });
|
||||
const content = header + originalYaml;
|
||||
|
||||
// Write to temp file
|
||||
const tmpDir = mkdtempSync(join(tmpdir(), 'mcpctl-edit-'));
|
||||
const tmpFile = join(tmpDir, `${singular}-${nameOrId}.yaml`);
|
||||
writeFileSync(tmpFile, content, 'utf-8');
|
||||
|
||||
try {
|
||||
// Open editor
|
||||
const editor = getEditor(deps);
|
||||
openEditor(tmpFile, editor, deps);
|
||||
|
||||
// Read back
|
||||
const modified = readFileSync(tmpFile, 'utf-8');
|
||||
|
||||
// Strip comments for comparison
|
||||
const modifiedClean = modified
|
||||
.split('\n')
|
||||
.filter((line) => !line.startsWith('#'))
|
||||
.join('\n')
|
||||
.trim();
|
||||
|
||||
if (!modifiedClean) {
|
||||
log('Edit cancelled (empty file).');
|
||||
return;
|
||||
}
|
||||
|
||||
if (modifiedClean === originalYaml.trim()) {
|
||||
log(`${singular} '${nameOrId}' unchanged.`);
|
||||
return;
|
||||
}
|
||||
|
||||
// Parse and apply
|
||||
const updates = yaml.load(modifiedClean) as Record<string, unknown>;
|
||||
await client.put(`/api/v1/${resource}/${id}`, updates);
|
||||
log(`${singular} '${nameOrId}' updated.`);
|
||||
} finally {
|
||||
try {
|
||||
unlinkSync(tmpFile);
|
||||
} catch {
|
||||
// Ignore cleanup errors
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
146
src/cli/src/commands/get.ts
Normal file
146
src/cli/src/commands/get.ts
Normal file
@@ -0,0 +1,146 @@
|
||||
import { Command } from 'commander';
|
||||
import { formatTable } from '../formatters/table.js';
|
||||
import { formatJson, formatYaml } from '../formatters/output.js';
|
||||
import type { Column } from '../formatters/table.js';
|
||||
import { resolveResource, stripInternalFields } from './shared.js';
|
||||
|
||||
/** Injected dependencies for the `get` command (testable without a live API). */
export interface GetCommandDeps {
  // Returns all items for a resource, or a one-element array when id is given.
  fetchResource: (resource: string, id?: string) => Promise<unknown[]>;
  log: (...args: string[]) => void;
}
|
||||
|
||||
// Row shapes: the subset of each API resource's fields that the table renderer reads.
interface ServerRow {
  id: string;
  name: string;
  transport: string;
  packageName: string | null;
  dockerImage: string | null;
}

interface ProjectRow {
  id: string;
  name: string;
  description: string;
  ownerId: string;
}

interface SecretRow {
  id: string;
  name: string;
  data: Record<string, string>;
}

interface TemplateRow {
  id: string;
  name: string;
  version: string;
  transport: string;
  packageName: string | null;
  description: string;
}

interface InstanceRow {
  id: string;
  serverId: string;
  server?: { name: string };
  status: string;
  containerId: string | null;
  port: number | null;
  healthStatus: string | null;
}

// Column layouts per resource. Nullable fields render as '-'; explicit widths
// cap long text (the renderer truncates with an ellipsis).
const serverColumns: Column<ServerRow>[] = [
  { header: 'NAME', key: 'name' },
  { header: 'TRANSPORT', key: 'transport', width: 16 },
  { header: 'PACKAGE', key: (r) => r.packageName ?? '-' },
  { header: 'IMAGE', key: (r) => r.dockerImage ?? '-' },
  { header: 'ID', key: 'id' },
];

const projectColumns: Column<ProjectRow>[] = [
  { header: 'NAME', key: 'name' },
  { header: 'DESCRIPTION', key: 'description', width: 40 },
  { header: 'OWNER', key: 'ownerId' },
  { header: 'ID', key: 'id' },
];

// Secrets list only their key names, never the secret values.
const secretColumns: Column<SecretRow>[] = [
  { header: 'NAME', key: 'name' },
  { header: 'KEYS', key: (r) => Object.keys(r.data).join(', ') || '-', width: 40 },
  { header: 'ID', key: 'id' },
];

const templateColumns: Column<TemplateRow>[] = [
  { header: 'NAME', key: 'name' },
  { header: 'VERSION', key: 'version', width: 10 },
  { header: 'TRANSPORT', key: 'transport', width: 16 },
  { header: 'PACKAGE', key: (r) => r.packageName ?? '-' },
  { header: 'DESCRIPTION', key: 'description', width: 50 },
];

// Instance rows show the parent server's name and a short (12-char) container ID.
const instanceColumns: Column<InstanceRow>[] = [
  { header: 'NAME', key: (r) => r.server?.name ?? '-', width: 20 },
  { header: 'STATUS', key: 'status', width: 10 },
  { header: 'HEALTH', key: (r) => r.healthStatus ?? '-', width: 10 },
  { header: 'PORT', key: (r) => r.port != null ? String(r.port) : '-', width: 6 },
  { header: 'CONTAINER', key: (r) => r.containerId ? r.containerId.slice(0, 12) : '-', width: 14 },
  { header: 'ID', key: 'id' },
];
|
||||
|
||||
function getColumnsForResource(resource: string): Column<Record<string, unknown>>[] {
|
||||
switch (resource) {
|
||||
case 'servers':
|
||||
return serverColumns as unknown as Column<Record<string, unknown>>[];
|
||||
case 'projects':
|
||||
return projectColumns as unknown as Column<Record<string, unknown>>[];
|
||||
case 'secrets':
|
||||
return secretColumns as unknown as Column<Record<string, unknown>>[];
|
||||
case 'templates':
|
||||
return templateColumns as unknown as Column<Record<string, unknown>>[];
|
||||
case 'instances':
|
||||
return instanceColumns as unknown as Column<Record<string, unknown>>[];
|
||||
default:
|
||||
return [
|
||||
{ header: 'ID', key: 'id' as keyof Record<string, unknown> },
|
||||
{ header: 'NAME', key: 'name' as keyof Record<string, unknown> },
|
||||
];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Transform API response items into apply-compatible format.
|
||||
* Strips internal fields and wraps in the resource key.
|
||||
*/
|
||||
function toApplyFormat(resource: string, items: unknown[]): Record<string, unknown[]> {
|
||||
const cleaned = items.map((item) => {
|
||||
return stripInternalFields(item as Record<string, unknown>);
|
||||
});
|
||||
return { [resource]: cleaned };
|
||||
}
|
||||
|
||||
export function createGetCommand(deps: GetCommandDeps): Command {
|
||||
return new Command('get')
|
||||
.description('List resources (servers, projects, instances)')
|
||||
.argument('<resource>', 'resource type (servers, projects, instances)')
|
||||
.argument('[id]', 'specific resource ID or name')
|
||||
.option('-o, --output <format>', 'output format (table, json, yaml)', 'table')
|
||||
.action(async (resourceArg: string, id: string | undefined, opts: { output: string }) => {
|
||||
const resource = resolveResource(resourceArg);
|
||||
const items = await deps.fetchResource(resource, id);
|
||||
|
||||
if (opts.output === 'json') {
|
||||
// Apply-compatible JSON wrapped in resource key
|
||||
deps.log(formatJson(toApplyFormat(resource, items)));
|
||||
} else if (opts.output === 'yaml') {
|
||||
// Apply-compatible YAML wrapped in resource key
|
||||
deps.log(formatYaml(toApplyFormat(resource, items)));
|
||||
} else {
|
||||
if (items.length === 0) {
|
||||
deps.log(`No ${resource} found.`);
|
||||
return;
|
||||
}
|
||||
const columns = getColumnsForResource(resource);
|
||||
deps.log(formatTable(items as Record<string, unknown>[], columns));
|
||||
}
|
||||
});
|
||||
}
|
||||
98
src/cli/src/commands/logs.ts
Normal file
98
src/cli/src/commands/logs.ts
Normal file
@@ -0,0 +1,98 @@
|
||||
import { Command } from 'commander';
|
||||
import type { ApiClient } from '../api-client.js';
|
||||
|
||||
/** Injected dependencies for the `logs` command. */
export interface LogsCommandDeps {
  client: ApiClient;
  log: (...args: unknown[]) => void;
}

/** Minimal instance shape needed for replica selection. */
interface InstanceInfo {
  id: string;
  status: string;
  containerId: string | null;
}
|
||||
|
||||
/**
 * Resolve a name/ID to an instance ID.
 * Accepts: instance ID, server name, or server ID.
 * For servers with multiple replicas, picks by --instance index or first RUNNING.
 */
async function resolveInstance(
  client: ApiClient,
  nameOrId: string,
  instanceIndex?: number,
): Promise<{ instanceId: string; serverName?: string; replicaInfo?: string }> {
  // Try as instance ID first — a successful GET means it exists as-is.
  try {
    await client.get(`/api/v1/instances/${nameOrId}`);
    return { instanceId: nameOrId };
  } catch {
    // Not a valid instance ID; fall through to server-name resolution.
  }

  // Try as server name/ID → find its instances
  const servers = await client.get<Array<{ id: string; name: string }>>('/api/v1/servers');
  const server = servers.find((s) => s.name === nameOrId || s.id === nameOrId);
  if (!server) {
    throw new Error(`Instance or server '${nameOrId}' not found`);
  }

  const instances = await client.get<InstanceInfo[]>(`/api/v1/instances?serverId=${server.id}`);
  if (instances.length === 0) {
    throw new Error(`No instances found for server '${server.name}'`);
  }

  // Select by index or pick first running
  let selected: InstanceInfo | undefined;
  if (instanceIndex !== undefined) {
    // NOTE(review): a NaN index passes both range checks and selects
    // `undefined` — the caller should validate numeric input before calling.
    if (instanceIndex < 0 || instanceIndex >= instances.length) {
      throw new Error(`Instance index ${instanceIndex} out of range (server '${server.name}' has ${instances.length} instance${instances.length > 1 ? 's' : ''})`);
    }
    selected = instances[instanceIndex];
  } else {
    selected = instances.find((i) => i.status === 'RUNNING') ?? instances[0];
  }

  if (!selected) {
    throw new Error(`No instances found for server '${server.name}'`);
  }

  const result: { instanceId: string; serverName?: string; replicaInfo?: string } = {
    instanceId: selected.id,
    serverName: server.name,
  };
  if (instances.length > 1) {
    // 1-based "instance 2/3" label for human-facing output.
    result.replicaInfo = `instance ${instances.indexOf(selected) + 1}/${instances.length}`;
  }
  return result;
}
|
||||
|
||||
export function createLogsCommand(deps: LogsCommandDeps): Command {
|
||||
const { client, log } = deps;
|
||||
|
||||
return new Command('logs')
|
||||
.description('Get logs from an MCP server instance')
|
||||
.argument('<name>', 'Server name, server ID, or instance ID')
|
||||
.option('-t, --tail <lines>', 'Number of lines to show')
|
||||
.option('-i, --instance <index>', 'Instance/replica index (0-based, for servers with multiple replicas)')
|
||||
.action(async (nameOrId: string, opts: { tail?: string; instance?: string }) => {
|
||||
const instanceIndex = opts.instance !== undefined ? parseInt(opts.instance, 10) : undefined;
|
||||
const { instanceId, serverName, replicaInfo } = await resolveInstance(client, nameOrId, instanceIndex);
|
||||
|
||||
if (replicaInfo) {
|
||||
process.stderr.write(`Showing logs for ${serverName} (${replicaInfo})\n`);
|
||||
}
|
||||
|
||||
let url = `/api/v1/instances/${instanceId}/logs`;
|
||||
if (opts.tail) {
|
||||
url += `?tail=${opts.tail}`;
|
||||
}
|
||||
const logs = await client.get<{ stdout: string; stderr: string }>(url);
|
||||
if (logs.stdout) {
|
||||
log(logs.stdout);
|
||||
}
|
||||
if (logs.stderr) {
|
||||
process.stderr.write(logs.stderr);
|
||||
}
|
||||
});
|
||||
}
|
||||
15
src/cli/src/commands/project.ts
Normal file
15
src/cli/src/commands/project.ts
Normal file
@@ -0,0 +1,15 @@
|
||||
import { Command } from 'commander';
|
||||
import type { ApiClient } from '../api-client.js';
|
||||
|
||||
export interface ProjectCommandDeps {
|
||||
client: ApiClient;
|
||||
log: (...args: unknown[]) => void;
|
||||
}
|
||||
|
||||
export function createProjectCommand(_deps: ProjectCommandDeps): Command {
|
||||
const cmd = new Command('project')
|
||||
.alias('proj')
|
||||
.description('Project-specific actions (create with "create project", list with "get projects")');
|
||||
|
||||
return cmd;
|
||||
}
|
||||
44
src/cli/src/commands/shared.ts
Normal file
44
src/cli/src/commands/shared.ts
Normal file
@@ -0,0 +1,44 @@
|
||||
import type { ApiClient } from '../api-client.js';
|
||||
|
||||
export const RESOURCE_ALIASES: Record<string, string> = {
|
||||
server: 'servers',
|
||||
srv: 'servers',
|
||||
project: 'projects',
|
||||
proj: 'projects',
|
||||
instance: 'instances',
|
||||
inst: 'instances',
|
||||
secret: 'secrets',
|
||||
sec: 'secrets',
|
||||
template: 'templates',
|
||||
tpl: 'templates',
|
||||
};
|
||||
|
||||
export function resolveResource(name: string): string {
|
||||
const lower = name.toLowerCase();
|
||||
return RESOURCE_ALIASES[lower] ?? lower;
|
||||
}
|
||||
|
||||
/** Resolve a name-or-ID to an ID. CUIDs pass through; names are looked up. */
|
||||
export async function resolveNameOrId(
|
||||
client: ApiClient,
|
||||
resource: string,
|
||||
nameOrId: string,
|
||||
): Promise<string> {
|
||||
// CUIDs start with 'c' followed by 24+ alphanumeric chars
|
||||
if (/^c[a-z0-9]{24}/.test(nameOrId)) {
|
||||
return nameOrId;
|
||||
}
|
||||
const items = await client.get<Array<{ id: string; name: string }>>(`/api/v1/${resource}`);
|
||||
const match = items.find((item) => item.name === nameOrId);
|
||||
if (match) return match.id;
|
||||
throw new Error(`${resource.replace(/s$/, '')} '${nameOrId}' not found`);
|
||||
}
|
||||
|
||||
/** Strip internal/read-only fields from an API response to make it apply-compatible. */
|
||||
export function stripInternalFields(obj: Record<string, unknown>): Record<string, unknown> {
|
||||
const result = { ...obj };
|
||||
for (const key of ['id', 'createdAt', 'updatedAt', 'version', 'ownerId']) {
|
||||
delete result[key];
|
||||
}
|
||||
return result;
|
||||
}
|
||||
77
src/cli/src/commands/status.ts
Normal file
77
src/cli/src/commands/status.ts
Normal file
@@ -0,0 +1,77 @@
|
||||
import { Command } from 'commander';
|
||||
import http from 'node:http';
|
||||
import { loadConfig } from '../config/index.js';
|
||||
import type { ConfigLoaderDeps } from '../config/index.js';
|
||||
import { loadCredentials } from '../auth/index.js';
|
||||
import type { CredentialsDeps } from '../auth/index.js';
|
||||
import { formatJson, formatYaml } from '../formatters/index.js';
|
||||
import { APP_VERSION } from '@mcpctl/shared';
|
||||
|
||||
/** Injected dependencies for the `status` command; all overridable in tests. */
export interface StatusCommandDeps {
  configDeps: Partial<ConfigLoaderDeps>;
  credentialsDeps: Partial<CredentialsDeps>;
  log: (...args: string[]) => void;
  // Probe a daemon base URL; resolves true when it responds healthily.
  checkHealth: (url: string) => Promise<boolean>;
}
|
||||
|
||||
// Probe `${url}/health` with a 3s timeout; resolves true for any 2xx/3xx.
// Never rejects — connection errors and timeouts resolve false so the status
// display degrades gracefully instead of crashing.
function defaultCheckHealth(url: string): Promise<boolean> {
  return new Promise((resolve) => {
    const req = http.get(`${url}/health`, { timeout: 3000 }, (res) => {
      resolve(res.statusCode !== undefined && res.statusCode >= 200 && res.statusCode < 400);
      res.resume(); // drain the body so the socket is released
    });
    req.on('error', () => resolve(false));
    req.on('timeout', () => {
      req.destroy(); // also fires 'error'; the duplicate resolve(false) is a no-op
      resolve(false);
    });
  });
}
|
||||
|
||||
// Production defaults; tests replace individual members via the deps parameter.
const defaultDeps: StatusCommandDeps = {
  configDeps: {},
  credentialsDeps: {},
  log: (...args) => console.log(...args),
  checkHealth: defaultCheckHealth,
};

/**
 * `mcpctl status` — show CLI version, daemon endpoints with reachability,
 * login state, configured registries, and default output format.
 */
export function createStatusCommand(deps?: Partial<StatusCommandDeps>): Command {
  const { configDeps, credentialsDeps, log, checkHealth } = { ...defaultDeps, ...deps };

  return new Command('status')
    .description('Show mcpctl status and connectivity')
    .option('-o, --output <format>', 'output format (table, json, yaml)', 'table')
    .action(async (opts: { output: string }) => {
      const config = loadConfig(configDeps);
      const creds = loadCredentials(credentialsDeps);

      // Probe both daemons concurrently; each probe resolves false on failure.
      const [mcplocalReachable, mcpdReachable] = await Promise.all([
        checkHealth(config.mcplocalUrl),
        checkHealth(config.mcpdUrl),
      ]);

      // One status object serves all three output formats.
      const status = {
        version: APP_VERSION,
        mcplocalUrl: config.mcplocalUrl,
        mcplocalReachable,
        mcpdUrl: config.mcpdUrl,
        mcpdReachable,
        auth: creds ? { user: creds.user } : null,
        registries: config.registries,
        outputFormat: config.outputFormat,
      };

      if (opts.output === 'json') {
        log(formatJson(status));
      } else if (opts.output === 'yaml') {
        log(formatYaml(status));
      } else {
        // Human-readable summary (default)
        log(`mcpctl v${status.version}`);
        log(`mcplocal: ${status.mcplocalUrl} (${mcplocalReachable ? 'connected' : 'unreachable'})`);
        log(`mcpd: ${status.mcpdUrl} (${mcpdReachable ? 'connected' : 'unreachable'})`);
        log(`Auth: ${creds ? `logged in as ${creds.user}` : 'not logged in'}`);
        log(`Registries: ${status.registries.join(', ')}`);
        log(`Output: ${status.outputFormat}`);
      }
    });
}
|
||||
4
src/cli/src/config/index.ts
Normal file
4
src/cli/src/config/index.ts
Normal file
@@ -0,0 +1,4 @@
|
||||
export { McpctlConfigSchema, DEFAULT_CONFIG } from './schema.js';
|
||||
export type { McpctlConfig } from './schema.js';
|
||||
export { loadConfig, saveConfig, mergeConfig, getConfigPath } from './loader.js';
|
||||
export type { ConfigLoaderDeps } from './loader.js';
|
||||
45
src/cli/src/config/loader.ts
Normal file
45
src/cli/src/config/loader.ts
Normal file
@@ -0,0 +1,45 @@
|
||||
import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'node:fs';
|
||||
import { join } from 'node:path';
|
||||
import { homedir } from 'node:os';
|
||||
import { McpctlConfigSchema, DEFAULT_CONFIG } from './schema.js';
|
||||
import type { McpctlConfig } from './schema.js';
|
||||
|
||||
export interface ConfigLoaderDeps {
|
||||
configDir: string;
|
||||
}
|
||||
|
||||
function defaultConfigDir(): string {
|
||||
return join(homedir(), '.mcpctl');
|
||||
}
|
||||
|
||||
export function getConfigPath(configDir?: string): string {
|
||||
return join(configDir ?? defaultConfigDir(), 'config.json');
|
||||
}
|
||||
|
||||
export function loadConfig(deps?: Partial<ConfigLoaderDeps>): McpctlConfig {
|
||||
const configPath = getConfigPath(deps?.configDir);
|
||||
|
||||
if (!existsSync(configPath)) {
|
||||
return DEFAULT_CONFIG;
|
||||
}
|
||||
|
||||
const raw = readFileSync(configPath, 'utf-8');
|
||||
const parsed = JSON.parse(raw) as unknown;
|
||||
return McpctlConfigSchema.parse(parsed);
|
||||
}
|
||||
|
||||
export function saveConfig(config: McpctlConfig, deps?: Partial<ConfigLoaderDeps>): void {
|
||||
const dir = deps?.configDir ?? defaultConfigDir();
|
||||
const configPath = getConfigPath(dir);
|
||||
|
||||
if (!existsSync(dir)) {
|
||||
mkdirSync(dir, { recursive: true });
|
||||
}
|
||||
|
||||
writeFileSync(configPath, JSON.stringify(config, null, 2) + '\n', 'utf-8');
|
||||
}
|
||||
|
||||
export function mergeConfig(overrides: Partial<McpctlConfig>, deps?: Partial<ConfigLoaderDeps>): McpctlConfig {
|
||||
const current = loadConfig(deps);
|
||||
return McpctlConfigSchema.parse({ ...current, ...overrides });
|
||||
}
|
||||
33
src/cli/src/config/schema.ts
Normal file
33
src/cli/src/config/schema.ts
Normal file
@@ -0,0 +1,33 @@
|
||||
import { z } from 'zod';
|
||||
|
||||
/**
 * Schema for ~/.mcpctl/config.json. Every field has a default or is optional,
 * so parsing an empty object yields a fully-populated config.
 */
export const McpctlConfigSchema = z.object({
  /** mcplocal daemon endpoint (local LLM pre-processing proxy) */
  mcplocalUrl: z.string().default('http://localhost:3200'),
  /** mcpd daemon endpoint (remote instance manager) */
  mcpdUrl: z.string().default('http://localhost:3100'),
  /** @deprecated Use mcplocalUrl instead. Kept for backward compatibility. */
  daemonUrl: z.string().optional(),
  /** Active registries for search */
  registries: z.array(z.enum(['official', 'glama', 'smithery'])).default(['official', 'glama', 'smithery']),
  /** Cache TTL in milliseconds */
  cacheTTLMs: z.number().int().positive().default(3_600_000),
  /** HTTP proxy URL */
  httpProxy: z.string().optional(),
  /** HTTPS proxy URL */
  httpsProxy: z.string().optional(),
  /** Default output format */
  outputFormat: z.enum(['table', 'json', 'yaml']).default('table'),
  /** Smithery API key */
  smitheryApiKey: z.string().optional(),
}).transform((cfg) => {
  // Backward compatibility: if old daemonUrl is set but mcplocalUrl wasn't explicitly changed,
  // use daemonUrl as mcplocalUrl
  // NOTE(review): an mcplocalUrl explicitly set to the default string is
  // indistinguishable from the default here, so daemonUrl wins in that case —
  // confirm that is acceptable.
  if (cfg.daemonUrl && cfg.mcplocalUrl === 'http://localhost:3200') {
    return { ...cfg, mcplocalUrl: cfg.daemonUrl };
  }
  return cfg;
});

export type McpctlConfig = z.infer<typeof McpctlConfigSchema>;

/** Config produced from an empty object — i.e. all schema defaults applied. */
export const DEFAULT_CONFIG: McpctlConfig = McpctlConfigSchema.parse({});
|
||||
4
src/cli/src/formatters/index.ts
Normal file
4
src/cli/src/formatters/index.ts
Normal file
@@ -0,0 +1,4 @@
|
||||
export { formatTable } from './table.js';
|
||||
export type { Column } from './table.js';
|
||||
export { formatJson, formatYaml } from './output.js';
|
||||
export type { OutputFormat } from './output.js';
|
||||
11
src/cli/src/formatters/output.ts
Normal file
11
src/cli/src/formatters/output.ts
Normal file
@@ -0,0 +1,11 @@
|
||||
import yaml from 'js-yaml';
|
||||
|
||||
export type OutputFormat = 'table' | 'json' | 'yaml';
|
||||
|
||||
export function formatJson(data: unknown): string {
|
||||
return JSON.stringify(data, null, 2);
|
||||
}
|
||||
|
||||
/** Render any value as YAML (120-col wrap, anchors/refs disabled) without a trailing newline. */
export function formatYaml(data: unknown): string {
  return yaml.dump(data, { lineWidth: 120, noRefs: true }).trimEnd();
}
|
||||
44
src/cli/src/formatters/table.ts
Normal file
44
src/cli/src/formatters/table.ts
Normal file
@@ -0,0 +1,44 @@
|
||||
export interface Column<T> {
|
||||
header: string;
|
||||
key: keyof T | ((row: T) => string);
|
||||
width?: number;
|
||||
align?: 'left' | 'right';
|
||||
}
|
||||
|
||||
export function formatTable<T>(rows: T[], columns: Column<T>[]): string {
|
||||
if (rows.length === 0) {
|
||||
return 'No results found.';
|
||||
}
|
||||
|
||||
const getValue = (row: T, col: Column<T>): string => {
|
||||
if (typeof col.key === 'function') {
|
||||
return col.key(row);
|
||||
}
|
||||
const val = row[col.key];
|
||||
return val == null ? '' : String(val);
|
||||
};
|
||||
|
||||
// Calculate column widths
|
||||
const widths = columns.map((col) => {
|
||||
if (col.width !== undefined) return col.width;
|
||||
const headerLen = col.header.length;
|
||||
const maxDataLen = rows.reduce((max, row) => {
|
||||
const val = getValue(row, col);
|
||||
return Math.max(max, val.length);
|
||||
}, 0);
|
||||
return Math.max(headerLen, maxDataLen);
|
||||
});
|
||||
|
||||
const pad = (text: string, width: number, align: 'left' | 'right' = 'left'): string => {
|
||||
const truncated = text.length > width ? text.slice(0, width - 1) + '\u2026' : text;
|
||||
return align === 'right' ? truncated.padStart(width) : truncated.padEnd(width);
|
||||
};
|
||||
|
||||
const headerLine = columns.map((col, i) => pad(col.header, widths[i] ?? 0, col.align ?? 'left')).join(' ');
|
||||
const separator = widths.map((w) => '-'.repeat(w)).join(' ');
|
||||
const dataLines = rows.map((row) =>
|
||||
columns.map((col, i) => pad(getValue(row, col), widths[i] ?? 0, col.align ?? 'left')).join(' '),
|
||||
);
|
||||
|
||||
return [headerLine, separator, ...dataLines].join('\n');
|
||||
}
|
||||
@@ -1,2 +1,163 @@
|
||||
// mcpctl CLI entry point
|
||||
// Will be implemented in Task 7
|
||||
#!/usr/bin/env node
|
||||
import { Command } from 'commander';
|
||||
import { APP_NAME, APP_VERSION } from '@mcpctl/shared';
|
||||
import { createConfigCommand } from './commands/config.js';
|
||||
import { createStatusCommand } from './commands/status.js';
|
||||
import { createGetCommand } from './commands/get.js';
|
||||
import { createDescribeCommand } from './commands/describe.js';
|
||||
import { createDeleteCommand } from './commands/delete.js';
|
||||
import { createLogsCommand } from './commands/logs.js';
|
||||
import { createApplyCommand } from './commands/apply.js';
|
||||
import { createCreateCommand } from './commands/create.js';
|
||||
import { createEditCommand } from './commands/edit.js';
|
||||
import { createClaudeCommand } from './commands/claude.js';
|
||||
import { createProjectCommand } from './commands/project.js';
|
||||
import { createBackupCommand, createRestoreCommand } from './commands/backup.js';
|
||||
import { createLoginCommand, createLogoutCommand } from './commands/auth.js';
|
||||
import { ApiClient, ApiError } from './api-client.js';
|
||||
import { loadConfig } from './config/index.js';
|
||||
import { loadCredentials } from './auth/index.js';
|
||||
import { resolveNameOrId } from './commands/shared.js';
|
||||
|
||||
/**
 * Build the root `mcpctl` commander program and wire every subcommand to a
 * shared ApiClient. Returns the program unparsed; callers invoke parseAsync.
 */
export function createProgram(): Command {
  const program = new Command()
    .name(APP_NAME)
    .description('Manage MCP servers like kubectl manages containers')
    .version(APP_VERSION, '-v, --version')
    .enablePositionalOptions()
    .option('--daemon-url <url>', 'mcplocal daemon URL')
    .option('--direct', 'bypass mcplocal and connect directly to mcpd');

  // Commands that manage their own config/credentials access.
  program.addCommand(createConfigCommand());
  program.addCommand(createStatusCommand());
  program.addCommand(createLoginCommand());
  program.addCommand(createLogoutCommand());

  // Resolve target URL: --direct goes to mcpd, default goes to mcplocal
  const config = loadConfig();
  const creds = loadCredentials();
  // NOTE(review): opts() is read here, BEFORE parseAsync has seen argv, so
  // --daemon-url/--direct appear unset at this point and the client below is
  // built from config defaults. Confirm whether these global flags ever take
  // effect, or move client construction into a preAction hook.
  const opts = program.opts();
  let baseUrl: string;
  if (opts.daemonUrl) {
    baseUrl = opts.daemonUrl as string;
  } else if (opts.direct) {
    baseUrl = config.mcpdUrl;
  } else {
    baseUrl = config.mcplocalUrl;
  }

  const client = new ApiClient({ baseUrl, token: creds?.token ?? undefined });

  // List fetch used by `get`: full list, glob-filtered list, or a single item
  // (wrapped in a one-element array) depending on the optional name/ID.
  const fetchResource = async (resource: string, nameOrId?: string): Promise<unknown[]> => {
    if (nameOrId) {
      // Glob pattern — use query param filtering
      if (nameOrId.includes('*')) {
        return client.get<unknown[]>(`/api/v1/${resource}?name=${encodeURIComponent(nameOrId)}`);
      }
      let id: string;
      try {
        id = await resolveNameOrId(client, resource, nameOrId);
      } catch {
        // Name lookup failed — fall back to treating the argument as an ID.
        id = nameOrId;
      }
      const item = await client.get(`/api/v1/${resource}/${id}`);
      return [item];
    }
    return client.get<unknown[]>(`/api/v1/${resource}`);
  };

  // Single-item fetch used by `describe`, with the same name→ID fallback.
  const fetchSingleResource = async (resource: string, nameOrId: string): Promise<unknown> => {
    let id: string;
    try {
      id = await resolveNameOrId(client, resource, nameOrId);
    } catch {
      id = nameOrId;
    }
    return client.get(`/api/v1/${resource}/${id}`);
  };

  program.addCommand(createGetCommand({
    fetchResource,
    log: (...args) => console.log(...args),
  }));

  program.addCommand(createDescribeCommand({
    client,
    fetchResource: fetchSingleResource,
    fetchInspect: async (id: string) => client.get(`/api/v1/instances/${id}/inspect`),
    log: (...args) => console.log(...args),
  }));

  program.addCommand(createDeleteCommand({
    client,
    log: (...args) => console.log(...args),
  }));

  program.addCommand(createLogsCommand({
    client,
    log: (...args) => console.log(...args),
  }));

  program.addCommand(createCreateCommand({
    client,
    log: (...args) => console.log(...args),
  }));

  program.addCommand(createEditCommand({
    client,
    log: (...args) => console.log(...args),
  }));

  program.addCommand(createApplyCommand({
    client,
    log: (...args) => console.log(...args),
  }));

  program.addCommand(createClaudeCommand({
    client,
    log: (...args) => console.log(...args),
  }));

  program.addCommand(createProjectCommand({
    client,
    log: (...args) => console.log(...args),
  }));

  program.addCommand(createBackupCommand({
    client,
    log: (...args) => console.log(...args),
  }));

  program.addCommand(createRestoreCommand({
    client,
    log: (...args) => console.log(...args),
  }));

  return program;
}
|
||||
|
||||
// Run when invoked directly
|
||||
// Run only when invoked directly (not when imported by tests): compare this
// module's URL against the script path node was launched with.
const isDirectRun =
  typeof process !== 'undefined' &&
  process.argv[1] !== undefined &&
  import.meta.url === `file://${process.argv[1]}`;

if (isDirectRun) {
  createProgram().parseAsync(process.argv).catch((err: unknown) => {
    if (err instanceof ApiError) {
      // API errors carry a JSON body; prefer its error/message field when present.
      let msg: string;
      try {
        const parsed = JSON.parse(err.body) as { error?: string; message?: string };
        msg = parsed.error ?? parsed.message ?? err.body;
      } catch {
        // Body was not JSON — show it raw.
        msg = err.body;
      }
      console.error(`Error: ${msg}`);
    } else if (err instanceof Error) {
      console.error(`Error: ${err.message}`);
    } else {
      console.error(`Error: ${String(err)}`);
    }
    process.exit(1);
  });
}
|
||||
|
||||
100
src/cli/tests/api-client.test.ts
Normal file
100
src/cli/tests/api-client.test.ts
Normal file
@@ -0,0 +1,100 @@
|
||||
import { describe, it, expect, beforeAll, afterAll } from 'vitest';
|
||||
import http from 'node:http';
|
||||
import { ApiClient, ApiError } from '../src/api-client.js';
|
||||
|
||||
// Shared fixture: an in-process HTTP stub standing in for the mcpd API.
// Listens on an ephemeral port (0) so parallel test runs never collide.
let server: http.Server;
let port: number;

beforeAll(async () => {
  // Routes cover: list GET, single GET, echoing POST (201), and a JSON 404.
  server = http.createServer((req, res) => {
    if (req.url === '/api/v1/servers' && req.method === 'GET') {
      res.writeHead(200, { 'Content-Type': 'application/json' });
      res.end(JSON.stringify([{ id: 'srv-1', name: 'slack' }]));
    } else if (req.url === '/api/v1/servers/srv-1' && req.method === 'GET') {
      res.writeHead(200, { 'Content-Type': 'application/json' });
      res.end(JSON.stringify({ id: 'srv-1', name: 'slack', transport: 'STDIO' }));
    } else if (req.url === '/api/v1/servers' && req.method === 'POST') {
      // Buffer the request body, then echo it back with a generated ID.
      const chunks: Buffer[] = [];
      req.on('data', (c: Buffer) => chunks.push(c));
      req.on('end', () => {
        const body = JSON.parse(Buffer.concat(chunks).toString());
        res.writeHead(201, { 'Content-Type': 'application/json' });
        res.end(JSON.stringify({ id: 'srv-new', ...body }));
      });
    } else if (req.url === '/api/v1/missing' && req.method === 'GET') {
      res.writeHead(404, { 'Content-Type': 'application/json' });
      res.end(JSON.stringify({ error: 'Not found' }));
    } else {
      // Anything unrouted: bare 404 with no body.
      res.writeHead(404);
      res.end();
    }
  });

  // Capture the OS-assigned port once the server is listening.
  await new Promise<void>((resolve) => {
    server.listen(0, () => {
      const addr = server.address();
      if (addr && typeof addr === 'object') {
        port = addr.port;
      }
      resolve();
    });
  });
});

afterAll(() => {
  server.close();
});
|
||||
|
||||
describe('ApiClient', () => {
|
||||
it('performs GET request for list', async () => {
|
||||
const client = new ApiClient({ baseUrl: `http://localhost:${port}` });
|
||||
const result = await client.get<Array<{ id: string; name: string }>>('/api/v1/servers');
|
||||
expect(result).toEqual([{ id: 'srv-1', name: 'slack' }]);
|
||||
});
|
||||
|
||||
it('performs GET request for single item', async () => {
|
||||
const client = new ApiClient({ baseUrl: `http://localhost:${port}` });
|
||||
const result = await client.get<{ id: string; name: string }>('/api/v1/servers/srv-1');
|
||||
expect(result.name).toBe('slack');
|
||||
});
|
||||
|
||||
it('performs POST request', async () => {
|
||||
const client = new ApiClient({ baseUrl: `http://localhost:${port}` });
|
||||
const result = await client.post<{ id: string; name: string }>('/api/v1/servers', { name: 'github' });
|
||||
expect(result.id).toBe('srv-new');
|
||||
expect(result.name).toBe('github');
|
||||
});
|
||||
|
||||
it('throws ApiError on 404', async () => {
|
||||
const client = new ApiClient({ baseUrl: `http://localhost:${port}` });
|
||||
await expect(client.get('/api/v1/missing')).rejects.toThrow(ApiError);
|
||||
});
|
||||
|
||||
it('throws on connection error', async () => {
|
||||
const client = new ApiClient({ baseUrl: 'http://localhost:1' });
|
||||
await expect(client.get('/anything')).rejects.toThrow();
|
||||
});
|
||||
|
||||
it('sends Authorization header when token provided', async () => {
|
||||
// We need a separate server to check the header
|
||||
let receivedAuth = '';
|
||||
const authServer = http.createServer((req, res) => {
|
||||
receivedAuth = req.headers['authorization'] ?? '';
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ ok: true }));
|
||||
});
|
||||
const authPort = await new Promise<number>((resolve) => {
|
||||
authServer.listen(0, () => {
|
||||
const addr = authServer.address();
|
||||
if (addr && typeof addr === 'object') resolve(addr.port);
|
||||
});
|
||||
});
|
||||
try {
|
||||
const client = new ApiClient({ baseUrl: `http://localhost:${authPort}`, token: 'my-token' });
|
||||
await client.get('/test');
|
||||
expect(receivedAuth).toBe('Bearer my-token');
|
||||
} finally {
|
||||
authServer.close();
|
||||
}
|
||||
});
|
||||
});
|
||||
59
src/cli/tests/auth/credentials.test.ts
Normal file
59
src/cli/tests/auth/credentials.test.ts
Normal file
@@ -0,0 +1,59 @@
|
||||
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||
import { mkdtempSync, rmSync, statSync, existsSync } from 'node:fs';
|
||||
import { join } from 'node:path';
|
||||
import { tmpdir } from 'node:os';
|
||||
import { saveCredentials, loadCredentials, deleteCredentials } from '../../src/auth/index.js';
|
||||
|
||||
let tempDir: string;
|
||||
|
||||
beforeEach(() => {
|
||||
tempDir = mkdtempSync(join(tmpdir(), 'mcpctl-auth-test-'));
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
rmSync(tempDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
describe('saveCredentials', () => {
|
||||
it('saves credentials file', () => {
|
||||
saveCredentials({ token: 'tok123', mcpdUrl: 'http://x:3100', user: 'alice@test.com' }, { configDir: tempDir });
|
||||
expect(existsSync(join(tempDir, 'credentials'))).toBe(true);
|
||||
});
|
||||
|
||||
it('sets 0600 permissions', () => {
|
||||
saveCredentials({ token: 'tok123', mcpdUrl: 'http://x:3100', user: 'alice@test.com' }, { configDir: tempDir });
|
||||
const stat = statSync(join(tempDir, 'credentials'));
|
||||
expect(stat.mode & 0o777).toBe(0o600);
|
||||
});
|
||||
|
||||
it('creates config dir if missing', () => {
|
||||
const nested = join(tempDir, 'sub', 'dir');
|
||||
saveCredentials({ token: 'tok', mcpdUrl: 'http://x:3100', user: 'bob' }, { configDir: nested });
|
||||
expect(existsSync(join(nested, 'credentials'))).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('loadCredentials', () => {
|
||||
it('returns null when no credentials file', () => {
|
||||
expect(loadCredentials({ configDir: tempDir })).toBeNull();
|
||||
});
|
||||
|
||||
it('round-trips credentials', () => {
|
||||
const creds = { token: 'tok456', mcpdUrl: 'http://remote:3100', user: 'charlie@test.com', expiresAt: '2099-01-01' };
|
||||
saveCredentials(creds, { configDir: tempDir });
|
||||
const loaded = loadCredentials({ configDir: tempDir });
|
||||
expect(loaded).toEqual(creds);
|
||||
});
|
||||
});
|
||||
|
||||
describe('deleteCredentials', () => {
|
||||
it('returns false when no credentials file', () => {
|
||||
expect(deleteCredentials({ configDir: tempDir })).toBe(false);
|
||||
});
|
||||
|
||||
it('deletes credentials file', () => {
|
||||
saveCredentials({ token: 'tok', mcpdUrl: 'http://x:3100', user: 'u' }, { configDir: tempDir });
|
||||
expect(deleteCredentials({ configDir: tempDir })).toBe(true);
|
||||
expect(existsSync(join(tempDir, 'credentials'))).toBe(false);
|
||||
});
|
||||
});
|
||||
39
src/cli/tests/cli.test.ts
Normal file
39
src/cli/tests/cli.test.ts
Normal file
@@ -0,0 +1,39 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { createProgram } from '../src/index.js';
|
||||
|
||||
describe('createProgram', () => {
|
||||
it('creates a Commander program', () => {
|
||||
const program = createProgram();
|
||||
expect(program.name()).toBe('mcpctl');
|
||||
});
|
||||
|
||||
it('has version flag', () => {
|
||||
const program = createProgram();
|
||||
expect(program.version()).toBe('0.1.0');
|
||||
});
|
||||
|
||||
it('has config subcommand', () => {
|
||||
const program = createProgram();
|
||||
const config = program.commands.find((c) => c.name() === 'config');
|
||||
expect(config).toBeDefined();
|
||||
});
|
||||
|
||||
it('has status subcommand', () => {
|
||||
const program = createProgram();
|
||||
const status = program.commands.find((c) => c.name() === 'status');
|
||||
expect(status).toBeDefined();
|
||||
});
|
||||
|
||||
it('subcommands have output option', () => {
|
||||
const program = createProgram();
|
||||
const get = program.commands.find((c) => c.name() === 'get');
|
||||
const opt = get?.options.find((o) => o.long === '--output');
|
||||
expect(opt).toBeDefined();
|
||||
});
|
||||
|
||||
it('has daemon-url option', () => {
|
||||
const program = createProgram();
|
||||
const opt = program.options.find((o) => o.long === '--daemon-url');
|
||||
expect(opt).toBeDefined();
|
||||
});
|
||||
});
|
||||
162
src/cli/tests/commands/apply.test.ts
Normal file
162
src/cli/tests/commands/apply.test.ts
Normal file
@@ -0,0 +1,162 @@
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import { writeFileSync, mkdtempSync, rmSync } from 'node:fs';
|
||||
import { join } from 'node:path';
|
||||
import { tmpdir } from 'node:os';
|
||||
import { createApplyCommand } from '../../src/commands/apply.js';
|
||||
import type { ApiClient } from '../../src/api-client.js';
|
||||
|
||||
function mockClient(): ApiClient {
|
||||
return {
|
||||
get: vi.fn(async () => []),
|
||||
post: vi.fn(async () => ({ id: 'new-id', name: 'test' })),
|
||||
put: vi.fn(async () => ({ id: 'existing-id', name: 'test' })),
|
||||
delete: vi.fn(async () => {}),
|
||||
} as unknown as ApiClient;
|
||||
}
|
||||
|
||||
describe('apply command', () => {
|
||||
let client: ReturnType<typeof mockClient>;
|
||||
let output: string[];
|
||||
let tmpDir: string;
|
||||
const log = (...args: unknown[]) => output.push(args.map(String).join(' '));
|
||||
|
||||
beforeEach(() => {
|
||||
client = mockClient();
|
||||
output = [];
|
||||
tmpDir = mkdtempSync(join(tmpdir(), 'mcpctl-test-'));
|
||||
});
|
||||
|
||||
it('applies servers from YAML file', async () => {
|
||||
const configPath = join(tmpDir, 'config.yaml');
|
||||
writeFileSync(configPath, `
|
||||
servers:
|
||||
- name: slack
|
||||
description: Slack MCP server
|
||||
transport: STDIO
|
||||
packageName: "@anthropic/slack-mcp"
|
||||
`);
|
||||
|
||||
const cmd = createApplyCommand({ client, log });
|
||||
await cmd.parseAsync([configPath], { from: 'user' });
|
||||
|
||||
expect(client.post).toHaveBeenCalledWith('/api/v1/servers', expect.objectContaining({ name: 'slack' }));
|
||||
expect(output.join('\n')).toContain('Created server: slack');
|
||||
|
||||
rmSync(tmpDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('applies servers from JSON file', async () => {
|
||||
const configPath = join(tmpDir, 'config.json');
|
||||
writeFileSync(configPath, JSON.stringify({
|
||||
servers: [{ name: 'github', transport: 'STDIO' }],
|
||||
}));
|
||||
|
||||
const cmd = createApplyCommand({ client, log });
|
||||
await cmd.parseAsync([configPath], { from: 'user' });
|
||||
|
||||
expect(client.post).toHaveBeenCalledWith('/api/v1/servers', expect.objectContaining({ name: 'github' }));
|
||||
expect(output.join('\n')).toContain('Created server: github');
|
||||
|
||||
rmSync(tmpDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('updates existing servers', async () => {
|
||||
vi.mocked(client.get).mockResolvedValue([{ id: 'srv-1', name: 'slack' }]);
|
||||
|
||||
const configPath = join(tmpDir, 'config.yaml');
|
||||
writeFileSync(configPath, `
|
||||
servers:
|
||||
- name: slack
|
||||
description: Updated description
|
||||
transport: STDIO
|
||||
`);
|
||||
|
||||
const cmd = createApplyCommand({ client, log });
|
||||
await cmd.parseAsync([configPath], { from: 'user' });
|
||||
|
||||
expect(client.put).toHaveBeenCalledWith('/api/v1/servers/srv-1', expect.objectContaining({ name: 'slack' }));
|
||||
expect(output.join('\n')).toContain('Updated server: slack');
|
||||
|
||||
rmSync(tmpDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('supports dry-run mode', async () => {
|
||||
const configPath = join(tmpDir, 'config.yaml');
|
||||
writeFileSync(configPath, `
|
||||
servers:
|
||||
- name: test
|
||||
transport: STDIO
|
||||
`);
|
||||
|
||||
const cmd = createApplyCommand({ client, log });
|
||||
await cmd.parseAsync([configPath, '--dry-run'], { from: 'user' });
|
||||
|
||||
expect(client.post).not.toHaveBeenCalled();
|
||||
expect(output.join('\n')).toContain('Dry run');
|
||||
expect(output.join('\n')).toContain('1 server(s)');
|
||||
|
||||
rmSync(tmpDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('applies secrets', async () => {
|
||||
const configPath = join(tmpDir, 'config.yaml');
|
||||
writeFileSync(configPath, `
|
||||
secrets:
|
||||
- name: ha-creds
|
||||
data:
|
||||
TOKEN: abc123
|
||||
URL: https://ha.local
|
||||
`);
|
||||
|
||||
const cmd = createApplyCommand({ client, log });
|
||||
await cmd.parseAsync([configPath], { from: 'user' });
|
||||
|
||||
expect(client.post).toHaveBeenCalledWith('/api/v1/secrets', expect.objectContaining({
|
||||
name: 'ha-creds',
|
||||
data: { TOKEN: 'abc123', URL: 'https://ha.local' },
|
||||
}));
|
||||
expect(output.join('\n')).toContain('Created secret: ha-creds');
|
||||
|
||||
rmSync(tmpDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('updates existing secrets', async () => {
|
||||
vi.mocked(client.get).mockImplementation(async (url: string) => {
|
||||
if (url === '/api/v1/secrets') return [{ id: 'sec-1', name: 'ha-creds' }];
|
||||
return [];
|
||||
});
|
||||
|
||||
const configPath = join(tmpDir, 'config.yaml');
|
||||
writeFileSync(configPath, `
|
||||
secrets:
|
||||
- name: ha-creds
|
||||
data:
|
||||
TOKEN: new-token
|
||||
`);
|
||||
|
||||
const cmd = createApplyCommand({ client, log });
|
||||
await cmd.parseAsync([configPath], { from: 'user' });
|
||||
|
||||
expect(client.put).toHaveBeenCalledWith('/api/v1/secrets/sec-1', { data: { TOKEN: 'new-token' } });
|
||||
expect(output.join('\n')).toContain('Updated secret: ha-creds');
|
||||
|
||||
rmSync(tmpDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('applies projects', async () => {
|
||||
const configPath = join(tmpDir, 'config.yaml');
|
||||
writeFileSync(configPath, `
|
||||
projects:
|
||||
- name: my-project
|
||||
description: A test project
|
||||
`);
|
||||
|
||||
const cmd = createApplyCommand({ client, log });
|
||||
await cmd.parseAsync([configPath], { from: 'user' });
|
||||
|
||||
expect(client.post).toHaveBeenCalledWith('/api/v1/projects', expect.objectContaining({ name: 'my-project' }));
|
||||
expect(output.join('\n')).toContain('Created project: my-project');
|
||||
|
||||
rmSync(tmpDir, { recursive: true, force: true });
|
||||
});
|
||||
});
|
||||
144
src/cli/tests/commands/auth.test.ts
Normal file
144
src/cli/tests/commands/auth.test.ts
Normal file
@@ -0,0 +1,144 @@
|
||||
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||
import { mkdtempSync, rmSync } from 'node:fs';
|
||||
import { join } from 'node:path';
|
||||
import { tmpdir } from 'node:os';
|
||||
import { createLoginCommand, createLogoutCommand } from '../../src/commands/auth.js';
|
||||
import { saveCredentials, loadCredentials } from '../../src/auth/index.js';
|
||||
import { saveConfig, DEFAULT_CONFIG } from '../../src/config/index.js';
|
||||
|
||||
let tempDir: string;
|
||||
let output: string[];
|
||||
|
||||
function log(...args: string[]) {
|
||||
output.push(args.join(' '));
|
||||
}
|
||||
|
||||
beforeEach(() => {
|
||||
tempDir = mkdtempSync(join(tmpdir(), 'mcpctl-auth-cmd-test-'));
|
||||
output = [];
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
rmSync(tempDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
describe('login command', () => {
|
||||
it('stores credentials on successful login', async () => {
|
||||
const cmd = createLoginCommand({
|
||||
configDeps: { configDir: tempDir },
|
||||
credentialsDeps: { configDir: tempDir },
|
||||
prompt: {
|
||||
input: async () => 'alice@test.com',
|
||||
password: async () => 'secret123',
|
||||
},
|
||||
log,
|
||||
loginRequest: async (_url, email, _password) => ({
|
||||
token: 'session-token-123',
|
||||
user: { email },
|
||||
}),
|
||||
logoutRequest: async () => {},
|
||||
});
|
||||
await cmd.parseAsync([], { from: 'user' });
|
||||
expect(output[0]).toContain('Logged in as alice@test.com');
|
||||
|
||||
const creds = loadCredentials({ configDir: tempDir });
|
||||
expect(creds).not.toBeNull();
|
||||
expect(creds!.token).toBe('session-token-123');
|
||||
expect(creds!.user).toBe('alice@test.com');
|
||||
});
|
||||
|
||||
it('shows error on failed login', async () => {
|
||||
const cmd = createLoginCommand({
|
||||
configDeps: { configDir: tempDir },
|
||||
credentialsDeps: { configDir: tempDir },
|
||||
prompt: {
|
||||
input: async () => 'alice@test.com',
|
||||
password: async () => 'wrong',
|
||||
},
|
||||
log,
|
||||
loginRequest: async () => { throw new Error('Invalid credentials'); },
|
||||
logoutRequest: async () => {},
|
||||
});
|
||||
await cmd.parseAsync([], { from: 'user' });
|
||||
expect(output[0]).toContain('Login failed');
|
||||
expect(output[0]).toContain('Invalid credentials');
|
||||
|
||||
const creds = loadCredentials({ configDir: tempDir });
|
||||
expect(creds).toBeNull();
|
||||
});
|
||||
|
||||
it('uses mcpdUrl from config', async () => {
|
||||
saveConfig({ ...DEFAULT_CONFIG, mcpdUrl: 'http://custom:3100' }, { configDir: tempDir });
|
||||
let capturedUrl = '';
|
||||
const cmd = createLoginCommand({
|
||||
configDeps: { configDir: tempDir },
|
||||
credentialsDeps: { configDir: tempDir },
|
||||
prompt: {
|
||||
input: async () => 'user@test.com',
|
||||
password: async () => 'pass',
|
||||
},
|
||||
log,
|
||||
loginRequest: async (url, email) => {
|
||||
capturedUrl = url;
|
||||
return { token: 'tok', user: { email } };
|
||||
},
|
||||
logoutRequest: async () => {},
|
||||
});
|
||||
await cmd.parseAsync([], { from: 'user' });
|
||||
expect(capturedUrl).toBe('http://custom:3100');
|
||||
});
|
||||
|
||||
it('allows --mcpd-url flag override', async () => {
|
||||
let capturedUrl = '';
|
||||
const cmd = createLoginCommand({
|
||||
configDeps: { configDir: tempDir },
|
||||
credentialsDeps: { configDir: tempDir },
|
||||
prompt: {
|
||||
input: async () => 'user@test.com',
|
||||
password: async () => 'pass',
|
||||
},
|
||||
log,
|
||||
loginRequest: async (url, email) => {
|
||||
capturedUrl = url;
|
||||
return { token: 'tok', user: { email } };
|
||||
},
|
||||
logoutRequest: async () => {},
|
||||
});
|
||||
await cmd.parseAsync(['--mcpd-url', 'http://override:3100'], { from: 'user' });
|
||||
expect(capturedUrl).toBe('http://override:3100');
|
||||
});
|
||||
});
|
||||
|
||||
describe('logout command', () => {
|
||||
it('removes credentials on logout', async () => {
|
||||
saveCredentials({ token: 'tok', mcpdUrl: 'http://x:3100', user: 'alice' }, { configDir: tempDir });
|
||||
let logoutCalled = false;
|
||||
const cmd = createLogoutCommand({
|
||||
configDeps: { configDir: tempDir },
|
||||
credentialsDeps: { configDir: tempDir },
|
||||
prompt: { input: async () => '', password: async () => '' },
|
||||
log,
|
||||
loginRequest: async () => ({ token: '', user: { email: '' } }),
|
||||
logoutRequest: async () => { logoutCalled = true; },
|
||||
});
|
||||
await cmd.parseAsync([], { from: 'user' });
|
||||
expect(output[0]).toContain('Logged out successfully');
|
||||
expect(logoutCalled).toBe(true);
|
||||
|
||||
const creds = loadCredentials({ configDir: tempDir });
|
||||
expect(creds).toBeNull();
|
||||
});
|
||||
|
||||
it('shows not logged in when no credentials', async () => {
|
||||
const cmd = createLogoutCommand({
|
||||
configDeps: { configDir: tempDir },
|
||||
credentialsDeps: { configDir: tempDir },
|
||||
prompt: { input: async () => '', password: async () => '' },
|
||||
log,
|
||||
loginRequest: async () => ({ token: '', user: { email: '' } }),
|
||||
logoutRequest: async () => {},
|
||||
});
|
||||
await cmd.parseAsync([], { from: 'user' });
|
||||
expect(output[0]).toContain('Not logged in');
|
||||
});
|
||||
});
|
||||
120
src/cli/tests/commands/backup.test.ts
Normal file
120
src/cli/tests/commands/backup.test.ts
Normal file
@@ -0,0 +1,120 @@
|
||||
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
|
||||
import fs from 'node:fs';
|
||||
import { createBackupCommand, createRestoreCommand } from '../../src/commands/backup.js';
|
||||
|
||||
const mockClient = {
|
||||
get: vi.fn(),
|
||||
post: vi.fn(),
|
||||
put: vi.fn(),
|
||||
delete: vi.fn(),
|
||||
};
|
||||
|
||||
const log = vi.fn();
|
||||
|
||||
describe('backup command', () => {
|
||||
beforeEach(() => {
|
||||
vi.resetAllMocks();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
// Clean up any created files
|
||||
try { fs.unlinkSync('test-backup.json'); } catch { /* ignore */ }
|
||||
});
|
||||
|
||||
it('creates backup command', () => {
|
||||
const cmd = createBackupCommand({ client: mockClient as never, log });
|
||||
expect(cmd.name()).toBe('backup');
|
||||
});
|
||||
|
||||
it('calls API and writes file', async () => {
|
||||
const bundle = { version: '1', servers: [], profiles: [], projects: [] };
|
||||
mockClient.post.mockResolvedValue(bundle);
|
||||
|
||||
const cmd = createBackupCommand({ client: mockClient as never, log });
|
||||
await cmd.parseAsync(['-o', 'test-backup.json'], { from: 'user' });
|
||||
|
||||
expect(mockClient.post).toHaveBeenCalledWith('/api/v1/backup', {});
|
||||
expect(fs.existsSync('test-backup.json')).toBe(true);
|
||||
expect(log).toHaveBeenCalledWith(expect.stringContaining('test-backup.json'));
|
||||
});
|
||||
|
||||
it('passes password when provided', async () => {
|
||||
mockClient.post.mockResolvedValue({ version: '1', servers: [], profiles: [], projects: [] });
|
||||
|
||||
const cmd = createBackupCommand({ client: mockClient as never, log });
|
||||
await cmd.parseAsync(['-o', 'test-backup.json', '-p', 'secret'], { from: 'user' });
|
||||
|
||||
expect(mockClient.post).toHaveBeenCalledWith('/api/v1/backup', { password: 'secret' });
|
||||
});
|
||||
|
||||
it('passes resource filter', async () => {
|
||||
mockClient.post.mockResolvedValue({ version: '1', servers: [], profiles: [], projects: [] });
|
||||
|
||||
const cmd = createBackupCommand({ client: mockClient as never, log });
|
||||
await cmd.parseAsync(['-o', 'test-backup.json', '-r', 'servers,profiles'], { from: 'user' });
|
||||
|
||||
expect(mockClient.post).toHaveBeenCalledWith('/api/v1/backup', {
|
||||
resources: ['servers', 'profiles'],
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('restore command', () => {
|
||||
const testFile = 'test-restore-input.json';
|
||||
|
||||
beforeEach(() => {
|
||||
vi.resetAllMocks();
|
||||
fs.writeFileSync(testFile, JSON.stringify({
|
||||
version: '1', servers: [], profiles: [], projects: [],
|
||||
}));
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
try { fs.unlinkSync(testFile); } catch { /* ignore */ }
|
||||
});
|
||||
|
||||
it('creates restore command', () => {
|
||||
const cmd = createRestoreCommand({ client: mockClient as never, log });
|
||||
expect(cmd.name()).toBe('restore');
|
||||
});
|
||||
|
||||
it('reads file and calls API', async () => {
|
||||
mockClient.post.mockResolvedValue({
|
||||
serversCreated: 1, serversSkipped: 0,
|
||||
profilesCreated: 0, profilesSkipped: 0,
|
||||
projectsCreated: 0, projectsSkipped: 0,
|
||||
errors: [],
|
||||
});
|
||||
|
||||
const cmd = createRestoreCommand({ client: mockClient as never, log });
|
||||
await cmd.parseAsync(['-i', testFile], { from: 'user' });
|
||||
|
||||
expect(mockClient.post).toHaveBeenCalledWith('/api/v1/restore', expect.objectContaining({
|
||||
bundle: expect.objectContaining({ version: '1' }),
|
||||
conflictStrategy: 'skip',
|
||||
}));
|
||||
expect(log).toHaveBeenCalledWith('Restore complete:');
|
||||
});
|
||||
|
||||
it('reports errors from restore', async () => {
|
||||
mockClient.post.mockResolvedValue({
|
||||
serversCreated: 0, serversSkipped: 0,
|
||||
profilesCreated: 0, profilesSkipped: 0,
|
||||
projectsCreated: 0, projectsSkipped: 0,
|
||||
errors: ['Server "x" already exists'],
|
||||
});
|
||||
|
||||
const cmd = createRestoreCommand({ client: mockClient as never, log });
|
||||
await cmd.parseAsync(['-i', testFile], { from: 'user' });
|
||||
|
||||
expect(log).toHaveBeenCalledWith(expect.stringContaining('Errors'));
|
||||
});
|
||||
|
||||
it('logs error for missing file', async () => {
|
||||
const cmd = createRestoreCommand({ client: mockClient as never, log });
|
||||
await cmd.parseAsync(['-i', 'nonexistent.json'], { from: 'user' });
|
||||
|
||||
expect(log).toHaveBeenCalledWith(expect.stringContaining('not found'));
|
||||
expect(mockClient.post).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
158
src/cli/tests/commands/claude.test.ts
Normal file
158
src/cli/tests/commands/claude.test.ts
Normal file
@@ -0,0 +1,158 @@
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import { writeFileSync, readFileSync, mkdtempSync, rmSync } from 'node:fs';
|
||||
import { join } from 'node:path';
|
||||
import { tmpdir } from 'node:os';
|
||||
import { createClaudeCommand } from '../../src/commands/claude.js';
|
||||
import type { ApiClient } from '../../src/api-client.js';
|
||||
|
||||
function mockClient(): ApiClient {
|
||||
return {
|
||||
get: vi.fn(async () => ({
|
||||
mcpServers: {
|
||||
'slack--default': { command: 'npx', args: ['-y', '@anthropic/slack-mcp'], env: { WORKSPACE: 'test' } },
|
||||
'github--default': { command: 'npx', args: ['-y', '@anthropic/github-mcp'] },
|
||||
},
|
||||
})),
|
||||
post: vi.fn(async () => ({})),
|
||||
put: vi.fn(async () => ({})),
|
||||
delete: vi.fn(async () => {}),
|
||||
} as unknown as ApiClient;
|
||||
}
|
||||
|
||||
describe('claude command', () => {
|
||||
let client: ReturnType<typeof mockClient>;
|
||||
let output: string[];
|
||||
let tmpDir: string;
|
||||
const log = (...args: unknown[]) => output.push(args.map(String).join(' '));
|
||||
|
||||
beforeEach(() => {
|
||||
client = mockClient();
|
||||
output = [];
|
||||
tmpDir = mkdtempSync(join(tmpdir(), 'mcpctl-claude-'));
|
||||
});
|
||||
|
||||
describe('generate', () => {
|
||||
it('generates .mcp.json from project config', async () => {
|
||||
const outPath = join(tmpDir, '.mcp.json');
|
||||
const cmd = createClaudeCommand({ client, log });
|
||||
await cmd.parseAsync(['generate', 'proj-1', '-o', outPath], { from: 'user' });
|
||||
|
||||
expect(client.get).toHaveBeenCalledWith('/api/v1/projects/proj-1/mcp-config');
|
||||
const written = JSON.parse(readFileSync(outPath, 'utf-8'));
|
||||
expect(written.mcpServers['slack--default']).toBeDefined();
|
||||
expect(output.join('\n')).toContain('2 server(s)');
|
||||
|
||||
rmSync(tmpDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('prints to stdout with --stdout', async () => {
|
||||
const cmd = createClaudeCommand({ client, log });
|
||||
await cmd.parseAsync(['generate', 'proj-1', '--stdout'], { from: 'user' });
|
||||
|
||||
expect(output[0]).toContain('mcpServers');
|
||||
rmSync(tmpDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('merges with existing .mcp.json', async () => {
|
||||
const outPath = join(tmpDir, '.mcp.json');
|
||||
writeFileSync(outPath, JSON.stringify({
|
||||
mcpServers: { 'existing--server': { command: 'echo', args: [] } },
|
||||
}));
|
||||
|
||||
const cmd = createClaudeCommand({ client, log });
|
||||
await cmd.parseAsync(['generate', 'proj-1', '-o', outPath, '--merge'], { from: 'user' });
|
||||
|
||||
const written = JSON.parse(readFileSync(outPath, 'utf-8'));
|
||||
expect(written.mcpServers['existing--server']).toBeDefined();
|
||||
expect(written.mcpServers['slack--default']).toBeDefined();
|
||||
expect(output.join('\n')).toContain('3 server(s)');
|
||||
|
||||
rmSync(tmpDir, { recursive: true, force: true });
|
||||
});
|
||||
});
|
||||
|
||||
describe('show', () => {
|
||||
it('shows servers in .mcp.json', () => {
|
||||
const filePath = join(tmpDir, '.mcp.json');
|
||||
writeFileSync(filePath, JSON.stringify({
|
||||
mcpServers: {
|
||||
'slack': { command: 'npx', args: ['-y', '@anthropic/slack-mcp'], env: { TOKEN: 'x' } },
|
||||
},
|
||||
}));
|
||||
|
||||
const cmd = createClaudeCommand({ client, log });
|
||||
cmd.parseAsync(['show', '-p', filePath], { from: 'user' });
|
||||
|
||||
expect(output.join('\n')).toContain('slack');
|
||||
expect(output.join('\n')).toContain('npx -y @anthropic/slack-mcp');
|
||||
expect(output.join('\n')).toContain('TOKEN');
|
||||
|
||||
rmSync(tmpDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('handles missing file', () => {
|
||||
const cmd = createClaudeCommand({ client, log });
|
||||
cmd.parseAsync(['show', '-p', join(tmpDir, 'nonexistent.json')], { from: 'user' });
|
||||
|
||||
expect(output.join('\n')).toContain('No .mcp.json found');
|
||||
rmSync(tmpDir, { recursive: true, force: true });
|
||||
});
|
||||
});
|
||||
|
||||
describe('add', () => {
|
||||
it('adds a server entry', () => {
|
||||
const filePath = join(tmpDir, '.mcp.json');
|
||||
const cmd = createClaudeCommand({ client, log });
|
||||
cmd.parseAsync(['add', 'my-server', '-c', 'npx', '-a', '-y', 'my-pkg', '-p', filePath], { from: 'user' });
|
||||
|
||||
const written = JSON.parse(readFileSync(filePath, 'utf-8'));
|
||||
expect(written.mcpServers['my-server']).toEqual({
|
||||
command: 'npx',
|
||||
args: ['-y', 'my-pkg'],
|
||||
});
|
||||
|
||||
rmSync(tmpDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('adds server with env vars', () => {
|
||||
const filePath = join(tmpDir, '.mcp.json');
|
||||
const cmd = createClaudeCommand({ client, log });
|
||||
cmd.parseAsync(['add', 'my-server', '-c', 'node', '-e', 'KEY=val', 'SECRET=abc', '-p', filePath], { from: 'user' });
|
||||
|
||||
const written = JSON.parse(readFileSync(filePath, 'utf-8'));
|
||||
expect(written.mcpServers['my-server'].env).toEqual({ KEY: 'val', SECRET: 'abc' });
|
||||
|
||||
rmSync(tmpDir, { recursive: true, force: true });
|
||||
});
|
||||
});
|
||||
|
||||
describe('remove', () => {
|
||||
it('removes a server entry', () => {
|
||||
const filePath = join(tmpDir, '.mcp.json');
|
||||
writeFileSync(filePath, JSON.stringify({
|
||||
mcpServers: { 'slack': { command: 'npx', args: [] }, 'github': { command: 'npx', args: [] } },
|
||||
}));
|
||||
|
||||
const cmd = createClaudeCommand({ client, log });
|
||||
cmd.parseAsync(['remove', 'slack', '-p', filePath], { from: 'user' });
|
||||
|
||||
const written = JSON.parse(readFileSync(filePath, 'utf-8'));
|
||||
expect(written.mcpServers['slack']).toBeUndefined();
|
||||
expect(written.mcpServers['github']).toBeDefined();
|
||||
expect(output.join('\n')).toContain("Removed 'slack'");
|
||||
|
||||
rmSync(tmpDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('reports when server not found', () => {
|
||||
const filePath = join(tmpDir, '.mcp.json');
|
||||
writeFileSync(filePath, JSON.stringify({ mcpServers: {} }));
|
||||
|
||||
const cmd = createClaudeCommand({ client, log });
|
||||
cmd.parseAsync(['remove', 'nonexistent', '-p', filePath], { from: 'user' });
|
||||
|
||||
expect(output.join('\n')).toContain('not found');
|
||||
rmSync(tmpDir, { recursive: true, force: true });
|
||||
});
|
||||
});
|
||||
});
|
||||
114
src/cli/tests/commands/config.test.ts
Normal file
114
src/cli/tests/commands/config.test.ts
Normal file
@@ -0,0 +1,114 @@
|
||||
import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest';
|
||||
import { mkdtempSync, rmSync } from 'node:fs';
|
||||
import { join } from 'node:path';
|
||||
import { tmpdir } from 'node:os';
|
||||
import { createConfigCommand } from '../../src/commands/config.js';
|
||||
import { loadConfig, saveConfig, DEFAULT_CONFIG } from '../../src/config/index.js';
|
||||
|
||||
let tempDir: string;
|
||||
let output: string[];
|
||||
|
||||
function log(...args: string[]) {
|
||||
output.push(args.join(' '));
|
||||
}
|
||||
|
||||
beforeEach(() => {
|
||||
tempDir = mkdtempSync(join(tmpdir(), 'mcpctl-config-test-'));
|
||||
output = [];
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
rmSync(tempDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
function makeCommand() {
|
||||
return createConfigCommand({
|
||||
configDeps: { configDir: tempDir },
|
||||
log,
|
||||
});
|
||||
}
|
||||
|
||||
describe('config view', () => {
|
||||
it('outputs default config as JSON', async () => {
|
||||
const cmd = makeCommand();
|
||||
await cmd.parseAsync(['view'], { from: 'user' });
|
||||
expect(output).toHaveLength(1);
|
||||
const parsed = JSON.parse(output[0]) as Record<string, unknown>;
|
||||
expect(parsed['mcplocalUrl']).toBe('http://localhost:3200');
|
||||
expect(parsed['mcpdUrl']).toBe('http://localhost:3100');
|
||||
});
|
||||
|
||||
it('outputs config as YAML with --output yaml', async () => {
|
||||
const cmd = makeCommand();
|
||||
await cmd.parseAsync(['view', '-o', 'yaml'], { from: 'user' });
|
||||
expect(output[0]).toContain('mcplocalUrl:');
|
||||
});
|
||||
});
|
||||
|
||||
describe('config set', () => {
|
||||
it('sets mcplocalUrl', async () => {
|
||||
const cmd = makeCommand();
|
||||
await cmd.parseAsync(['set', 'mcplocalUrl', 'http://new:9000'], { from: 'user' });
|
||||
expect(output[0]).toContain('mcplocalUrl');
|
||||
const config = loadConfig({ configDir: tempDir });
|
||||
expect(config.mcplocalUrl).toBe('http://new:9000');
|
||||
});
|
||||
|
||||
it('sets mcpdUrl', async () => {
|
||||
const cmd = makeCommand();
|
||||
await cmd.parseAsync(['set', 'mcpdUrl', 'http://remote:3100'], { from: 'user' });
|
||||
const config = loadConfig({ configDir: tempDir });
|
||||
expect(config.mcpdUrl).toBe('http://remote:3100');
|
||||
});
|
||||
|
||||
it('maps daemonUrl to mcplocalUrl for backward compat', async () => {
|
||||
const cmd = makeCommand();
|
||||
await cmd.parseAsync(['set', 'daemonUrl', 'http://legacy:3000'], { from: 'user' });
|
||||
const config = loadConfig({ configDir: tempDir });
|
||||
expect(config.mcplocalUrl).toBe('http://legacy:3000');
|
||||
});
|
||||
|
||||
it('sets cacheTTLMs as integer', async () => {
|
||||
const cmd = makeCommand();
|
||||
await cmd.parseAsync(['set', 'cacheTTLMs', '60000'], { from: 'user' });
|
||||
const config = loadConfig({ configDir: tempDir });
|
||||
expect(config.cacheTTLMs).toBe(60000);
|
||||
});
|
||||
|
||||
it('sets registries as comma-separated list', async () => {
|
||||
const cmd = makeCommand();
|
||||
await cmd.parseAsync(['set', 'registries', 'official,glama'], { from: 'user' });
|
||||
const config = loadConfig({ configDir: tempDir });
|
||||
expect(config.registries).toEqual(['official', 'glama']);
|
||||
});
|
||||
|
||||
it('sets outputFormat', async () => {
|
||||
const cmd = makeCommand();
|
||||
await cmd.parseAsync(['set', 'outputFormat', 'json'], { from: 'user' });
|
||||
const config = loadConfig({ configDir: tempDir });
|
||||
expect(config.outputFormat).toBe('json');
|
||||
});
|
||||
});
|
||||
|
||||
describe('config path', () => {
|
||||
it('shows config file path', async () => {
|
||||
const cmd = makeCommand();
|
||||
await cmd.parseAsync(['path'], { from: 'user' });
|
||||
expect(output[0]).toContain(tempDir);
|
||||
expect(output[0]).toContain('config.json');
|
||||
});
|
||||
});
|
||||
|
||||
describe('config reset', () => {
|
||||
it('resets to defaults', async () => {
|
||||
// First set a custom value
|
||||
saveConfig({ ...DEFAULT_CONFIG, mcplocalUrl: 'http://custom' }, { configDir: tempDir });
|
||||
|
||||
const cmd = makeCommand();
|
||||
await cmd.parseAsync(['reset'], { from: 'user' });
|
||||
expect(output[0]).toContain('reset');
|
||||
|
||||
const config = loadConfig({ configDir: tempDir });
|
||||
expect(config.mcplocalUrl).toBe(DEFAULT_CONFIG.mcplocalUrl);
|
||||
});
|
||||
});
|
||||
200
src/cli/tests/commands/create.test.ts
Normal file
200
src/cli/tests/commands/create.test.ts
Normal file
@@ -0,0 +1,200 @@
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import { createCreateCommand } from '../../src/commands/create.js';
|
||||
import { type ApiClient, ApiError } from '../../src/api-client.js';
|
||||
|
||||
function mockClient(): ApiClient {
|
||||
return {
|
||||
get: vi.fn(async () => []),
|
||||
post: vi.fn(async () => ({ id: 'new-id', name: 'test' })),
|
||||
put: vi.fn(async () => ({})),
|
||||
delete: vi.fn(async () => {}),
|
||||
} as unknown as ApiClient;
|
||||
}
|
||||
|
||||
// Tests for `mcpctl create <server|secret|project>`: request bodies sent to
// the API, template expansion, and the 409-conflict / --force update path.
// Several tests queue ordered one-shot mock responses
// (mockResolvedValueOnce / mockRejectedValueOnce), so call order matters.
describe('create command', () => {
  let client: ReturnType<typeof mockClient>;
  let output: string[];
  // Collects every log call as one joined line for substring assertions.
  const log = (...args: unknown[]) => output.push(args.map(String).join(' '));

  beforeEach(() => {
    client = mockClient();
    output = [];
  });

  describe('create server', () => {
    it('creates a server with minimal flags', async () => {
      const cmd = createCreateCommand({ client, log });
      await cmd.parseAsync(['server', 'my-server'], { from: 'user' });
      expect(client.post).toHaveBeenCalledWith('/api/v1/servers', expect.objectContaining({
        name: 'my-server',
        transport: 'STDIO',
        replicas: 1,
      }));
      // NOTE(review): the expected name 'test' matches mockClient's post
      // response ({ name: 'test' }), not the 'my-server' argument —
      // presumably the success message echoes the API response; confirm
      // against the create command implementation.
      expect(output.join('\n')).toContain("server 'test' created");
    });

    it('creates a server with all flags', async () => {
      const cmd = createCreateCommand({ client, log });
      await cmd.parseAsync([
        'server', 'ha-mcp',
        '-d', 'Home Assistant MCP',
        '--docker-image', 'ghcr.io/ha-mcp:latest',
        '--transport', 'STREAMABLE_HTTP',
        '--external-url', 'http://localhost:8086/mcp',
        '--container-port', '3000',
        '--replicas', '2',
        // Repeated --command flags accumulate into a command array.
        '--command', 'python',
        '--command', '-c',
        '--command', 'print("hello")',
        // --env supports both secretRef syntax and plain values.
        '--env', 'API_KEY=secretRef:creds:API_KEY',
        '--env', 'BASE_URL=http://localhost',
      ], { from: 'user' });

      // Exact-body match: every flag maps to a field, numbers are parsed,
      // and secretRef env entries become structured valueFrom objects.
      expect(client.post).toHaveBeenCalledWith('/api/v1/servers', {
        name: 'ha-mcp',
        description: 'Home Assistant MCP',
        dockerImage: 'ghcr.io/ha-mcp:latest',
        transport: 'STREAMABLE_HTTP',
        externalUrl: 'http://localhost:8086/mcp',
        containerPort: 3000,
        replicas: 2,
        command: ['python', '-c', 'print("hello")'],
        env: [
          { name: 'API_KEY', valueFrom: { secretRef: { name: 'creds', key: 'API_KEY' } } },
          { name: 'BASE_URL', value: 'http://localhost' },
        ],
      });
    });

    it('defaults transport to STDIO', async () => {
      const cmd = createCreateCommand({ client, log });
      await cmd.parseAsync(['server', 'test'], { from: 'user' });
      expect(client.post).toHaveBeenCalledWith('/api/v1/servers', expect.objectContaining({
        transport: 'STDIO',
      }));
    });

    it('strips null values from template when using --from-template', async () => {
      // One-shot template listing consumed by the template lookup.
      vi.mocked(client.get).mockResolvedValueOnce([{
        id: 'tpl-1',
        name: 'grafana',
        version: '1.0.0',
        description: 'Grafana MCP',
        packageName: '@leval/mcp-grafana',
        dockerImage: null,
        transport: 'STDIO',
        repositoryUrl: 'https://github.com/test',
        externalUrl: null,
        command: null,
        containerPort: null,
        replicas: 1,
        env: [{ name: 'TOKEN', required: true, description: 'A token' }],
        healthCheck: { tool: 'test', arguments: {} },
        createdAt: '2025-01-01',
        updatedAt: '2025-01-01',
      }] as never);
      const cmd = createCreateCommand({ client, log });
      await cmd.parseAsync([
        'server', 'my-grafana', '--from-template=grafana',
        '--env', 'TOKEN=secretRef:creds:TOKEN',
      ], { from: 'user' });
      // Inspect the raw body of the first post call.
      const call = vi.mocked(client.post).mock.calls[0]![1] as Record<string, unknown>;
      // null fields from template should NOT be in the body
      expect(call).not.toHaveProperty('dockerImage');
      expect(call).not.toHaveProperty('externalUrl');
      expect(call).not.toHaveProperty('command');
      expect(call).not.toHaveProperty('containerPort');
      // non-null fields should be present
      expect(call.packageName).toBe('@leval/mcp-grafana');
      expect(call.healthCheck).toEqual({ tool: 'test', arguments: {} });
      expect(call.templateName).toBe('grafana');
    });

    it('throws on 409 without --force', async () => {
      vi.mocked(client.post).mockRejectedValueOnce(new ApiError(409, '{"error":"Server already exists: my-server"}'));
      const cmd = createCreateCommand({ client, log });
      await expect(cmd.parseAsync(['server', 'my-server'], { from: 'user' })).rejects.toThrow('API error 409');
    });

    it('updates existing server on 409 with --force', async () => {
      // Queue a conflict on the create, then a lookup result used to
      // resolve the existing server's ID for the PUT.
      vi.mocked(client.post).mockRejectedValueOnce(new ApiError(409, '{"error":"Server already exists"}'));
      vi.mocked(client.get).mockResolvedValueOnce([{ id: 'srv-1', name: 'my-server' }] as never);
      const cmd = createCreateCommand({ client, log });
      await cmd.parseAsync(['server', 'my-server', '--force'], { from: 'user' });
      expect(client.put).toHaveBeenCalledWith('/api/v1/servers/srv-1', expect.objectContaining({
        transport: 'STDIO',
      }));
      expect(output.join('\n')).toContain("server 'my-server' updated");
    });
  });

  describe('create secret', () => {
    it('creates a secret with --data flags', async () => {
      const cmd = createCreateCommand({ client, log });
      await cmd.parseAsync([
        'secret', 'ha-creds',
        '--data', 'TOKEN=abc123',
        '--data', 'URL=https://ha.local',
      ], { from: 'user' });
      // Repeated --data KEY=VALUE flags merge into one data object.
      expect(client.post).toHaveBeenCalledWith('/api/v1/secrets', {
        name: 'ha-creds',
        data: { TOKEN: 'abc123', URL: 'https://ha.local' },
      });
      // NOTE(review): 'test' is the name from mockClient's post response,
      // not the 'ha-creds' argument — same pattern as the server test above.
      expect(output.join('\n')).toContain("secret 'test' created");
    });

    it('creates a secret with empty data', async () => {
      const cmd = createCreateCommand({ client, log });
      await cmd.parseAsync(['secret', 'empty-secret'], { from: 'user' });
      expect(client.post).toHaveBeenCalledWith('/api/v1/secrets', {
        name: 'empty-secret',
        data: {},
      });
    });

    it('throws on 409 without --force', async () => {
      vi.mocked(client.post).mockRejectedValueOnce(new ApiError(409, '{"error":"Secret already exists: my-creds"}'));
      const cmd = createCreateCommand({ client, log });
      await expect(cmd.parseAsync(['secret', 'my-creds', '--data', 'KEY=val'], { from: 'user' })).rejects.toThrow('API error 409');
    });

    it('updates existing secret on 409 with --force', async () => {
      vi.mocked(client.post).mockRejectedValueOnce(new ApiError(409, '{"error":"Secret already exists"}'));
      vi.mocked(client.get).mockResolvedValueOnce([{ id: 'sec-1', name: 'my-creds' }] as never);
      const cmd = createCreateCommand({ client, log });
      await cmd.parseAsync(['secret', 'my-creds', '--data', 'KEY=val', '--force'], { from: 'user' });
      // The forced update PUTs only the data payload to the resolved ID.
      expect(client.put).toHaveBeenCalledWith('/api/v1/secrets/sec-1', { data: { KEY: 'val' } });
      expect(output.join('\n')).toContain("secret 'my-creds' updated");
    });
  });

  describe('create project', () => {
    it('creates a project', async () => {
      const cmd = createCreateCommand({ client, log });
      await cmd.parseAsync(['project', 'my-project', '-d', 'A test project'], { from: 'user' });
      expect(client.post).toHaveBeenCalledWith('/api/v1/projects', {
        name: 'my-project',
        description: 'A test project',
      });
      // NOTE(review): 'test' again comes from the mock post response.
      expect(output.join('\n')).toContain("project 'test' created");
    });

    it('creates a project with no description', async () => {
      const cmd = createCreateCommand({ client, log });
      await cmd.parseAsync(['project', 'minimal'], { from: 'user' });
      // Omitted -d defaults to an empty description, not undefined.
      expect(client.post).toHaveBeenCalledWith('/api/v1/projects', {
        name: 'minimal',
        description: '',
      });
    });

    it('updates existing project on 409 with --force', async () => {
      vi.mocked(client.post).mockRejectedValueOnce(new ApiError(409, '{"error":"Project already exists"}'));
      vi.mocked(client.get).mockResolvedValueOnce([{ id: 'proj-1', name: 'my-proj' }] as never);
      const cmd = createCreateCommand({ client, log });
      await cmd.parseAsync(['project', 'my-proj', '-d', 'updated', '--force'], { from: 'user' });
      expect(client.put).toHaveBeenCalledWith('/api/v1/projects/proj-1', { description: 'updated' });
      expect(output.join('\n')).toContain("project 'my-proj' updated");
    });
  });
});
|
||||
290
src/cli/tests/commands/describe.test.ts
Normal file
290
src/cli/tests/commands/describe.test.ts
Normal file
@@ -0,0 +1,290 @@
|
||||
import { describe, it, expect, vi } from 'vitest';
|
||||
import { createDescribeCommand } from '../../src/commands/describe.js';
|
||||
import type { DescribeCommandDeps } from '../../src/commands/describe.js';
|
||||
import type { ApiClient } from '../../src/api-client.js';
|
||||
|
||||
function mockClient(): ApiClient {
|
||||
return {
|
||||
get: vi.fn(async () => []),
|
||||
post: vi.fn(async () => ({})),
|
||||
put: vi.fn(async () => ({})),
|
||||
delete: vi.fn(async () => {}),
|
||||
} as unknown as ApiClient;
|
||||
}
|
||||
|
||||
function makeDeps(item: unknown = {}): DescribeCommandDeps & { output: string[] } {
|
||||
const output: string[] = [];
|
||||
return {
|
||||
output,
|
||||
client: mockClient(),
|
||||
fetchResource: vi.fn(async () => item),
|
||||
log: (...args: string[]) => output.push(args.join(' ')),
|
||||
};
|
||||
}
|
||||
|
||||
// Tests for `mcpctl describe <resource> <name|id>`: sectioned text output,
// JSON/YAML output modes, secret masking, and name→instance resolution.
// These tests pass a full argv (leading 'node', 'test') to parseAsync
// instead of { from: 'user' }; the name-resolution tests queue ordered
// one-shot GET responses, so mock call order matters.
describe('describe command', () => {
  it('shows detailed server info with sections', async () => {
    const deps = makeDeps({
      id: 'srv-1',
      name: 'slack',
      transport: 'STDIO',
      packageName: '@slack/mcp',
      dockerImage: null,
      env: [],
      createdAt: '2025-01-01',
    });
    const cmd = createDescribeCommand(deps);
    await cmd.parseAsync(['node', 'test', 'server', 'srv-1']);

    expect(deps.fetchResource).toHaveBeenCalledWith('servers', 'srv-1');
    const text = deps.output.join('\n');
    // Output is organised into labelled sections with a === header.
    expect(text).toContain('=== Server: slack ===');
    expect(text).toContain('Name:');
    expect(text).toContain('slack');
    expect(text).toContain('Transport:');
    expect(text).toContain('STDIO');
    expect(text).toContain('Package:');
    expect(text).toContain('@slack/mcp');
    expect(text).toContain('Metadata:');
    expect(text).toContain('ID:');
  });

  it('resolves resource aliases', async () => {
    const deps = makeDeps({ id: 's1' });
    const cmd = createDescribeCommand(deps);
    // 'sec' must be accepted as an alias for 'secrets'.
    await cmd.parseAsync(['node', 'test', 'sec', 's1']);
    expect(deps.fetchResource).toHaveBeenCalledWith('secrets', 's1');
  });

  it('outputs JSON format', async () => {
    const deps = makeDeps({ id: 'srv-1', name: 'slack' });
    const cmd = createDescribeCommand(deps);
    await cmd.parseAsync(['node', 'test', 'server', 'srv-1', '-o', 'json']);

    const parsed = JSON.parse(deps.output[0] ?? '');
    expect(parsed.name).toBe('slack');
  });

  it('outputs YAML format', async () => {
    const deps = makeDeps({ id: 'srv-1', name: 'slack' });
    const cmd = createDescribeCommand(deps);
    await cmd.parseAsync(['node', 'test', 'server', 'srv-1', '-o', 'yaml']);
    expect(deps.output[0]).toContain('name: slack');
  });

  it('shows project detail', async () => {
    const deps = makeDeps({
      id: 'proj-1',
      name: 'my-project',
      description: 'A test project',
      ownerId: 'user-1',
      createdAt: '2025-01-01',
    });
    const cmd = createDescribeCommand(deps);
    await cmd.parseAsync(['node', 'test', 'project', 'proj-1']);

    const text = deps.output.join('\n');
    expect(text).toContain('=== Project: my-project ===');
    expect(text).toContain('A test project');
    expect(text).toContain('user-1');
  });

  it('shows secret detail with masked values', async () => {
    const deps = makeDeps({
      id: 'sec-1',
      name: 'ha-creds',
      data: { TOKEN: 'abc123', URL: 'https://ha.local' },
      createdAt: '2025-01-01',
    });
    const cmd = createDescribeCommand(deps);
    await cmd.parseAsync(['node', 'test', 'secret', 'sec-1']);

    const text = deps.output.join('\n');
    expect(text).toContain('=== Secret: ha-creds ===');
    // Keys are shown, values are masked by default, and a hint explains
    // how to reveal them.
    expect(text).toContain('TOKEN');
    expect(text).toContain('***');
    expect(text).not.toContain('abc123');
    expect(text).toContain('use --show-values to reveal');
  });

  it('shows secret detail with revealed values when --show-values', async () => {
    const deps = makeDeps({
      id: 'sec-1',
      name: 'ha-creds',
      data: { TOKEN: 'abc123' },
      createdAt: '2025-01-01',
    });
    const cmd = createDescribeCommand(deps);
    await cmd.parseAsync(['node', 'test', 'secret', 'sec-1', '--show-values']);

    const text = deps.output.join('\n');
    expect(text).toContain('abc123');
    expect(text).not.toContain('***');
  });

  it('shows instance detail with container info', async () => {
    const deps = makeDeps({
      id: 'inst-1',
      serverId: 'srv-1',
      status: 'RUNNING',
      containerId: 'abc123',
      port: 3000,
      createdAt: '2025-01-01',
    });
    const cmd = createDescribeCommand(deps);
    await cmd.parseAsync(['node', 'test', 'instance', 'inst-1']);

    const text = deps.output.join('\n');
    expect(text).toContain('=== Instance: inst-1 ===');
    expect(text).toContain('RUNNING');
    expect(text).toContain('abc123');
  });

  it('resolves server name to instance for describe instance', async () => {
    const deps = makeDeps({
      id: 'inst-1',
      serverId: 'srv-1',
      server: { name: 'my-grafana' },
      status: 'RUNNING',
      containerId: 'abc123',
      port: 3000,
    });
    // resolveNameOrId will throw (not a CUID, name won't match instances)
    // Three ordered GET responses drive the fallback path:
    vi.mocked(deps.client.get)
      .mockResolvedValueOnce([] as never) // instances list (no name match)
      .mockResolvedValueOnce([{ id: 'srv-1', name: 'my-grafana' }] as never) // servers list
      .mockResolvedValueOnce([{ id: 'inst-1', status: 'RUNNING' }] as never); // instances for server

    const cmd = createDescribeCommand(deps);
    await cmd.parseAsync(['node', 'test', 'instance', 'my-grafana']);

    expect(deps.fetchResource).toHaveBeenCalledWith('instances', 'inst-1');
  });

  it('resolves server name and picks running instance over stopped', async () => {
    const deps = makeDeps({
      id: 'inst-2',
      serverId: 'srv-1',
      server: { name: 'my-ha' },
      status: 'RUNNING',
      containerId: 'def456',
    });
    vi.mocked(deps.client.get)
      .mockResolvedValueOnce([] as never) // instances list
      .mockResolvedValueOnce([{ id: 'srv-1', name: 'my-ha' }] as never)
      .mockResolvedValueOnce([
        { id: 'inst-1', status: 'ERROR' },
        { id: 'inst-2', status: 'RUNNING' },
      ] as never);

    const cmd = createDescribeCommand(deps);
    await cmd.parseAsync(['node', 'test', 'instance', 'my-ha']);

    // The RUNNING instance wins over the earlier ERROR one.
    expect(deps.fetchResource).toHaveBeenCalledWith('instances', 'inst-2');
  });

  it('throws when no instances found for server name', async () => {
    const deps = makeDeps();
    vi.mocked(deps.client.get)
      .mockResolvedValueOnce([] as never) // instances list
      .mockResolvedValueOnce([{ id: 'srv-1', name: 'my-server' }] as never)
      .mockResolvedValueOnce([] as never); // no instances

    const cmd = createDescribeCommand(deps);
    await expect(cmd.parseAsync(['node', 'test', 'instance', 'my-server'])).rejects.toThrow(
      /No instances found/,
    );
  });

  it('shows instance with server name in header', async () => {
    const deps = makeDeps({
      id: 'inst-1',
      serverId: 'srv-1',
      server: { name: 'my-grafana' },
      status: 'RUNNING',
      containerId: 'abc123',
      port: 3000,
    });
    const cmd = createDescribeCommand(deps);
    await cmd.parseAsync(['node', 'test', 'instance', 'inst-1']);

    // When the related server is embedded, its name replaces the raw ID
    // in the header.
    const text = deps.output.join('\n');
    expect(text).toContain('=== Instance: my-grafana ===');
  });

  it('shows instance health and events', async () => {
    const deps = makeDeps({
      id: 'inst-1',
      serverId: 'srv-1',
      server: { name: 'my-grafana' },
      status: 'RUNNING',
      containerId: 'abc123',
      healthStatus: 'healthy',
      lastHealthCheck: '2025-01-15T10:30:00Z',
      events: [
        { timestamp: '2025-01-15T10:30:00Z', type: 'Normal', message: 'Health check passed (45ms)' },
      ],
    });
    const cmd = createDescribeCommand(deps);
    await cmd.parseAsync(['node', 'test', 'instance', 'inst-1']);

    const text = deps.output.join('\n');
    expect(text).toContain('Health:');
    expect(text).toContain('healthy');
    expect(text).toContain('Events:');
    expect(text).toContain('Health check passed');
  });

  it('shows server healthCheck section', async () => {
    const deps = makeDeps({
      id: 'srv-1',
      name: 'my-grafana',
      transport: 'STDIO',
      healthCheck: {
        tool: 'list_datasources',
        arguments: {},
        intervalSeconds: 60,
        timeoutSeconds: 10,
        failureThreshold: 3,
      },
    });
    const cmd = createDescribeCommand(deps);
    await cmd.parseAsync(['node', 'test', 'server', 'srv-1']);

    const text = deps.output.join('\n');
    expect(text).toContain('Health Check:');
    expect(text).toContain('list_datasources');
    // intervalSeconds is rendered with an 's' suffix.
    expect(text).toContain('60s');
    expect(text).toContain('Failure Threshold:');
  });

  it('shows template detail with healthCheck and usage', async () => {
    const deps = makeDeps({
      id: 'tpl-1',
      name: 'grafana',
      transport: 'STDIO',
      version: '1.0.0',
      packageName: '@leval/mcp-grafana',
      env: [
        { name: 'GRAFANA_URL', required: true, description: 'Grafana instance URL' },
      ],
      healthCheck: {
        tool: 'list_datasources',
        arguments: {},
        intervalSeconds: 60,
        timeoutSeconds: 10,
        failureThreshold: 3,
      },
    });
    const cmd = createDescribeCommand(deps);
    await cmd.parseAsync(['node', 'test', 'template', 'tpl-1']);

    const text = deps.output.join('\n');
    expect(text).toContain('=== Template: grafana ===');
    expect(text).toContain('@leval/mcp-grafana');
    expect(text).toContain('GRAFANA_URL');
    expect(text).toContain('Health Check:');
    expect(text).toContain('list_datasources');
    // Template output ends with a ready-to-copy usage line.
    expect(text).toContain('mcpctl create server my-grafana --from-template=grafana');
  });
});
|
||||
153
src/cli/tests/commands/edit.test.ts
Normal file
153
src/cli/tests/commands/edit.test.ts
Normal file
@@ -0,0 +1,153 @@
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import { readFileSync, writeFileSync } from 'node:fs';
|
||||
import yaml from 'js-yaml';
|
||||
import { createEditCommand } from '../../src/commands/edit.js';
|
||||
import type { ApiClient } from '../../src/api-client.js';
|
||||
|
||||
function mockClient(): ApiClient {
|
||||
return {
|
||||
get: vi.fn(async () => ({})),
|
||||
post: vi.fn(async () => ({})),
|
||||
put: vi.fn(async () => ({})),
|
||||
delete: vi.fn(async () => {}),
|
||||
} as unknown as ApiClient;
|
||||
}
|
||||
|
||||
// Tests for `mcpctl edit <resource> <name>`: the fetch → temp-file →
// $EDITOR → diff → PUT round-trip. openEditor is injected so tests can
// simulate the user's edit by reading/writing the real temp file the
// command creates. client.get is dispatched on the request path, so the
// mockImplementation bodies must distinguish list vs detail URLs.
describe('edit command', () => {
  let client: ReturnType<typeof mockClient>;
  let output: string[];
  // Collects every log call as one joined line for substring assertions.
  const log = (...args: unknown[]) => output.push(args.map(String).join(' '));

  beforeEach(() => {
    client = mockClient();
    output = [];
  });

  it('fetches server, opens editor, applies changes on save', async () => {
    // GET /api/v1/servers returns list for resolveNameOrId
    vi.mocked(client.get).mockImplementation(async (path: string) => {
      if (path === '/api/v1/servers') {
        return [{ id: 'srv-1', name: 'ha-mcp' }];
      }
      // GET /api/v1/servers/srv-1 returns full server
      return {
        id: 'srv-1',
        name: 'ha-mcp',
        description: 'Old desc',
        transport: 'STDIO',
        replicas: 1,
        createdAt: '2025-01-01',
        updatedAt: '2025-01-01',
        version: 1,
      };
    });

    const cmd = createEditCommand({
      client,
      log,
      getEditor: () => 'vi',
      openEditor: (filePath) => {
        // Simulate user editing the file
        const content = readFileSync(filePath, 'utf-8');
        const modified = content
          .replace('Old desc', 'New desc')
          .replace('replicas: 1', 'replicas: 3');
        writeFileSync(filePath, modified, 'utf-8');
      },
    });

    await cmd.parseAsync(['server', 'ha-mcp'], { from: 'user' });

    // The edited YAML values (string and number) land in the PUT body.
    expect(client.put).toHaveBeenCalledWith('/api/v1/servers/srv-1', expect.objectContaining({
      description: 'New desc',
      replicas: 3,
    }));
    expect(output.join('\n')).toContain("server 'ha-mcp' updated");
  });

  it('detects no changes and skips PUT', async () => {
    vi.mocked(client.get).mockImplementation(async (path: string) => {
      if (path === '/api/v1/servers') return [{ id: 'srv-1', name: 'test' }];
      return {
        id: 'srv-1', name: 'test', description: '', transport: 'STDIO',
        createdAt: '2025-01-01', updatedAt: '2025-01-01', version: 1,
      };
    });

    const cmd = createEditCommand({
      client,
      log,
      getEditor: () => 'vi',
      openEditor: () => {
        // Don't modify the file
      },
    });

    await cmd.parseAsync(['server', 'test'], { from: 'user' });

    expect(client.put).not.toHaveBeenCalled();
    expect(output.join('\n')).toContain("unchanged");
  });

  it('handles empty file as cancel', async () => {
    vi.mocked(client.get).mockImplementation(async (path: string) => {
      if (path === '/api/v1/servers') return [{ id: 'srv-1', name: 'test' }];
      return { id: 'srv-1', name: 'test', createdAt: '2025-01-01', updatedAt: '2025-01-01', version: 1 };
    });

    const cmd = createEditCommand({
      client,
      log,
      getEditor: () => 'vi',
      openEditor: (filePath) => {
        // Truncating the temp file signals "abort the edit".
        writeFileSync(filePath, '', 'utf-8');
      },
    });

    await cmd.parseAsync(['server', 'test'], { from: 'user' });

    expect(client.put).not.toHaveBeenCalled();
    expect(output.join('\n')).toContain('cancelled');
  });

  it('strips read-only fields from editor content', async () => {
    vi.mocked(client.get).mockImplementation(async (path: string) => {
      if (path === '/api/v1/servers') return [{ id: 'srv-1', name: 'test' }];
      return {
        id: 'srv-1', name: 'test', description: '', transport: 'STDIO',
        createdAt: '2025-01-01', updatedAt: '2025-01-01', version: 1,
      };
    });

    // Capture what the user would see in the editor without changing it.
    let editorContent = '';
    const cmd = createEditCommand({
      client,
      log,
      getEditor: () => 'vi',
      openEditor: (filePath) => {
        editorContent = readFileSync(filePath, 'utf-8');
      },
    });

    await cmd.parseAsync(['server', 'test'], { from: 'user' });

    // The editor content should NOT contain read-only fields
    expect(editorContent).not.toContain('id:');
    expect(editorContent).not.toContain('createdAt');
    expect(editorContent).not.toContain('updatedAt');
    expect(editorContent).not.toContain('version');
    // But should contain editable fields
    expect(editorContent).toContain('name:');
  });

  it('rejects edit instance with error message', async () => {
    const cmd = createEditCommand({ client, log });

    await cmd.parseAsync(['instance', 'inst-1'], { from: 'user' });

    // Instances can't be edited: no API calls at all, just a message.
    expect(client.get).not.toHaveBeenCalled();
    expect(client.put).not.toHaveBeenCalled();
    expect(output.join('\n')).toContain('immutable');
  });

});
|
||||
88
src/cli/tests/commands/get.test.ts
Normal file
88
src/cli/tests/commands/get.test.ts
Normal file
@@ -0,0 +1,88 @@
|
||||
import { describe, it, expect, vi } from 'vitest';
|
||||
import { createGetCommand } from '../../src/commands/get.js';
|
||||
import type { GetCommandDeps } from '../../src/commands/get.js';
|
||||
|
||||
function makeDeps(items: unknown[] = []): GetCommandDeps & { output: string[] } {
|
||||
const output: string[] = [];
|
||||
return {
|
||||
output,
|
||||
fetchResource: vi.fn(async () => items),
|
||||
log: (...args: string[]) => output.push(args.join(' ')),
|
||||
};
|
||||
}
|
||||
|
||||
describe('get command', () => {
|
||||
it('lists servers in table format', async () => {
|
||||
const deps = makeDeps([
|
||||
{ id: 'srv-1', name: 'slack', transport: 'STDIO', packageName: '@slack/mcp', dockerImage: null },
|
||||
{ id: 'srv-2', name: 'github', transport: 'SSE', packageName: null, dockerImage: 'ghcr.io/github-mcp' },
|
||||
]);
|
||||
const cmd = createGetCommand(deps);
|
||||
await cmd.parseAsync(['node', 'test', 'servers']);
|
||||
|
||||
expect(deps.fetchResource).toHaveBeenCalledWith('servers', undefined);
|
||||
expect(deps.output[0]).toContain('NAME');
|
||||
expect(deps.output[0]).toContain('TRANSPORT');
|
||||
expect(deps.output.join('\n')).toContain('slack');
|
||||
expect(deps.output.join('\n')).toContain('github');
|
||||
});
|
||||
|
||||
it('resolves resource aliases', async () => {
|
||||
const deps = makeDeps([]);
|
||||
const cmd = createGetCommand(deps);
|
||||
await cmd.parseAsync(['node', 'test', 'srv']);
|
||||
expect(deps.fetchResource).toHaveBeenCalledWith('servers', undefined);
|
||||
});
|
||||
|
||||
it('passes ID when provided', async () => {
|
||||
const deps = makeDeps([{ id: 'srv-1', name: 'slack' }]);
|
||||
const cmd = createGetCommand(deps);
|
||||
await cmd.parseAsync(['node', 'test', 'servers', 'srv-1']);
|
||||
expect(deps.fetchResource).toHaveBeenCalledWith('servers', 'srv-1');
|
||||
});
|
||||
|
||||
it('outputs apply-compatible JSON format', async () => {
|
||||
const deps = makeDeps([{ id: 'srv-1', name: 'slack', createdAt: '2025-01-01', updatedAt: '2025-01-01', version: 1 }]);
|
||||
const cmd = createGetCommand(deps);
|
||||
await cmd.parseAsync(['node', 'test', 'servers', '-o', 'json']);
|
||||
|
||||
const parsed = JSON.parse(deps.output[0] ?? '');
|
||||
// Wrapped in resource key, internal fields stripped
|
||||
expect(parsed).toHaveProperty('servers');
|
||||
expect(parsed.servers[0].name).toBe('slack');
|
||||
expect(parsed.servers[0]).not.toHaveProperty('id');
|
||||
expect(parsed.servers[0]).not.toHaveProperty('createdAt');
|
||||
expect(parsed.servers[0]).not.toHaveProperty('updatedAt');
|
||||
expect(parsed.servers[0]).not.toHaveProperty('version');
|
||||
});
|
||||
|
||||
it('outputs apply-compatible YAML format', async () => {
|
||||
const deps = makeDeps([{ id: 'srv-1', name: 'slack', createdAt: '2025-01-01' }]);
|
||||
const cmd = createGetCommand(deps);
|
||||
await cmd.parseAsync(['node', 'test', 'servers', '-o', 'yaml']);
|
||||
const text = deps.output[0];
|
||||
expect(text).toContain('servers:');
|
||||
expect(text).toContain('name: slack');
|
||||
expect(text).not.toContain('id:');
|
||||
expect(text).not.toContain('createdAt:');
|
||||
});
|
||||
|
||||
it('lists instances with correct columns', async () => {
|
||||
const deps = makeDeps([
|
||||
{ id: 'inst-1', serverId: 'srv-1', server: { name: 'my-grafana' }, status: 'RUNNING', containerId: 'abc123def456', port: 3000 },
|
||||
]);
|
||||
const cmd = createGetCommand(deps);
|
||||
await cmd.parseAsync(['node', 'test', 'instances']);
|
||||
expect(deps.output[0]).toContain('NAME');
|
||||
expect(deps.output[0]).toContain('STATUS');
|
||||
expect(deps.output.join('\n')).toContain('my-grafana');
|
||||
expect(deps.output.join('\n')).toContain('RUNNING');
|
||||
});
|
||||
|
||||
it('shows no results message for empty list', async () => {
|
||||
const deps = makeDeps([]);
|
||||
const cmd = createGetCommand(deps);
|
||||
await cmd.parseAsync(['node', 'test', 'servers']);
|
||||
expect(deps.output[0]).toContain('No servers found');
|
||||
});
|
||||
});
|
||||
148
src/cli/tests/commands/instances.test.ts
Normal file
148
src/cli/tests/commands/instances.test.ts
Normal file
@@ -0,0 +1,148 @@
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import { createDeleteCommand } from '../../src/commands/delete.js';
|
||||
import { createLogsCommand } from '../../src/commands/logs.js';
|
||||
import type { ApiClient } from '../../src/api-client.js';
|
||||
|
||||
function mockClient(): ApiClient {
|
||||
return {
|
||||
get: vi.fn(async () => []),
|
||||
post: vi.fn(async () => ({})),
|
||||
put: vi.fn(async () => ({})),
|
||||
delete: vi.fn(async () => {}),
|
||||
} as unknown as ApiClient;
|
||||
}
|
||||
|
||||
describe('delete command', () => {
|
||||
let client: ReturnType<typeof mockClient>;
|
||||
let output: string[];
|
||||
const log = (...args: unknown[]) => output.push(args.map(String).join(' '));
|
||||
|
||||
beforeEach(() => {
|
||||
client = mockClient();
|
||||
output = [];
|
||||
});
|
||||
|
||||
it('deletes an instance by ID', async () => {
|
||||
const cmd = createDeleteCommand({ client, log });
|
||||
await cmd.parseAsync(['instance', 'inst-1'], { from: 'user' });
|
||||
expect(client.delete).toHaveBeenCalledWith('/api/v1/instances/inst-1');
|
||||
expect(output.join('\n')).toContain('deleted');
|
||||
});
|
||||
|
||||
it('deletes a server by ID', async () => {
|
||||
const cmd = createDeleteCommand({ client, log });
|
||||
await cmd.parseAsync(['server', 'srv-1'], { from: 'user' });
|
||||
expect(client.delete).toHaveBeenCalledWith('/api/v1/servers/srv-1');
|
||||
expect(output.join('\n')).toContain('deleted');
|
||||
});
|
||||
|
||||
it('resolves server name to ID', async () => {
|
||||
vi.mocked(client.get).mockResolvedValue([
|
||||
{ id: 'srv-abc', name: 'ha-mcp' },
|
||||
]);
|
||||
const cmd = createDeleteCommand({ client, log });
|
||||
await cmd.parseAsync(['server', 'ha-mcp'], { from: 'user' });
|
||||
expect(client.delete).toHaveBeenCalledWith('/api/v1/servers/srv-abc');
|
||||
});
|
||||
|
||||
it('deletes a project', async () => {
|
||||
const cmd = createDeleteCommand({ client, log });
|
||||
await cmd.parseAsync(['project', 'proj-1'], { from: 'user' });
|
||||
expect(client.delete).toHaveBeenCalledWith('/api/v1/projects/proj-1');
|
||||
});
|
||||
|
||||
it('accepts resource aliases', async () => {
|
||||
const cmd = createDeleteCommand({ client, log });
|
||||
await cmd.parseAsync(['srv', 'srv-1'], { from: 'user' });
|
||||
expect(client.delete).toHaveBeenCalledWith('/api/v1/servers/srv-1');
|
||||
});
|
||||
});
|
||||
|
||||
describe('logs command', () => {
|
||||
let client: ReturnType<typeof mockClient>;
|
||||
let output: string[];
|
||||
const log = (...args: unknown[]) => output.push(args.map(String).join(' '));
|
||||
|
||||
beforeEach(() => {
|
||||
client = mockClient();
|
||||
output = [];
|
||||
});
|
||||
|
||||
it('shows logs by instance ID', async () => {
|
||||
vi.mocked(client.get)
|
||||
.mockResolvedValueOnce({ id: 'inst-1', status: 'RUNNING' } as never) // instance lookup
|
||||
.mockResolvedValueOnce({ stdout: 'hello world\n', stderr: '' } as never); // logs
|
||||
const cmd = createLogsCommand({ client, log });
|
||||
await cmd.parseAsync(['inst-1'], { from: 'user' });
|
||||
expect(client.get).toHaveBeenCalledWith('/api/v1/instances/inst-1');
|
||||
expect(client.get).toHaveBeenCalledWith('/api/v1/instances/inst-1/logs');
|
||||
expect(output.join('\n')).toContain('hello world');
|
||||
});
|
||||
|
||||
it('resolves server name to instance ID', async () => {
|
||||
vi.mocked(client.get)
|
||||
.mockRejectedValueOnce(new Error('not found')) // instance lookup fails
|
||||
.mockResolvedValueOnce([{ id: 'srv-1', name: 'my-grafana' }] as never) // servers list
|
||||
.mockResolvedValueOnce([{ id: 'inst-1', status: 'RUNNING', containerId: 'abc' }] as never) // instances for server
|
||||
.mockResolvedValueOnce({ stdout: 'grafana logs\n', stderr: '' } as never); // logs
|
||||
const cmd = createLogsCommand({ client, log });
|
||||
await cmd.parseAsync(['my-grafana'], { from: 'user' });
|
||||
expect(client.get).toHaveBeenCalledWith('/api/v1/instances/inst-1/logs');
|
||||
expect(output.join('\n')).toContain('grafana logs');
|
||||
});
|
||||
|
||||
it('picks RUNNING instance over others', async () => {
|
||||
vi.mocked(client.get)
|
||||
.mockRejectedValueOnce(new Error('not found'))
|
||||
.mockResolvedValueOnce([{ id: 'srv-1', name: 'ha-mcp' }] as never)
|
||||
.mockResolvedValueOnce([
|
||||
{ id: 'inst-err', status: 'ERROR', containerId: null },
|
||||
{ id: 'inst-ok', status: 'RUNNING', containerId: 'abc' },
|
||||
] as never)
|
||||
.mockResolvedValueOnce({ stdout: 'running instance\n', stderr: '' } as never);
|
||||
const cmd = createLogsCommand({ client, log });
|
||||
await cmd.parseAsync(['ha-mcp'], { from: 'user' });
|
||||
expect(client.get).toHaveBeenCalledWith('/api/v1/instances/inst-ok/logs');
|
||||
});
|
||||
|
||||
it('selects specific replica with --instance', async () => {
|
||||
vi.mocked(client.get)
|
||||
.mockRejectedValueOnce(new Error('not found'))
|
||||
.mockResolvedValueOnce([{ id: 'srv-1', name: 'ha-mcp' }] as never)
|
||||
.mockResolvedValueOnce([
|
||||
{ id: 'inst-0', status: 'RUNNING', containerId: 'a' },
|
||||
{ id: 'inst-1', status: 'RUNNING', containerId: 'b' },
|
||||
] as never)
|
||||
.mockResolvedValueOnce({ stdout: 'replica 1\n', stderr: '' } as never);
|
||||
const cmd = createLogsCommand({ client, log });
|
||||
await cmd.parseAsync(['ha-mcp', '-i', '1'], { from: 'user' });
|
||||
expect(client.get).toHaveBeenCalledWith('/api/v1/instances/inst-1/logs');
|
||||
});
|
||||
|
||||
it('throws on out-of-range --instance index', async () => {
|
||||
vi.mocked(client.get)
|
||||
.mockRejectedValueOnce(new Error('not found'))
|
||||
.mockResolvedValueOnce([{ id: 'srv-1', name: 'ha-mcp' }] as never)
|
||||
.mockResolvedValueOnce([{ id: 'inst-0', status: 'RUNNING' }] as never);
|
||||
const cmd = createLogsCommand({ client, log });
|
||||
await expect(cmd.parseAsync(['ha-mcp', '-i', '5'], { from: 'user' })).rejects.toThrow('out of range');
|
||||
});
|
||||
|
||||
it('throws when server has no instances', async () => {
|
||||
vi.mocked(client.get)
|
||||
.mockRejectedValueOnce(new Error('not found'))
|
||||
.mockResolvedValueOnce([{ id: 'srv-1', name: 'empty-srv' }] as never)
|
||||
.mockResolvedValueOnce([] as never);
|
||||
const cmd = createLogsCommand({ client, log });
|
||||
await expect(cmd.parseAsync(['empty-srv'], { from: 'user' })).rejects.toThrow('No instances found');
|
||||
});
|
||||
|
||||
it('passes tail option', async () => {
|
||||
vi.mocked(client.get)
|
||||
.mockResolvedValueOnce({ id: 'inst-1' } as never)
|
||||
.mockResolvedValueOnce({ stdout: '', stderr: '' } as never);
|
||||
const cmd = createLogsCommand({ client, log });
|
||||
await cmd.parseAsync(['inst-1', '-t', '50'], { from: 'user' });
|
||||
expect(client.get).toHaveBeenCalledWith('/api/v1/instances/inst-1/logs?tail=50');
|
||||
});
|
||||
});
|
||||
29
src/cli/tests/commands/project.test.ts
Normal file
29
src/cli/tests/commands/project.test.ts
Normal file
@@ -0,0 +1,29 @@
|
||||
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||
import { createProjectCommand } from '../../src/commands/project.js';
|
||||
import type { ApiClient } from '../../src/api-client.js';
|
||||
|
||||
function mockClient(): ApiClient {
|
||||
return {
|
||||
get: vi.fn(async () => []),
|
||||
post: vi.fn(async () => ({ id: 'proj-1', name: 'my-project' })),
|
||||
put: vi.fn(async () => ({})),
|
||||
delete: vi.fn(async () => {}),
|
||||
} as unknown as ApiClient;
|
||||
}
|
||||
|
||||
describe('project command', () => {
|
||||
let client: ReturnType<typeof mockClient>;
|
||||
let output: string[];
|
||||
const log = (...args: unknown[]) => output.push(args.map(String).join(' '));
|
||||
|
||||
beforeEach(() => {
|
||||
client = mockClient();
|
||||
output = [];
|
||||
});
|
||||
|
||||
it('creates command with alias', () => {
|
||||
const cmd = createProjectCommand({ client, log });
|
||||
expect(cmd.name()).toBe('project');
|
||||
expect(cmd.alias()).toBe('proj');
|
||||
});
|
||||
});
|
||||
129
src/cli/tests/commands/status.test.ts
Normal file
129
src/cli/tests/commands/status.test.ts
Normal file
@@ -0,0 +1,129 @@
|
||||
import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest';
|
||||
import { mkdtempSync, rmSync } from 'node:fs';
|
||||
import { join } from 'node:path';
|
||||
import { tmpdir } from 'node:os';
|
||||
import { createStatusCommand } from '../../src/commands/status.js';
|
||||
import { saveConfig, DEFAULT_CONFIG } from '../../src/config/index.js';
|
||||
import { saveCredentials } from '../../src/auth/index.js';
|
||||
|
||||
let tempDir: string;
|
||||
let output: string[];
|
||||
|
||||
function log(...args: string[]) {
|
||||
output.push(args.join(' '));
|
||||
}
|
||||
|
||||
beforeEach(() => {
|
||||
tempDir = mkdtempSync(join(tmpdir(), 'mcpctl-status-test-'));
|
||||
output = [];
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
rmSync(tempDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
describe('status command', () => {
|
||||
it('shows status in table format', async () => {
|
||||
const cmd = createStatusCommand({
|
||||
configDeps: { configDir: tempDir },
|
||||
credentialsDeps: { configDir: tempDir },
|
||||
log,
|
||||
checkHealth: async () => true,
|
||||
});
|
||||
await cmd.parseAsync([], { from: 'user' });
|
||||
const out = output.join('\n');
|
||||
expect(out).toContain('mcpctl v');
|
||||
expect(out).toContain('mcplocal:');
|
||||
expect(out).toContain('mcpd:');
|
||||
expect(out).toContain('connected');
|
||||
});
|
||||
|
||||
it('shows unreachable when daemons are down', async () => {
|
||||
const cmd = createStatusCommand({
|
||||
configDeps: { configDir: tempDir },
|
||||
credentialsDeps: { configDir: tempDir },
|
||||
log,
|
||||
checkHealth: async () => false,
|
||||
});
|
||||
await cmd.parseAsync([], { from: 'user' });
|
||||
expect(output.join('\n')).toContain('unreachable');
|
||||
});
|
||||
|
||||
it('shows not logged in when no credentials', async () => {
|
||||
const cmd = createStatusCommand({
|
||||
configDeps: { configDir: tempDir },
|
||||
credentialsDeps: { configDir: tempDir },
|
||||
log,
|
||||
checkHealth: async () => true,
|
||||
});
|
||||
await cmd.parseAsync([], { from: 'user' });
|
||||
expect(output.join('\n')).toContain('not logged in');
|
||||
});
|
||||
|
||||
it('shows logged in user when credentials exist', async () => {
|
||||
saveCredentials({ token: 'tok', mcpdUrl: 'http://x:3100', user: 'alice@example.com' }, { configDir: tempDir });
|
||||
const cmd = createStatusCommand({
|
||||
configDeps: { configDir: tempDir },
|
||||
credentialsDeps: { configDir: tempDir },
|
||||
log,
|
||||
checkHealth: async () => true,
|
||||
});
|
||||
await cmd.parseAsync([], { from: 'user' });
|
||||
expect(output.join('\n')).toContain('logged in as alice@example.com');
|
||||
});
|
||||
|
||||
it('shows status in JSON format', async () => {
|
||||
const cmd = createStatusCommand({
|
||||
configDeps: { configDir: tempDir },
|
||||
credentialsDeps: { configDir: tempDir },
|
||||
log,
|
||||
checkHealth: async () => true,
|
||||
});
|
||||
await cmd.parseAsync(['-o', 'json'], { from: 'user' });
|
||||
const parsed = JSON.parse(output[0]) as Record<string, unknown>;
|
||||
expect(parsed['version']).toBe('0.1.0');
|
||||
expect(parsed['mcplocalReachable']).toBe(true);
|
||||
expect(parsed['mcpdReachable']).toBe(true);
|
||||
});
|
||||
|
||||
it('shows status in YAML format', async () => {
|
||||
const cmd = createStatusCommand({
|
||||
configDeps: { configDir: tempDir },
|
||||
credentialsDeps: { configDir: tempDir },
|
||||
log,
|
||||
checkHealth: async () => false,
|
||||
});
|
||||
await cmd.parseAsync(['-o', 'yaml'], { from: 'user' });
|
||||
expect(output[0]).toContain('mcplocalReachable: false');
|
||||
});
|
||||
|
||||
it('checks correct URLs from config', async () => {
|
||||
saveConfig({ ...DEFAULT_CONFIG, mcplocalUrl: 'http://local:3200', mcpdUrl: 'http://remote:3100' }, { configDir: tempDir });
|
||||
const checkedUrls: string[] = [];
|
||||
const cmd = createStatusCommand({
|
||||
configDeps: { configDir: tempDir },
|
||||
credentialsDeps: { configDir: tempDir },
|
||||
log,
|
||||
checkHealth: async (url) => {
|
||||
checkedUrls.push(url);
|
||||
return false;
|
||||
},
|
||||
});
|
||||
await cmd.parseAsync([], { from: 'user' });
|
||||
expect(checkedUrls).toContain('http://local:3200');
|
||||
expect(checkedUrls).toContain('http://remote:3100');
|
||||
});
|
||||
|
||||
it('shows registries from config', async () => {
|
||||
saveConfig({ ...DEFAULT_CONFIG, registries: ['official'] }, { configDir: tempDir });
|
||||
const cmd = createStatusCommand({
|
||||
configDeps: { configDir: tempDir },
|
||||
credentialsDeps: { configDir: tempDir },
|
||||
log,
|
||||
checkHealth: async () => true,
|
||||
});
|
||||
await cmd.parseAsync([], { from: 'user' });
|
||||
expect(output.join('\n')).toContain('official');
|
||||
expect(output.join('\n')).not.toContain('glama');
|
||||
});
|
||||
});
|
||||
90
src/cli/tests/config/loader.test.ts
Normal file
90
src/cli/tests/config/loader.test.ts
Normal file
@@ -0,0 +1,90 @@
|
||||
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||
import { mkdtempSync, rmSync, existsSync } from 'node:fs';
|
||||
import { join } from 'node:path';
|
||||
import { tmpdir } from 'node:os';
|
||||
import { loadConfig, saveConfig, mergeConfig, getConfigPath, DEFAULT_CONFIG } from '../../src/config/index.js';
|
||||
|
||||
let tempDir: string;
|
||||
|
||||
beforeEach(() => {
|
||||
tempDir = mkdtempSync(join(tmpdir(), 'mcpctl-test-'));
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
rmSync(tempDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
describe('getConfigPath', () => {
|
||||
it('returns path within config dir', () => {
|
||||
const path = getConfigPath('/tmp/mcpctl');
|
||||
expect(path).toBe('/tmp/mcpctl/config.json');
|
||||
});
|
||||
});
|
||||
|
||||
describe('loadConfig', () => {
|
||||
it('returns defaults when no config file exists', () => {
|
||||
const config = loadConfig({ configDir: tempDir });
|
||||
expect(config).toEqual(DEFAULT_CONFIG);
|
||||
});
|
||||
|
||||
it('loads config from file', () => {
|
||||
saveConfig({ ...DEFAULT_CONFIG, mcplocalUrl: 'http://custom:5000' }, { configDir: tempDir });
|
||||
const config = loadConfig({ configDir: tempDir });
|
||||
expect(config.mcplocalUrl).toBe('http://custom:5000');
|
||||
});
|
||||
|
||||
it('applies defaults for missing fields', () => {
|
||||
const { writeFileSync } = require('node:fs') as typeof import('node:fs');
|
||||
writeFileSync(join(tempDir, 'config.json'), '{"mcplocalUrl":"http://x:1"}');
|
||||
const config = loadConfig({ configDir: tempDir });
|
||||
expect(config.mcplocalUrl).toBe('http://x:1');
|
||||
expect(config.registries).toEqual(['official', 'glama', 'smithery']);
|
||||
});
|
||||
|
||||
it('backward compat: daemonUrl maps to mcplocalUrl', () => {
|
||||
const { writeFileSync } = require('node:fs') as typeof import('node:fs');
|
||||
writeFileSync(join(tempDir, 'config.json'), '{"daemonUrl":"http://old:3000"}');
|
||||
const config = loadConfig({ configDir: tempDir });
|
||||
expect(config.mcplocalUrl).toBe('http://old:3000');
|
||||
});
|
||||
});
|
||||
|
||||
describe('saveConfig', () => {
|
||||
it('creates config file', () => {
|
||||
saveConfig(DEFAULT_CONFIG, { configDir: tempDir });
|
||||
expect(existsSync(join(tempDir, 'config.json'))).toBe(true);
|
||||
});
|
||||
|
||||
it('creates config directory if missing', () => {
|
||||
const nested = join(tempDir, 'nested', 'dir');
|
||||
saveConfig(DEFAULT_CONFIG, { configDir: nested });
|
||||
expect(existsSync(join(nested, 'config.json'))).toBe(true);
|
||||
});
|
||||
|
||||
it('round-trips configuration', () => {
|
||||
const custom = {
|
||||
...DEFAULT_CONFIG,
|
||||
mcplocalUrl: 'http://custom:9000',
|
||||
registries: ['official' as const],
|
||||
outputFormat: 'json' as const,
|
||||
};
|
||||
saveConfig(custom, { configDir: tempDir });
|
||||
const loaded = loadConfig({ configDir: tempDir });
|
||||
expect(loaded).toEqual(custom);
|
||||
});
|
||||
});
|
||||
|
||||
describe('mergeConfig', () => {
|
||||
it('merges overrides into existing config', () => {
|
||||
saveConfig(DEFAULT_CONFIG, { configDir: tempDir });
|
||||
const merged = mergeConfig({ mcplocalUrl: 'http://new:1234' }, { configDir: tempDir });
|
||||
expect(merged.mcplocalUrl).toBe('http://new:1234');
|
||||
expect(merged.registries).toEqual(DEFAULT_CONFIG.registries);
|
||||
});
|
||||
|
||||
it('works when no config file exists', () => {
|
||||
const merged = mergeConfig({ outputFormat: 'yaml' }, { configDir: tempDir });
|
||||
expect(merged.outputFormat).toBe('yaml');
|
||||
expect(merged.mcplocalUrl).toBe('http://localhost:3200');
|
||||
});
|
||||
});
|
||||
69
src/cli/tests/config/schema.test.ts
Normal file
69
src/cli/tests/config/schema.test.ts
Normal file
@@ -0,0 +1,69 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { McpctlConfigSchema, DEFAULT_CONFIG } from '../../src/config/schema.js';
|
||||
|
||||
describe('McpctlConfigSchema', () => {
|
||||
it('provides sensible defaults from empty object', () => {
|
||||
const config = McpctlConfigSchema.parse({});
|
||||
expect(config.mcplocalUrl).toBe('http://localhost:3200');
|
||||
expect(config.mcpdUrl).toBe('http://localhost:3100');
|
||||
expect(config.registries).toEqual(['official', 'glama', 'smithery']);
|
||||
expect(config.cacheTTLMs).toBe(3_600_000);
|
||||
expect(config.outputFormat).toBe('table');
|
||||
expect(config.httpProxy).toBeUndefined();
|
||||
expect(config.httpsProxy).toBeUndefined();
|
||||
expect(config.smitheryApiKey).toBeUndefined();
|
||||
});
|
||||
|
||||
it('validates a full config', () => {
|
||||
const config = McpctlConfigSchema.parse({
|
||||
mcplocalUrl: 'http://local:3200',
|
||||
mcpdUrl: 'http://custom:4000',
|
||||
registries: ['official'],
|
||||
cacheTTLMs: 60_000,
|
||||
httpProxy: 'http://proxy:8080',
|
||||
httpsProxy: 'http://proxy:8443',
|
||||
outputFormat: 'json',
|
||||
smitheryApiKey: 'sk-test',
|
||||
});
|
||||
expect(config.mcplocalUrl).toBe('http://local:3200');
|
||||
expect(config.mcpdUrl).toBe('http://custom:4000');
|
||||
expect(config.registries).toEqual(['official']);
|
||||
expect(config.outputFormat).toBe('json');
|
||||
});
|
||||
|
||||
it('backward compat: maps daemonUrl to mcplocalUrl', () => {
|
||||
const config = McpctlConfigSchema.parse({ daemonUrl: 'http://legacy:3000' });
|
||||
expect(config.mcplocalUrl).toBe('http://legacy:3000');
|
||||
expect(config.mcpdUrl).toBe('http://localhost:3100');
|
||||
});
|
||||
|
||||
it('mcplocalUrl takes precedence over daemonUrl', () => {
|
||||
const config = McpctlConfigSchema.parse({
|
||||
daemonUrl: 'http://legacy:3000',
|
||||
mcplocalUrl: 'http://explicit:3200',
|
||||
});
|
||||
expect(config.mcplocalUrl).toBe('http://explicit:3200');
|
||||
});
|
||||
|
||||
it('rejects invalid registry names', () => {
|
||||
expect(() => McpctlConfigSchema.parse({ registries: ['invalid'] })).toThrow();
|
||||
});
|
||||
|
||||
it('rejects invalid output format', () => {
|
||||
expect(() => McpctlConfigSchema.parse({ outputFormat: 'xml' })).toThrow();
|
||||
});
|
||||
|
||||
it('rejects negative cacheTTLMs', () => {
|
||||
expect(() => McpctlConfigSchema.parse({ cacheTTLMs: -1 })).toThrow();
|
||||
});
|
||||
|
||||
it('rejects non-integer cacheTTLMs', () => {
|
||||
expect(() => McpctlConfigSchema.parse({ cacheTTLMs: 1.5 })).toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe('DEFAULT_CONFIG', () => {
|
||||
it('matches schema defaults', () => {
|
||||
expect(DEFAULT_CONFIG).toEqual(McpctlConfigSchema.parse({}));
|
||||
});
|
||||
});
|
||||
67
src/cli/tests/e2e/cli-commands.test.ts
Normal file
67
src/cli/tests/e2e/cli-commands.test.ts
Normal file
@@ -0,0 +1,67 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { createProgram } from '../../src/index.js';
|
||||
|
||||
/**
|
||||
* End-to-end tests that verify CLI command registration and help output
|
||||
* without requiring a running daemon.
|
||||
*/
|
||||
describe('CLI command registration (e2e)', () => {
|
||||
it('program has all expected commands', () => {
|
||||
const program = createProgram();
|
||||
const commandNames = program.commands.map((c) => c.name());
|
||||
|
||||
expect(commandNames).toContain('config');
|
||||
expect(commandNames).toContain('status');
|
||||
expect(commandNames).toContain('login');
|
||||
expect(commandNames).toContain('logout');
|
||||
expect(commandNames).toContain('get');
|
||||
expect(commandNames).toContain('describe');
|
||||
expect(commandNames).toContain('delete');
|
||||
expect(commandNames).toContain('logs');
|
||||
expect(commandNames).toContain('apply');
|
||||
expect(commandNames).toContain('create');
|
||||
expect(commandNames).toContain('edit');
|
||||
expect(commandNames).toContain('claude');
|
||||
expect(commandNames).toContain('project');
|
||||
expect(commandNames).toContain('backup');
|
||||
expect(commandNames).toContain('restore');
|
||||
});
|
||||
|
||||
it('instance command is removed (use get/delete/logs instead)', () => {
|
||||
const program = createProgram();
|
||||
const commandNames = program.commands.map((c) => c.name());
|
||||
expect(commandNames).not.toContain('instance');
|
||||
});
|
||||
|
||||
it('claude command has config management subcommands', () => {
|
||||
const program = createProgram();
|
||||
const claude = program.commands.find((c) => c.name() === 'claude');
|
||||
expect(claude).toBeDefined();
|
||||
|
||||
const subcommands = claude!.commands.map((c) => c.name());
|
||||
expect(subcommands).toContain('generate');
|
||||
expect(subcommands).toContain('show');
|
||||
expect(subcommands).toContain('add');
|
||||
expect(subcommands).toContain('remove');
|
||||
});
|
||||
|
||||
it('project command exists with alias', () => {
|
||||
const program = createProgram();
|
||||
const project = program.commands.find((c) => c.name() === 'project');
|
||||
expect(project).toBeDefined();
|
||||
expect(project!.alias()).toBe('proj');
|
||||
});
|
||||
|
||||
it('displays version', () => {
|
||||
const program = createProgram();
|
||||
expect(program.version()).toBeDefined();
|
||||
expect(program.version()).toMatch(/^\d+\.\d+\.\d+$/);
|
||||
});
|
||||
|
||||
it('displays help without error', () => {
|
||||
const program = createProgram();
|
||||
const helpText = program.helpInformation();
|
||||
expect(helpText).toContain('mcpctl');
|
||||
expect(helpText).toContain('Manage MCP servers');
|
||||
});
|
||||
});
|
||||
41
src/cli/tests/formatters/output.test.ts
Normal file
41
src/cli/tests/formatters/output.test.ts
Normal file
@@ -0,0 +1,41 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { formatJson, formatYaml } from '../../src/formatters/output.js';
|
||||
|
||||
describe('formatJson', () => {
|
||||
it('formats object as indented JSON', () => {
|
||||
const result = formatJson({ key: 'value', num: 42 });
|
||||
expect(JSON.parse(result)).toEqual({ key: 'value', num: 42 });
|
||||
expect(result).toContain('\n'); // indented
|
||||
});
|
||||
|
||||
it('formats arrays', () => {
|
||||
const result = formatJson([1, 2, 3]);
|
||||
expect(JSON.parse(result)).toEqual([1, 2, 3]);
|
||||
});
|
||||
|
||||
it('handles null and undefined values', () => {
|
||||
const result = formatJson({ a: null, b: undefined });
|
||||
const parsed = JSON.parse(result) as Record<string, unknown>;
|
||||
expect(parsed['a']).toBeNull();
|
||||
expect('b' in parsed).toBe(false); // undefined stripped by JSON
|
||||
});
|
||||
});
|
||||
|
||||
describe('formatYaml', () => {
|
||||
it('formats object as YAML', () => {
|
||||
const result = formatYaml({ key: 'value', num: 42 });
|
||||
expect(result).toContain('key: value');
|
||||
expect(result).toContain('num: 42');
|
||||
});
|
||||
|
||||
it('formats arrays', () => {
|
||||
const result = formatYaml(['a', 'b']);
|
||||
expect(result).toContain('- a');
|
||||
expect(result).toContain('- b');
|
||||
});
|
||||
|
||||
it('does not end with trailing newline', () => {
|
||||
const result = formatYaml({ x: 1 });
|
||||
expect(result.endsWith('\n')).toBe(false);
|
||||
});
|
||||
});
|
||||
87
src/cli/tests/formatters/table.test.ts
Normal file
87
src/cli/tests/formatters/table.test.ts
Normal file
@@ -0,0 +1,87 @@
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { formatTable } from '../../src/formatters/table.js';
|
||||
import type { Column } from '../../src/formatters/table.js';
|
||||
|
||||
interface TestRow {
|
||||
name: string;
|
||||
age: number;
|
||||
city: string;
|
||||
}
|
||||
|
||||
const columns: Column<TestRow>[] = [
|
||||
{ header: 'NAME', key: 'name' },
|
||||
{ header: 'AGE', key: 'age', align: 'right' },
|
||||
{ header: 'CITY', key: 'city' },
|
||||
];
|
||||
|
||||
describe('formatTable', () => {
|
||||
it('returns empty message for no rows', () => {
|
||||
expect(formatTable([], columns)).toBe('No results found.');
|
||||
});
|
||||
|
||||
it('formats a single row', () => {
|
||||
const rows = [{ name: 'Alice', age: 30, city: 'NYC' }];
|
||||
const result = formatTable(rows, columns);
|
||||
const lines = result.split('\n');
|
||||
expect(lines).toHaveLength(3); // header, separator, data
|
||||
expect(lines[0]).toContain('NAME');
|
||||
expect(lines[0]).toContain('AGE');
|
||||
expect(lines[0]).toContain('CITY');
|
||||
expect(lines[2]).toContain('Alice');
|
||||
expect(lines[2]).toContain('NYC');
|
||||
});
|
||||
|
||||
it('right-aligns numeric columns', () => {
|
||||
const rows = [{ name: 'Bob', age: 5, city: 'LA' }];
|
||||
const result = formatTable(rows, columns);
|
||||
const lines = result.split('\n');
|
||||
// AGE column should be right-aligned: " 5" or "5" padded
|
||||
const ageLine = lines[2];
|
||||
// The age value should have leading space(s) for right alignment
|
||||
expect(ageLine).toMatch(/\s+5/);
|
||||
});
|
||||
|
||||
it('auto-sizes columns to content', () => {
|
||||
const rows = [
|
||||
{ name: 'A', age: 1, city: 'X' },
|
||||
{ name: 'LongName', age: 100, city: 'LongCityName' },
|
||||
];
|
||||
const result = formatTable(rows, columns);
|
||||
const lines = result.split('\n');
|
||||
// Header should be at least as wide as longest data
|
||||
expect(lines[0]).toContain('NAME');
|
||||
expect(lines[2]).toContain('A');
|
||||
expect(lines[3]).toContain('LongName');
|
||||
expect(lines[3]).toContain('LongCityName');
|
||||
});
|
||||
|
||||
it('truncates long values when width is fixed', () => {
|
||||
const narrowCols: Column<TestRow>[] = [
|
||||
{ header: 'NAME', key: 'name', width: 5 },
|
||||
];
|
||||
const rows = [{ name: 'VeryLongName', age: 0, city: '' }];
|
||||
const result = formatTable(rows, narrowCols);
|
||||
const lines = result.split('\n');
|
||||
// Should be truncated with ellipsis
|
||||
expect(lines[2].trim().length).toBeLessThanOrEqual(5);
|
||||
expect(lines[2]).toContain('\u2026');
|
||||
});
|
||||
|
||||
it('supports function-based column keys', () => {
|
||||
const fnCols: Column<TestRow>[] = [
|
||||
{ header: 'INFO', key: (row) => `${row.name} (${row.age})` },
|
||||
];
|
||||
const rows = [{ name: 'Eve', age: 25, city: 'SF' }];
|
||||
const result = formatTable(rows, fnCols);
|
||||
expect(result).toContain('Eve (25)');
|
||||
});
|
||||
|
||||
it('handles separator line matching column widths', () => {
|
||||
const rows = [{ name: 'Test', age: 1, city: 'Here' }];
|
||||
const result = formatTable(rows, columns);
|
||||
const lines = result.split('\n');
|
||||
const separator = lines[1];
|
||||
// Separator should consist of dashes and spaces
|
||||
expect(separator).toMatch(/^[-\s]+$/);
|
||||
});
|
||||
});
|
||||
@@ -2,7 +2,8 @@
|
||||
"extends": "../../tsconfig.base.json",
|
||||
"compilerOptions": {
|
||||
"rootDir": "src",
|
||||
"outDir": "dist"
|
||||
"outDir": "dist",
|
||||
"types": ["node"]
|
||||
},
|
||||
"include": ["src/**/*.ts"],
|
||||
"references": [
|
||||
|
||||
@@ -0,0 +1,204 @@
|
||||
-- CreateEnum
|
||||
CREATE TYPE "Role" AS ENUM ('USER', 'ADMIN');
|
||||
|
||||
-- CreateEnum
|
||||
CREATE TYPE "Transport" AS ENUM ('STDIO', 'SSE', 'STREAMABLE_HTTP');
|
||||
|
||||
-- CreateEnum
|
||||
CREATE TYPE "InstanceStatus" AS ENUM ('STARTING', 'RUNNING', 'STOPPING', 'STOPPED', 'ERROR');
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "User" (
|
||||
"id" TEXT NOT NULL,
|
||||
"email" TEXT NOT NULL,
|
||||
"name" TEXT,
|
||||
"passwordHash" TEXT NOT NULL,
|
||||
"role" "Role" NOT NULL DEFAULT 'USER',
|
||||
"version" INTEGER NOT NULL DEFAULT 1,
|
||||
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" TIMESTAMP(3) NOT NULL,
|
||||
|
||||
CONSTRAINT "User_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "Session" (
|
||||
"id" TEXT NOT NULL,
|
||||
"token" TEXT NOT NULL,
|
||||
"userId" TEXT NOT NULL,
|
||||
"expiresAt" TIMESTAMP(3) NOT NULL,
|
||||
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
|
||||
CONSTRAINT "Session_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "McpServer" (
|
||||
"id" TEXT NOT NULL,
|
||||
"name" TEXT NOT NULL,
|
||||
"description" TEXT NOT NULL DEFAULT '',
|
||||
"packageName" TEXT,
|
||||
"dockerImage" TEXT,
|
||||
"transport" "Transport" NOT NULL DEFAULT 'STDIO',
|
||||
"repositoryUrl" TEXT,
|
||||
"externalUrl" TEXT,
|
||||
"command" JSONB,
|
||||
"containerPort" INTEGER,
|
||||
"envTemplate" JSONB NOT NULL DEFAULT '[]',
|
||||
"version" INTEGER NOT NULL DEFAULT 1,
|
||||
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" TIMESTAMP(3) NOT NULL,
|
||||
|
||||
CONSTRAINT "McpServer_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "McpProfile" (
|
||||
"id" TEXT NOT NULL,
|
||||
"name" TEXT NOT NULL,
|
||||
"serverId" TEXT NOT NULL,
|
||||
"permissions" JSONB NOT NULL DEFAULT '[]',
|
||||
"envOverrides" JSONB NOT NULL DEFAULT '{}',
|
||||
"version" INTEGER NOT NULL DEFAULT 1,
|
||||
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" TIMESTAMP(3) NOT NULL,
|
||||
|
||||
CONSTRAINT "McpProfile_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "Project" (
|
||||
"id" TEXT NOT NULL,
|
||||
"name" TEXT NOT NULL,
|
||||
"description" TEXT NOT NULL DEFAULT '',
|
||||
"ownerId" TEXT NOT NULL,
|
||||
"version" INTEGER NOT NULL DEFAULT 1,
|
||||
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" TIMESTAMP(3) NOT NULL,
|
||||
|
||||
CONSTRAINT "Project_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "ProjectMcpProfile" (
|
||||
"id" TEXT NOT NULL,
|
||||
"projectId" TEXT NOT NULL,
|
||||
"profileId" TEXT NOT NULL,
|
||||
|
||||
CONSTRAINT "ProjectMcpProfile_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "McpInstance" (
|
||||
"id" TEXT NOT NULL,
|
||||
"serverId" TEXT NOT NULL,
|
||||
"containerId" TEXT,
|
||||
"status" "InstanceStatus" NOT NULL DEFAULT 'STOPPED',
|
||||
"port" INTEGER,
|
||||
"metadata" JSONB NOT NULL DEFAULT '{}',
|
||||
"version" INTEGER NOT NULL DEFAULT 1,
|
||||
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" TIMESTAMP(3) NOT NULL,
|
||||
|
||||
CONSTRAINT "McpInstance_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "AuditLog" (
|
||||
"id" TEXT NOT NULL,
|
||||
"userId" TEXT NOT NULL,
|
||||
"action" TEXT NOT NULL,
|
||||
"resource" TEXT NOT NULL,
|
||||
"resourceId" TEXT,
|
||||
"details" JSONB NOT NULL DEFAULT '{}',
|
||||
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
|
||||
CONSTRAINT "AuditLog_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "User_email_key" ON "User"("email");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "User_email_idx" ON "User"("email");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "Session_token_key" ON "Session"("token");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "Session_token_idx" ON "Session"("token");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "Session_userId_idx" ON "Session"("userId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "Session_expiresAt_idx" ON "Session"("expiresAt");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "McpServer_name_key" ON "McpServer"("name");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "McpServer_name_idx" ON "McpServer"("name");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "McpProfile_serverId_idx" ON "McpProfile"("serverId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "McpProfile_name_serverId_key" ON "McpProfile"("name", "serverId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "Project_name_key" ON "Project"("name");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "Project_name_idx" ON "Project"("name");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "Project_ownerId_idx" ON "Project"("ownerId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "ProjectMcpProfile_projectId_idx" ON "ProjectMcpProfile"("projectId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "ProjectMcpProfile_profileId_idx" ON "ProjectMcpProfile"("profileId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "ProjectMcpProfile_projectId_profileId_key" ON "ProjectMcpProfile"("projectId", "profileId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "McpInstance_serverId_idx" ON "McpInstance"("serverId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "McpInstance_status_idx" ON "McpInstance"("status");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "AuditLog_userId_idx" ON "AuditLog"("userId");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "AuditLog_action_idx" ON "AuditLog"("action");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "AuditLog_resource_idx" ON "AuditLog"("resource");
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "AuditLog_createdAt_idx" ON "AuditLog"("createdAt");
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "Session" ADD CONSTRAINT "Session_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "McpProfile" ADD CONSTRAINT "McpProfile_serverId_fkey" FOREIGN KEY ("serverId") REFERENCES "McpServer"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "Project" ADD CONSTRAINT "Project_ownerId_fkey" FOREIGN KEY ("ownerId") REFERENCES "User"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "ProjectMcpProfile" ADD CONSTRAINT "ProjectMcpProfile_projectId_fkey" FOREIGN KEY ("projectId") REFERENCES "Project"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "ProjectMcpProfile" ADD CONSTRAINT "ProjectMcpProfile_profileId_fkey" FOREIGN KEY ("profileId") REFERENCES "McpProfile"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "McpInstance" ADD CONSTRAINT "McpInstance_serverId_fkey" FOREIGN KEY ("serverId") REFERENCES "McpServer"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "AuditLog" ADD CONSTRAINT "AuditLog_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
3
src/db/prisma/migrations/migration_lock.toml
Normal file
3
src/db/prisma/migrations/migration_lock.toml
Normal file
@@ -0,0 +1,3 @@
|
||||
# Please do not edit this file manually
|
||||
# It should be added in your version-control system (e.g., Git)
|
||||
provider = "postgresql"
|
||||
184
src/db/prisma/schema.prisma
Normal file
184
src/db/prisma/schema.prisma
Normal file
@@ -0,0 +1,184 @@
|
||||
generator client {
|
||||
provider = "prisma-client-js"
|
||||
}
|
||||
|
||||
datasource db {
|
||||
provider = "postgresql"
|
||||
url = env("DATABASE_URL")
|
||||
}
|
||||
|
||||
// ── Users ──
|
||||
|
||||
model User {
|
||||
id String @id @default(cuid())
|
||||
email String @unique
|
||||
name String?
|
||||
passwordHash String
|
||||
role Role @default(USER)
|
||||
version Int @default(1)
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
|
||||
sessions Session[]
|
||||
auditLogs AuditLog[]
|
||||
projects Project[]
|
||||
|
||||
@@index([email])
|
||||
}
|
||||
|
||||
enum Role {
|
||||
USER
|
||||
ADMIN
|
||||
}
|
||||
|
||||
// ── Sessions ──
|
||||
|
||||
model Session {
|
||||
id String @id @default(cuid())
|
||||
token String @unique
|
||||
userId String
|
||||
expiresAt DateTime
|
||||
createdAt DateTime @default(now())
|
||||
|
||||
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
|
||||
|
||||
@@index([token])
|
||||
@@index([userId])
|
||||
@@index([expiresAt])
|
||||
}
|
||||
|
||||
// ── MCP Servers ──
|
||||
|
||||
model McpServer {
|
||||
id String @id @default(cuid())
|
||||
name String @unique
|
||||
description String @default("")
|
||||
packageName String?
|
||||
dockerImage String?
|
||||
transport Transport @default(STDIO)
|
||||
repositoryUrl String?
|
||||
externalUrl String?
|
||||
command Json?
|
||||
containerPort Int?
|
||||
replicas Int @default(1)
|
||||
env Json @default("[]")
|
||||
healthCheck Json?
|
||||
version Int @default(1)
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
|
||||
templateName String?
|
||||
templateVersion String?
|
||||
|
||||
instances McpInstance[]
|
||||
|
||||
@@index([name])
|
||||
}
|
||||
|
||||
enum Transport {
|
||||
STDIO
|
||||
SSE
|
||||
STREAMABLE_HTTP
|
||||
}
|
||||
|
||||
// ── MCP Templates ──
|
||||
|
||||
model McpTemplate {
|
||||
id String @id @default(cuid())
|
||||
name String @unique
|
||||
version String @default("1.0.0")
|
||||
description String @default("")
|
||||
packageName String?
|
||||
dockerImage String?
|
||||
transport Transport @default(STDIO)
|
||||
repositoryUrl String?
|
||||
externalUrl String?
|
||||
command Json?
|
||||
containerPort Int?
|
||||
replicas Int @default(1)
|
||||
env Json @default("[]")
|
||||
healthCheck Json?
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
|
||||
@@index([name])
|
||||
}
|
||||
|
||||
// ── Secrets ──
|
||||
|
||||
model Secret {
|
||||
id String @id @default(cuid())
|
||||
name String @unique
|
||||
data Json @default("{}")
|
||||
version Int @default(1)
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
|
||||
@@index([name])
|
||||
}
|
||||
|
||||
// ── Projects ──
|
||||
|
||||
model Project {
|
||||
id String @id @default(cuid())
|
||||
name String @unique
|
||||
description String @default("")
|
||||
ownerId String
|
||||
version Int @default(1)
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
|
||||
owner User @relation(fields: [ownerId], references: [id], onDelete: Cascade)
|
||||
|
||||
@@index([name])
|
||||
@@index([ownerId])
|
||||
}
|
||||
|
||||
// ── MCP Instances (running containers) ──
|
||||
|
||||
model McpInstance {
|
||||
id String @id @default(cuid())
|
||||
serverId String
|
||||
containerId String?
|
||||
status InstanceStatus @default(STOPPED)
|
||||
port Int?
|
||||
metadata Json @default("{}")
|
||||
healthStatus String?
|
||||
lastHealthCheck DateTime?
|
||||
events Json @default("[]")
|
||||
version Int @default(1)
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @updatedAt
|
||||
|
||||
server McpServer @relation(fields: [serverId], references: [id], onDelete: Cascade)
|
||||
|
||||
@@index([serverId])
|
||||
@@index([status])
|
||||
}
|
||||
|
||||
enum InstanceStatus {
|
||||
STARTING
|
||||
RUNNING
|
||||
STOPPING
|
||||
STOPPED
|
||||
ERROR
|
||||
}
|
||||
|
||||
// ── Audit Logs ──
|
||||
|
||||
model AuditLog {
|
||||
id String @id @default(cuid())
|
||||
userId String
|
||||
action String
|
||||
resource String
|
||||
resourceId String?
|
||||
details Json @default("{}")
|
||||
createdAt DateTime @default(now())
|
||||
|
||||
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
|
||||
|
||||
@@index([userId])
|
||||
@@index([action])
|
||||
@@index([resource])
|
||||
@@index([createdAt])
|
||||
}
|
||||
@@ -1,2 +1,18 @@
|
||||
// Database package - Prisma client and utilities
|
||||
// Will be implemented in Task 2
|
||||
export { PrismaClient } from '@prisma/client';
|
||||
export type {
|
||||
User,
|
||||
Session,
|
||||
McpServer,
|
||||
McpTemplate,
|
||||
Secret,
|
||||
Project,
|
||||
McpInstance,
|
||||
AuditLog,
|
||||
Role,
|
||||
Transport,
|
||||
InstanceStatus,
|
||||
} from '@prisma/client';
|
||||
|
||||
export { seedTemplates } from './seed/index.js';
|
||||
export type { SeedTemplate, TemplateEnvEntry, HealthCheckSpec } from './seed/index.js';
|
||||
|
||||
77
src/db/src/seed/index.ts
Normal file
77
src/db/src/seed/index.ts
Normal file
@@ -0,0 +1,77 @@
|
||||
import { PrismaClient, Prisma } from '@prisma/client';
|
||||
|
||||
export interface TemplateEnvEntry {
|
||||
name: string;
|
||||
description?: string;
|
||||
required?: boolean;
|
||||
defaultValue?: string;
|
||||
}
|
||||
|
||||
export interface HealthCheckSpec {
|
||||
tool: string;
|
||||
arguments?: Record<string, unknown>;
|
||||
intervalSeconds?: number;
|
||||
timeoutSeconds?: number;
|
||||
failureThreshold?: number;
|
||||
}
|
||||
|
||||
export interface SeedTemplate {
|
||||
name: string;
|
||||
version: string;
|
||||
description: string;
|
||||
packageName?: string;
|
||||
dockerImage?: string;
|
||||
transport: 'STDIO' | 'SSE' | 'STREAMABLE_HTTP';
|
||||
repositoryUrl?: string;
|
||||
externalUrl?: string;
|
||||
command?: string[];
|
||||
containerPort?: number;
|
||||
replicas?: number;
|
||||
env?: TemplateEnvEntry[];
|
||||
healthCheck?: HealthCheckSpec;
|
||||
}
|
||||
|
||||
export async function seedTemplates(
|
||||
prisma: PrismaClient,
|
||||
templates: SeedTemplate[],
|
||||
): Promise<number> {
|
||||
let upserted = 0;
|
||||
|
||||
for (const tpl of templates) {
|
||||
await prisma.mcpTemplate.upsert({
|
||||
where: { name: tpl.name },
|
||||
update: {
|
||||
version: tpl.version,
|
||||
description: tpl.description,
|
||||
packageName: tpl.packageName ?? null,
|
||||
dockerImage: tpl.dockerImage ?? null,
|
||||
transport: tpl.transport,
|
||||
repositoryUrl: tpl.repositoryUrl ?? null,
|
||||
externalUrl: tpl.externalUrl ?? null,
|
||||
command: (tpl.command ?? Prisma.JsonNull) as Prisma.InputJsonValue,
|
||||
containerPort: tpl.containerPort ?? null,
|
||||
replicas: tpl.replicas ?? 1,
|
||||
env: (tpl.env ?? []) as unknown as Prisma.InputJsonValue,
|
||||
healthCheck: (tpl.healthCheck ?? Prisma.JsonNull) as unknown as Prisma.InputJsonValue,
|
||||
},
|
||||
create: {
|
||||
name: tpl.name,
|
||||
version: tpl.version,
|
||||
description: tpl.description,
|
||||
packageName: tpl.packageName ?? null,
|
||||
dockerImage: tpl.dockerImage ?? null,
|
||||
transport: tpl.transport,
|
||||
repositoryUrl: tpl.repositoryUrl ?? null,
|
||||
externalUrl: tpl.externalUrl ?? null,
|
||||
command: (tpl.command ?? Prisma.JsonNull) as Prisma.InputJsonValue,
|
||||
containerPort: tpl.containerPort ?? null,
|
||||
replicas: tpl.replicas ?? 1,
|
||||
env: (tpl.env ?? []) as unknown as Prisma.InputJsonValue,
|
||||
healthCheck: (tpl.healthCheck ?? Prisma.JsonNull) as unknown as Prisma.InputJsonValue,
|
||||
},
|
||||
});
|
||||
upserted++;
|
||||
}
|
||||
|
||||
return upserted;
|
||||
}
|
||||
58
src/db/tests/helpers.ts
Normal file
58
src/db/tests/helpers.ts
Normal file
@@ -0,0 +1,58 @@
|
||||
import { PrismaClient } from '@prisma/client';
|
||||
import { execSync } from 'node:child_process';
|
||||
|
||||
const TEST_DATABASE_URL = process.env['DATABASE_URL'] ??
|
||||
'postgresql://mcpctl:mcpctl_test@localhost:5433/mcpctl_test';
|
||||
|
||||
let prisma: PrismaClient | undefined;
|
||||
let schemaReady = false;
|
||||
|
||||
export function getTestClient(): PrismaClient {
|
||||
if (!prisma) {
|
||||
prisma = new PrismaClient({
|
||||
datasources: { db: { url: TEST_DATABASE_URL } },
|
||||
});
|
||||
}
|
||||
return prisma;
|
||||
}
|
||||
|
||||
export async function setupTestDb(): Promise<PrismaClient> {
|
||||
const client = getTestClient();
|
||||
|
||||
// Only push schema once per process (multiple test files share the worker)
|
||||
if (!schemaReady) {
|
||||
execSync('npx prisma db push --force-reset --skip-generate', {
|
||||
cwd: new URL('..', import.meta.url).pathname,
|
||||
env: {
|
||||
...process.env,
|
||||
DATABASE_URL: TEST_DATABASE_URL,
|
||||
// Consent required when Prisma detects AI agent context.
|
||||
// This targets the ephemeral test database (tmpfs-backed, port 5433).
|
||||
PRISMA_USER_CONSENT_FOR_DANGEROUS_AI_ACTION: 'yes',
|
||||
},
|
||||
stdio: 'pipe',
|
||||
});
|
||||
schemaReady = true;
|
||||
}
|
||||
|
||||
return client;
|
||||
}
|
||||
|
||||
export async function cleanupTestDb(): Promise<void> {
|
||||
if (prisma) {
|
||||
await prisma.$disconnect();
|
||||
prisma = undefined;
|
||||
}
|
||||
}
|
||||
|
||||
export async function clearAllTables(client: PrismaClient): Promise<void> {
|
||||
// Delete in order respecting foreign keys
|
||||
await client.auditLog.deleteMany();
|
||||
await client.mcpInstance.deleteMany();
|
||||
await client.secret.deleteMany();
|
||||
await client.session.deleteMany();
|
||||
await client.project.deleteMany();
|
||||
await client.mcpServer.deleteMany();
|
||||
await client.mcpTemplate.deleteMany();
|
||||
await client.user.deleteMany();
|
||||
}
|
||||
312
src/db/tests/models.test.ts
Normal file
312
src/db/tests/models.test.ts
Normal file
@@ -0,0 +1,312 @@
|
||||
import { describe, it, expect, beforeAll, afterAll, beforeEach } from 'vitest';
|
||||
import type { PrismaClient } from '@prisma/client';
|
||||
import { setupTestDb, cleanupTestDb, clearAllTables, getTestClient } from './helpers.js';
|
||||
|
||||
let prisma: PrismaClient;
|
||||
|
||||
beforeAll(async () => {
|
||||
prisma = await setupTestDb();
|
||||
}, 30_000);
|
||||
|
||||
afterAll(async () => {
|
||||
await cleanupTestDb();
|
||||
});
|
||||
|
||||
beforeEach(async () => {
|
||||
await clearAllTables(prisma);
|
||||
});
|
||||
|
||||
// ── Helper factories ──
|
||||
|
||||
async function createUser(overrides: { email?: string; name?: string; role?: 'USER' | 'ADMIN' } = {}) {
|
||||
return prisma.user.create({
|
||||
data: {
|
||||
email: overrides.email ?? `test-${Date.now()}@example.com`,
|
||||
name: overrides.name ?? 'Test User',
|
||||
passwordHash: '$2b$10$test-hash-placeholder',
|
||||
role: overrides.role ?? 'USER',
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
async function createServer(overrides: { name?: string; transport?: 'STDIO' | 'SSE' | 'STREAMABLE_HTTP' } = {}) {
|
||||
return prisma.mcpServer.create({
|
||||
data: {
|
||||
name: overrides.name ?? `server-${Date.now()}`,
|
||||
description: 'Test server',
|
||||
packageName: '@test/mcp-server',
|
||||
transport: overrides.transport ?? 'STDIO',
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
// ── User model ──
|
||||
|
||||
describe('User', () => {
|
||||
it('creates a user with defaults', async () => {
|
||||
const user = await createUser();
|
||||
expect(user.id).toBeDefined();
|
||||
expect(user.role).toBe('USER');
|
||||
expect(user.version).toBe(1);
|
||||
expect(user.createdAt).toBeInstanceOf(Date);
|
||||
expect(user.updatedAt).toBeInstanceOf(Date);
|
||||
});
|
||||
|
||||
it('enforces unique email', async () => {
|
||||
await createUser({ email: 'dup@test.com' });
|
||||
await expect(createUser({ email: 'dup@test.com' })).rejects.toThrow();
|
||||
});
|
||||
|
||||
it('allows ADMIN role', async () => {
|
||||
const admin = await createUser({ role: 'ADMIN' });
|
||||
expect(admin.role).toBe('ADMIN');
|
||||
});
|
||||
|
||||
it('updates updatedAt on change', async () => {
|
||||
const user = await createUser();
|
||||
const original = user.updatedAt;
|
||||
// Small delay to ensure different timestamp
|
||||
await new Promise((r) => setTimeout(r, 50));
|
||||
const updated = await prisma.user.update({
|
||||
where: { id: user.id },
|
||||
data: { name: 'Updated' },
|
||||
});
|
||||
expect(updated.updatedAt.getTime()).toBeGreaterThanOrEqual(original.getTime());
|
||||
});
|
||||
});
|
||||
|
||||
// ── Session model ──
|
||||
|
||||
describe('Session', () => {
|
||||
it('creates a session linked to user', async () => {
|
||||
const user = await createUser();
|
||||
const session = await prisma.session.create({
|
||||
data: {
|
||||
token: 'test-token-123',
|
||||
userId: user.id,
|
||||
expiresAt: new Date(Date.now() + 86400_000),
|
||||
},
|
||||
});
|
||||
expect(session.token).toBe('test-token-123');
|
||||
expect(session.userId).toBe(user.id);
|
||||
});
|
||||
|
||||
it('enforces unique token', async () => {
|
||||
const user = await createUser();
|
||||
const data = {
|
||||
token: 'unique-token',
|
||||
userId: user.id,
|
||||
expiresAt: new Date(Date.now() + 86400_000),
|
||||
};
|
||||
await prisma.session.create({ data });
|
||||
await expect(prisma.session.create({ data })).rejects.toThrow();
|
||||
});
|
||||
|
||||
it('cascades delete when user is deleted', async () => {
|
||||
const user = await createUser();
|
||||
await prisma.session.create({
|
||||
data: {
|
||||
token: 'cascade-token',
|
||||
userId: user.id,
|
||||
expiresAt: new Date(Date.now() + 86400_000),
|
||||
},
|
||||
});
|
||||
await prisma.user.delete({ where: { id: user.id } });
|
||||
const sessions = await prisma.session.findMany({ where: { userId: user.id } });
|
||||
expect(sessions).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
|
||||
// ── McpServer model ──
|
||||
|
||||
describe('McpServer', () => {
|
||||
it('creates a server with defaults', async () => {
|
||||
const server = await createServer();
|
||||
expect(server.transport).toBe('STDIO');
|
||||
expect(server.version).toBe(1);
|
||||
expect(server.env).toEqual([]);
|
||||
});
|
||||
|
||||
it('enforces unique name', async () => {
|
||||
await createServer({ name: 'slack' });
|
||||
await expect(createServer({ name: 'slack' })).rejects.toThrow();
|
||||
});
|
||||
|
||||
it('stores env as JSON', async () => {
|
||||
const server = await prisma.mcpServer.create({
|
||||
data: {
|
||||
name: 'with-env',
|
||||
env: [
|
||||
{ name: 'API_KEY', value: 'test-key' },
|
||||
],
|
||||
},
|
||||
});
|
||||
const env = server.env as Array<{ name: string }>;
|
||||
expect(env).toHaveLength(1);
|
||||
expect(env[0].name).toBe('API_KEY');
|
||||
});
|
||||
|
||||
it('supports SSE transport', async () => {
|
||||
const server = await createServer({ transport: 'SSE' });
|
||||
expect(server.transport).toBe('SSE');
|
||||
});
|
||||
});
|
||||
|
||||
// ── Secret model ──
|
||||
|
||||
describe('Secret', () => {
|
||||
it('creates a secret with defaults', async () => {
|
||||
const secret = await prisma.secret.create({
|
||||
data: { name: 'my-secret' },
|
||||
});
|
||||
expect(secret.name).toBe('my-secret');
|
||||
expect(secret.data).toEqual({});
|
||||
expect(secret.version).toBe(1);
|
||||
});
|
||||
|
||||
it('stores key-value data as JSON', async () => {
|
||||
const secret = await prisma.secret.create({
|
||||
data: {
|
||||
name: 'api-keys',
|
||||
data: { API_KEY: 'test-key', API_SECRET: 'test-secret' },
|
||||
},
|
||||
});
|
||||
const data = secret.data as Record<string, string>;
|
||||
expect(data['API_KEY']).toBe('test-key');
|
||||
expect(data['API_SECRET']).toBe('test-secret');
|
||||
});
|
||||
|
||||
it('enforces unique name', async () => {
|
||||
await prisma.secret.create({ data: { name: 'dup-secret' } });
|
||||
await expect(prisma.secret.create({ data: { name: 'dup-secret' } })).rejects.toThrow();
|
||||
});
|
||||
|
||||
it('updates data', async () => {
|
||||
const secret = await prisma.secret.create({
|
||||
data: { name: 'updatable', data: { KEY: 'old' } },
|
||||
});
|
||||
const updated = await prisma.secret.update({
|
||||
where: { id: secret.id },
|
||||
data: { data: { KEY: 'new', EXTRA: 'added' } },
|
||||
});
|
||||
const data = updated.data as Record<string, string>;
|
||||
expect(data['KEY']).toBe('new');
|
||||
expect(data['EXTRA']).toBe('added');
|
||||
});
|
||||
});
|
||||
|
||||
// ── Project model ──
|
||||
|
||||
describe('Project', () => {
|
||||
it('creates a project with owner', async () => {
|
||||
const user = await createUser();
|
||||
const project = await prisma.project.create({
|
||||
data: { name: 'weekly-reports', ownerId: user.id },
|
||||
});
|
||||
expect(project.name).toBe('weekly-reports');
|
||||
expect(project.ownerId).toBe(user.id);
|
||||
});
|
||||
|
||||
it('enforces unique project name', async () => {
|
||||
const user = await createUser();
|
||||
await prisma.project.create({ data: { name: 'dup', ownerId: user.id } });
|
||||
await expect(
|
||||
prisma.project.create({ data: { name: 'dup', ownerId: user.id } }),
|
||||
).rejects.toThrow();
|
||||
});
|
||||
|
||||
it('cascades delete when owner is deleted', async () => {
|
||||
const user = await createUser();
|
||||
await prisma.project.create({ data: { name: 'orphan', ownerId: user.id } });
|
||||
await prisma.user.delete({ where: { id: user.id } });
|
||||
const projects = await prisma.project.findMany({ where: { ownerId: user.id } });
|
||||
expect(projects).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
// ── McpInstance model ──
|
||||
|
||||
describe('McpInstance', () => {
|
||||
it('creates an instance linked to server', async () => {
|
||||
const server = await createServer();
|
||||
const instance = await prisma.mcpInstance.create({
|
||||
data: { serverId: server.id },
|
||||
});
|
||||
expect(instance.status).toBe('STOPPED');
|
||||
expect(instance.serverId).toBe(server.id);
|
||||
});
|
||||
|
||||
it('tracks instance status transitions', async () => {
|
||||
const server = await createServer();
|
||||
const instance = await prisma.mcpInstance.create({
|
||||
data: { serverId: server.id, status: 'STARTING' },
|
||||
});
|
||||
const running = await prisma.mcpInstance.update({
|
||||
where: { id: instance.id },
|
||||
data: { status: 'RUNNING', containerId: 'abc123', port: 8080 },
|
||||
});
|
||||
expect(running.status).toBe('RUNNING');
|
||||
expect(running.containerId).toBe('abc123');
|
||||
expect(running.port).toBe(8080);
|
||||
});
|
||||
|
||||
it('cascades delete when server is deleted', async () => {
|
||||
const server = await createServer();
|
||||
await prisma.mcpInstance.create({ data: { serverId: server.id } });
|
||||
await prisma.mcpServer.delete({ where: { id: server.id } });
|
||||
const instances = await prisma.mcpInstance.findMany({ where: { serverId: server.id } });
|
||||
expect(instances).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
|
||||
// ── AuditLog model ──
|
||||
|
||||
describe('AuditLog', () => {
|
||||
it('creates an audit log entry', async () => {
|
||||
const user = await createUser();
|
||||
const log = await prisma.auditLog.create({
|
||||
data: {
|
||||
userId: user.id,
|
||||
action: 'CREATE',
|
||||
resource: 'McpServer',
|
||||
resourceId: 'server-123',
|
||||
details: { name: 'slack' },
|
||||
},
|
||||
});
|
||||
expect(log.action).toBe('CREATE');
|
||||
expect(log.resource).toBe('McpServer');
|
||||
expect(log.createdAt).toBeInstanceOf(Date);
|
||||
});
|
||||
|
||||
it('supports querying by action and resource', async () => {
|
||||
const user = await createUser();
|
||||
await prisma.auditLog.createMany({
|
||||
data: [
|
||||
{ userId: user.id, action: 'CREATE', resource: 'McpServer' },
|
||||
{ userId: user.id, action: 'UPDATE', resource: 'McpServer' },
|
||||
{ userId: user.id, action: 'CREATE', resource: 'Project' },
|
||||
],
|
||||
});
|
||||
|
||||
const creates = await prisma.auditLog.findMany({
|
||||
where: { action: 'CREATE' },
|
||||
});
|
||||
expect(creates).toHaveLength(2);
|
||||
|
||||
const serverLogs = await prisma.auditLog.findMany({
|
||||
where: { resource: 'McpServer' },
|
||||
});
|
||||
expect(serverLogs).toHaveLength(2);
|
||||
});
|
||||
|
||||
it('cascades delete when user is deleted', async () => {
|
||||
const user = await createUser();
|
||||
await prisma.auditLog.create({
|
||||
data: { userId: user.id, action: 'TEST', resource: 'Test' },
|
||||
});
|
||||
await prisma.user.delete({ where: { id: user.id } });
|
||||
const logs = await prisma.auditLog.findMany({ where: { userId: user.id } });
|
||||
expect(logs).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
86
src/db/tests/seed.test.ts
Normal file
86
src/db/tests/seed.test.ts
Normal file
@@ -0,0 +1,86 @@
|
||||
import { describe, it, expect, beforeAll, afterAll, beforeEach } from 'vitest';
|
||||
import type { PrismaClient } from '@prisma/client';
|
||||
import { setupTestDb, cleanupTestDb, clearAllTables } from './helpers.js';
|
||||
import { seedTemplates } from '../src/seed/index.js';
|
||||
import type { SeedTemplate } from '../src/seed/index.js';
|
||||
|
||||
let prisma: PrismaClient;
|
||||
|
||||
beforeAll(async () => {
|
||||
prisma = await setupTestDb();
|
||||
}, 30_000);
|
||||
|
||||
afterAll(async () => {
|
||||
await cleanupTestDb();
|
||||
});
|
||||
|
||||
beforeEach(async () => {
|
||||
await clearAllTables(prisma);
|
||||
});
|
||||
|
||||
const testTemplates: SeedTemplate[] = [
|
||||
{
|
||||
name: 'github',
|
||||
version: '1.0.0',
|
||||
description: 'GitHub MCP server',
|
||||
packageName: '@anthropic/github-mcp',
|
||||
transport: 'STDIO',
|
||||
env: [{ name: 'GITHUB_TOKEN', description: 'Personal access token', required: true }],
|
||||
},
|
||||
{
|
||||
name: 'slack',
|
||||
version: '1.0.0',
|
||||
description: 'Slack MCP server',
|
||||
packageName: '@anthropic/slack-mcp',
|
||||
transport: 'STDIO',
|
||||
env: [],
|
||||
},
|
||||
];
|
||||
|
||||
describe('seedTemplates', () => {
|
||||
it('seeds templates', async () => {
|
||||
const count = await seedTemplates(prisma, testTemplates);
|
||||
expect(count).toBe(2);
|
||||
|
||||
const templates = await prisma.mcpTemplate.findMany({ orderBy: { name: 'asc' } });
|
||||
expect(templates).toHaveLength(2);
|
||||
expect(templates.map((t) => t.name)).toEqual(['github', 'slack']);
|
||||
});
|
||||
|
||||
it('is idempotent (upsert)', async () => {
|
||||
await seedTemplates(prisma, testTemplates);
|
||||
const count = await seedTemplates(prisma, testTemplates);
|
||||
expect(count).toBe(2);
|
||||
|
||||
const templates = await prisma.mcpTemplate.findMany();
|
||||
expect(templates).toHaveLength(2);
|
||||
});
|
||||
|
||||
it('seeds env correctly', async () => {
|
||||
await seedTemplates(prisma, testTemplates);
|
||||
const github = await prisma.mcpTemplate.findUnique({ where: { name: 'github' } });
|
||||
const env = github!.env as Array<{ name: string; description?: string; required?: boolean }>;
|
||||
expect(env).toHaveLength(1);
|
||||
expect(env[0].name).toBe('GITHUB_TOKEN');
|
||||
expect(env[0].required).toBe(true);
|
||||
});
|
||||
|
||||
it('accepts custom template list', async () => {
|
||||
const custom: SeedTemplate[] = [
|
||||
{
|
||||
name: 'custom-template',
|
||||
version: '2.0.0',
|
||||
description: 'Custom test template',
|
||||
packageName: '@test/custom',
|
||||
transport: 'STDIO',
|
||||
env: [],
|
||||
},
|
||||
];
|
||||
const count = await seedTemplates(prisma, custom);
|
||||
expect(count).toBe(1);
|
||||
|
||||
const templates = await prisma.mcpTemplate.findMany();
|
||||
expect(templates).toHaveLength(1);
|
||||
expect(templates[0].name).toBe('custom-template');
|
||||
});
|
||||
});
|
||||
@@ -4,5 +4,7 @@ export default defineProject({
|
||||
test: {
|
||||
name: 'db',
|
||||
include: ['tests/**/*.test.ts'],
|
||||
// Test files share the same database — run sequentially
|
||||
fileParallelism: false,
|
||||
},
|
||||
});
|
||||
|
||||
@@ -1,2 +0,0 @@
|
||||
// Local LLM proxy entry point
|
||||
// Will be implemented in Task 11
|
||||
@@ -9,17 +9,27 @@
|
||||
"build": "tsc --build",
|
||||
"clean": "rimraf dist",
|
||||
"dev": "tsx watch src/index.ts",
|
||||
"start": "node dist/index.js",
|
||||
"start": "node dist/main.js",
|
||||
"test": "vitest",
|
||||
"test:run": "vitest run"
|
||||
},
|
||||
"dependencies": {
|
||||
"fastify": "^5.0.0",
|
||||
"@fastify/cors": "^10.0.0",
|
||||
"@fastify/helmet": "^12.0.0",
|
||||
"@fastify/rate-limit": "^10.0.0",
|
||||
"zod": "^3.24.0",
|
||||
"@mcpctl/db": "workspace:*",
|
||||
"@mcpctl/shared": "workspace:*",
|
||||
"@mcpctl/db": "workspace:*"
|
||||
"@prisma/client": "^6.0.0",
|
||||
"bcrypt": "^5.1.1",
|
||||
"dockerode": "^4.0.9",
|
||||
"fastify": "^5.0.0",
|
||||
"js-yaml": "^4.1.0",
|
||||
"zod": "^3.24.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/bcrypt": "^5.0.2",
|
||||
"@types/dockerode": "^4.0.1",
|
||||
"@types/js-yaml": "^4.0.9",
|
||||
"@types/node": "^25.3.0"
|
||||
}
|
||||
}
|
||||
|
||||
2
src/mcpd/src/config/index.ts
Normal file
2
src/mcpd/src/config/index.ts
Normal file
@@ -0,0 +1,2 @@
|
||||
export { McpdConfigSchema, loadConfigFromEnv } from './schema.js';
|
||||
export type { McpdConfig } from './schema.js';
|
||||
25
src/mcpd/src/config/schema.ts
Normal file
25
src/mcpd/src/config/schema.ts
Normal file
@@ -0,0 +1,25 @@
|
||||
import { z } from 'zod';
|
||||
|
||||
export const McpdConfigSchema = z.object({
|
||||
port: z.number().int().positive().default(3000),
|
||||
host: z.string().default('0.0.0.0'),
|
||||
databaseUrl: z.string().min(1),
|
||||
logLevel: z.enum(['fatal', 'error', 'warn', 'info', 'debug', 'trace']).default('info'),
|
||||
corsOrigins: z.array(z.string()).default(['*']),
|
||||
rateLimitMax: z.number().int().positive().default(100),
|
||||
rateLimitWindowMs: z.number().int().positive().default(60_000),
|
||||
});
|
||||
|
||||
export type McpdConfig = z.infer<typeof McpdConfigSchema>;
|
||||
|
||||
export function loadConfigFromEnv(env: Record<string, string | undefined> = process.env): McpdConfig {
|
||||
return McpdConfigSchema.parse({
|
||||
port: env['MCPD_PORT'] !== undefined ? parseInt(env['MCPD_PORT'], 10) : undefined,
|
||||
host: env['MCPD_HOST'],
|
||||
databaseUrl: env['DATABASE_URL'],
|
||||
logLevel: env['MCPD_LOG_LEVEL'],
|
||||
corsOrigins: env['MCPD_CORS_ORIGINS']?.split(',').map((s) => s.trim()),
|
||||
rateLimitMax: env['MCPD_RATE_LIMIT_MAX'] !== undefined ? parseInt(env['MCPD_RATE_LIMIT_MAX'], 10) : undefined,
|
||||
rateLimitWindowMs: env['MCPD_RATE_LIMIT_WINDOW_MS'] !== undefined ? parseInt(env['MCPD_RATE_LIMIT_WINDOW_MS'], 10) : undefined,
|
||||
});
|
||||
}
|
||||
@@ -1,2 +1,15 @@
|
||||
// mcpd daemon server entry point
|
||||
// Will be implemented in Task 3
|
||||
export { createServer } from './server.js';
|
||||
export type { ServerDeps } from './server.js';
|
||||
export { McpdConfigSchema, loadConfigFromEnv } from './config/index.js';
|
||||
export type { McpdConfig } from './config/index.js';
|
||||
export {
|
||||
createAuthMiddleware,
|
||||
registerSecurityPlugins,
|
||||
errorHandler,
|
||||
registerAuditHook,
|
||||
} from './middleware/index.js';
|
||||
export type { AuthDeps, AuditDeps, ErrorResponse } from './middleware/index.js';
|
||||
export { registerHealthRoutes } from './routes/index.js';
|
||||
export type { HealthDeps } from './routes/index.js';
|
||||
export { setupGracefulShutdown } from './utils/index.js';
|
||||
export type { ShutdownDeps } from './utils/index.js';
|
||||
|
||||
170
src/mcpd/src/main.ts
Normal file
170
src/mcpd/src/main.ts
Normal file
@@ -0,0 +1,170 @@
|
||||
import { readdirSync, readFileSync } from 'node:fs';
|
||||
import { join } from 'node:path';
|
||||
import { PrismaClient } from '@prisma/client';
|
||||
import yaml from 'js-yaml';
|
||||
import { seedTemplates } from '@mcpctl/db';
|
||||
import type { SeedTemplate } from '@mcpctl/db';
|
||||
import { loadConfigFromEnv } from './config/index.js';
|
||||
import { createServer } from './server.js';
|
||||
import { setupGracefulShutdown } from './utils/index.js';
|
||||
import {
|
||||
McpServerRepository,
|
||||
SecretRepository,
|
||||
McpInstanceRepository,
|
||||
ProjectRepository,
|
||||
AuditLogRepository,
|
||||
TemplateRepository,
|
||||
} from './repositories/index.js';
|
||||
import {
|
||||
McpServerService,
|
||||
SecretService,
|
||||
InstanceService,
|
||||
ProjectService,
|
||||
AuditLogService,
|
||||
DockerContainerManager,
|
||||
MetricsCollector,
|
||||
HealthAggregator,
|
||||
BackupService,
|
||||
RestoreService,
|
||||
AuthService,
|
||||
McpProxyService,
|
||||
TemplateService,
|
||||
HealthProbeRunner,
|
||||
} from './services/index.js';
|
||||
import {
|
||||
registerMcpServerRoutes,
|
||||
registerSecretRoutes,
|
||||
registerInstanceRoutes,
|
||||
registerProjectRoutes,
|
||||
registerAuditLogRoutes,
|
||||
registerHealthMonitoringRoutes,
|
||||
registerBackupRoutes,
|
||||
registerAuthRoutes,
|
||||
registerMcpProxyRoutes,
|
||||
registerTemplateRoutes,
|
||||
} from './routes/index.js';
|
||||
|
||||
async function main(): Promise<void> {
|
||||
const config = loadConfigFromEnv();
|
||||
|
||||
// Database
|
||||
const prisma = new PrismaClient({
|
||||
datasources: { db: { url: config.databaseUrl } },
|
||||
});
|
||||
await prisma.$connect();
|
||||
|
||||
// Seed templates from YAML files
|
||||
const templatesDir = process.env.TEMPLATES_DIR ?? 'templates';
|
||||
const templateFiles = (() => {
|
||||
try {
|
||||
return readdirSync(templatesDir).filter((f) => f.endsWith('.yaml') || f.endsWith('.yml'));
|
||||
} catch {
|
||||
return [];
|
||||
}
|
||||
})();
|
||||
const templates: SeedTemplate[] = templateFiles.map((f) => {
|
||||
const content = readFileSync(join(templatesDir, f), 'utf-8');
|
||||
const parsed = yaml.load(content) as SeedTemplate;
|
||||
return {
|
||||
...parsed,
|
||||
transport: parsed.transport ?? 'STDIO',
|
||||
version: parsed.version ?? '1.0.0',
|
||||
description: parsed.description ?? '',
|
||||
...(parsed.healthCheck ? { healthCheck: parsed.healthCheck } : {}),
|
||||
};
|
||||
});
|
||||
await seedTemplates(prisma, templates);
|
||||
|
||||
// Repositories
|
||||
const serverRepo = new McpServerRepository(prisma);
|
||||
const secretRepo = new SecretRepository(prisma);
|
||||
const instanceRepo = new McpInstanceRepository(prisma);
|
||||
const projectRepo = new ProjectRepository(prisma);
|
||||
const auditLogRepo = new AuditLogRepository(prisma);
|
||||
const templateRepo = new TemplateRepository(prisma);
|
||||
|
||||
// Orchestrator
|
||||
const orchestrator = new DockerContainerManager();
|
||||
|
||||
// Services
|
||||
const serverService = new McpServerService(serverRepo);
|
||||
const instanceService = new InstanceService(instanceRepo, serverRepo, orchestrator, secretRepo);
|
||||
serverService.setInstanceService(instanceService);
|
||||
const secretService = new SecretService(secretRepo);
|
||||
const projectService = new ProjectService(projectRepo);
|
||||
const auditLogService = new AuditLogService(auditLogRepo);
|
||||
const metricsCollector = new MetricsCollector();
|
||||
const healthAggregator = new HealthAggregator(metricsCollector, orchestrator);
|
||||
const backupService = new BackupService(serverRepo, projectRepo, secretRepo);
|
||||
const restoreService = new RestoreService(serverRepo, projectRepo, secretRepo);
|
||||
const authService = new AuthService(prisma);
|
||||
const templateService = new TemplateService(templateRepo);
|
||||
const mcpProxyService = new McpProxyService(instanceRepo, serverRepo);
|
||||
|
||||
// Server
|
||||
const app = await createServer(config, {
|
||||
health: {
|
||||
checkDb: async () => {
|
||||
try {
|
||||
await prisma.$queryRaw`SELECT 1`;
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
// Routes
|
||||
registerMcpServerRoutes(app, serverService, instanceService);
|
||||
registerTemplateRoutes(app, templateService);
|
||||
registerSecretRoutes(app, secretService);
|
||||
registerInstanceRoutes(app, instanceService);
|
||||
registerProjectRoutes(app, projectService);
|
||||
registerAuditLogRoutes(app, auditLogService);
|
||||
registerHealthMonitoringRoutes(app, { healthAggregator, metricsCollector });
|
||||
registerBackupRoutes(app, { backupService, restoreService });
|
||||
registerAuthRoutes(app, { authService });
|
||||
registerMcpProxyRoutes(app, {
|
||||
mcpProxyService,
|
||||
auditLogService,
|
||||
authDeps: { findSession: (token) => authService.findSession(token) },
|
||||
});
|
||||
|
||||
// Start
|
||||
await app.listen({ port: config.port, host: config.host });
|
||||
app.log.info(`mcpd listening on ${config.host}:${config.port}`);
|
||||
|
||||
// Periodic container liveness sync — detect crashed containers
|
||||
const SYNC_INTERVAL_MS = 30_000; // 30s
|
||||
const syncTimer = setInterval(async () => {
|
||||
try {
|
||||
await instanceService.syncStatus();
|
||||
} catch (err) {
|
||||
app.log.error({ err }, 'Container status sync failed');
|
||||
}
|
||||
}, SYNC_INTERVAL_MS);
|
||||
|
||||
// Health probe runner — periodic MCP tool-call probes (like k8s livenessProbe)
|
||||
const healthProbeRunner = new HealthProbeRunner(
|
||||
instanceRepo,
|
||||
serverRepo,
|
||||
orchestrator,
|
||||
{ info: (msg) => app.log.info(msg), error: (obj, msg) => app.log.error(obj, msg) },
|
||||
);
|
||||
healthProbeRunner.start(15_000);
|
||||
|
||||
// Graceful shutdown
|
||||
setupGracefulShutdown(app, {
|
||||
disconnectDb: async () => {
|
||||
clearInterval(syncTimer);
|
||||
healthProbeRunner.stop();
|
||||
await prisma.$disconnect();
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
// Entry point: boot the daemon; any startup failure is logged and the
// process exits non-zero so a supervisor (Docker, systemd) can restart it.
main().catch((err) => {
  console.error('Failed to start mcpd:', err);
  process.exit(1);
});
|
||||
59
src/mcpd/src/middleware/audit.ts
Normal file
59
src/mcpd/src/middleware/audit.ts
Normal file
@@ -0,0 +1,59 @@
|
||||
import type { FastifyInstance, FastifyReply, FastifyRequest } from 'fastify';
|
||||
|
||||
/**
 * Dependencies injected into the audit hook, kept as a plain function so the
 * hook can be wired to any persistence layer (and unit-tested with a stub).
 */
export interface AuditDeps {
  /** Persists one audit entry; awaited inside the onResponse hook. */
  createAuditLog: (entry: {
    userId: string;
    action: string;
    resource: string;
    resourceId?: string;
    details?: Record<string, unknown>;
  }) => Promise<void>;
}
|
||||
|
||||
export function registerAuditHook(app: FastifyInstance, deps: AuditDeps): void {
|
||||
app.addHook('onResponse', async (request: FastifyRequest, reply: FastifyReply) => {
|
||||
// Only audit mutating methods on authenticated requests
|
||||
if (request.userId === undefined) return;
|
||||
if (request.method === 'GET' || request.method === 'HEAD' || request.method === 'OPTIONS') return;
|
||||
|
||||
const action = methodToAction(request.method);
|
||||
const { resource, resourceId } = parseRoute(request.url);
|
||||
|
||||
const entry: Parameters<typeof deps.createAuditLog>[0] = {
|
||||
userId: request.userId,
|
||||
action,
|
||||
resource,
|
||||
details: {
|
||||
method: request.method,
|
||||
url: request.url,
|
||||
statusCode: reply.statusCode,
|
||||
},
|
||||
};
|
||||
if (resourceId !== undefined) {
|
||||
entry.resourceId = resourceId;
|
||||
}
|
||||
await deps.createAuditLog(entry);
|
||||
});
|
||||
}
|
||||
|
||||
function methodToAction(method: string): string {
|
||||
switch (method) {
|
||||
case 'POST': return 'CREATE';
|
||||
case 'PUT':
|
||||
case 'PATCH': return 'UPDATE';
|
||||
case 'DELETE': return 'DELETE';
|
||||
default: return method;
|
||||
}
|
||||
}
|
||||
|
||||
function parseRoute(url: string): { resource: string; resourceId: string | undefined } {
|
||||
const parts = url.split('?')[0]?.split('/').filter(Boolean) ?? [];
|
||||
// Pattern: /api/v1/resource/:id
|
||||
if (parts.length >= 3 && parts[0] === 'api') {
|
||||
return { resource: parts[2] ?? 'unknown', resourceId: parts[3] };
|
||||
}
|
||||
if (parts.length >= 1) {
|
||||
return { resource: parts[0] ?? 'unknown', resourceId: parts[1] };
|
||||
}
|
||||
return { resource: 'unknown', resourceId: undefined };
|
||||
}
|
||||
40
src/mcpd/src/middleware/auth.ts
Normal file
40
src/mcpd/src/middleware/auth.ts
Normal file
@@ -0,0 +1,40 @@
|
||||
import type { FastifyRequest, FastifyReply } from 'fastify';
|
||||
|
||||
/**
 * Session-store lookup used by the auth middleware.
 * Returns null when the token does not correspond to any session.
 */
export interface AuthDeps {
  findSession: (token: string) => Promise<{ userId: string; expiresAt: Date } | null>;
}

// Module augmentation: the middleware stamps the authenticated user's id
// onto the request so downstream handlers/hooks can read it.
declare module 'fastify' {
  interface FastifyRequest {
    userId?: string;
  }
}
|
||||
|
||||
export function createAuthMiddleware(deps: AuthDeps) {
|
||||
return async function authMiddleware(request: FastifyRequest, reply: FastifyReply): Promise<void> {
|
||||
const header = request.headers.authorization;
|
||||
if (header === undefined || !header.startsWith('Bearer ')) {
|
||||
reply.code(401).send({ error: 'Missing or invalid Authorization header' });
|
||||
return;
|
||||
}
|
||||
|
||||
const token = header.slice(7);
|
||||
if (token.length === 0) {
|
||||
reply.code(401).send({ error: 'Empty token' });
|
||||
return;
|
||||
}
|
||||
|
||||
const session = await deps.findSession(token);
|
||||
if (session === null) {
|
||||
reply.code(401).send({ error: 'Invalid token' });
|
||||
return;
|
||||
}
|
||||
|
||||
if (session.expiresAt < new Date()) {
|
||||
reply.code(401).send({ error: 'Token expired' });
|
||||
return;
|
||||
}
|
||||
|
||||
request.userId = session.userId;
|
||||
};
|
||||
}
|
||||
60
src/mcpd/src/middleware/error-handler.ts
Normal file
60
src/mcpd/src/middleware/error-handler.ts
Normal file
@@ -0,0 +1,60 @@
|
||||
import type { FastifyError, FastifyReply, FastifyRequest } from 'fastify';
|
||||
import { ZodError } from 'zod';
|
||||
|
||||
/** Uniform JSON error body produced by errorHandler for every failure mode. */
export interface ErrorResponse {
  // Human-readable message; deliberately generic for 5xx responses.
  error: string;
  // HTTP status, duplicated in the body for client convenience.
  statusCode: number;
  // Optional machine-readable detail (e.g. zod issues or Fastify validation).
  details?: unknown;
}
|
||||
|
||||
export function errorHandler(
|
||||
error: FastifyError,
|
||||
_request: FastifyRequest,
|
||||
reply: FastifyReply,
|
||||
): void {
|
||||
// Zod validation errors
|
||||
if (error instanceof ZodError) {
|
||||
reply.code(400).send({
|
||||
error: 'Validation error',
|
||||
statusCode: 400,
|
||||
details: error.issues,
|
||||
} satisfies ErrorResponse);
|
||||
return;
|
||||
}
|
||||
|
||||
// Fastify validation errors (from schema validation)
|
||||
if (error.validation !== undefined) {
|
||||
reply.code(400).send({
|
||||
error: 'Validation error',
|
||||
statusCode: 400,
|
||||
details: error.validation,
|
||||
} satisfies ErrorResponse);
|
||||
return;
|
||||
}
|
||||
|
||||
// Rate limit exceeded
|
||||
if (error.statusCode === 429) {
|
||||
reply.code(429).send({
|
||||
error: 'Rate limit exceeded',
|
||||
statusCode: 429,
|
||||
} satisfies ErrorResponse);
|
||||
return;
|
||||
}
|
||||
|
||||
// Known HTTP errors (includes service errors like NotFoundError, ConflictError)
|
||||
const statusCode = error.statusCode ?? 500;
|
||||
if (statusCode < 500) {
|
||||
reply.code(statusCode).send({
|
||||
error: error.message,
|
||||
statusCode,
|
||||
} satisfies ErrorResponse);
|
||||
return;
|
||||
}
|
||||
|
||||
// Internal server errors — don't leak details
|
||||
reply.log.error(error);
|
||||
reply.code(500).send({
|
||||
error: 'Internal server error',
|
||||
statusCode: 500,
|
||||
} satisfies ErrorResponse);
|
||||
}
|
||||
7
src/mcpd/src/middleware/index.ts
Normal file
7
src/mcpd/src/middleware/index.ts
Normal file
@@ -0,0 +1,7 @@
|
||||
export { createAuthMiddleware } from './auth.js';
|
||||
export type { AuthDeps } from './auth.js';
|
||||
export { registerSecurityPlugins } from './security.js';
|
||||
export { errorHandler } from './error-handler.js';
|
||||
export type { ErrorResponse } from './error-handler.js';
|
||||
export { registerAuditHook } from './audit.js';
|
||||
export type { AuditDeps } from './audit.js';
|
||||
24
src/mcpd/src/middleware/security.ts
Normal file
24
src/mcpd/src/middleware/security.ts
Normal file
@@ -0,0 +1,24 @@
|
||||
import type { FastifyInstance } from 'fastify';
|
||||
import cors from '@fastify/cors';
|
||||
import helmet from '@fastify/helmet';
|
||||
import rateLimit from '@fastify/rate-limit';
|
||||
import type { McpdConfig } from '../config/index.js';
|
||||
|
||||
export async function registerSecurityPlugins(
|
||||
app: FastifyInstance,
|
||||
config: McpdConfig,
|
||||
): Promise<void> {
|
||||
await app.register(cors, {
|
||||
origin: config.corsOrigins,
|
||||
methods: ['GET', 'POST', 'PUT', 'DELETE', 'PATCH'],
|
||||
});
|
||||
|
||||
await app.register(helmet, {
|
||||
contentSecurityPolicy: false, // API server, no HTML
|
||||
});
|
||||
|
||||
await app.register(rateLimit, {
|
||||
max: config.rateLimitMax,
|
||||
timeWindow: config.rateLimitWindowMs,
|
||||
});
|
||||
}
|
||||
70
src/mcpd/src/repositories/audit-log.repository.ts
Normal file
70
src/mcpd/src/repositories/audit-log.repository.ts
Normal file
@@ -0,0 +1,70 @@
|
||||
import type { PrismaClient, AuditLog, Prisma } from '@prisma/client';
|
||||
import type { IAuditLogRepository, AuditLogFilter } from './interfaces.js';
|
||||
|
||||
export class AuditLogRepository implements IAuditLogRepository {
|
||||
constructor(private readonly prisma: PrismaClient) {}
|
||||
|
||||
async findAll(filter?: AuditLogFilter): Promise<AuditLog[]> {
|
||||
const where = buildWhere(filter);
|
||||
return this.prisma.auditLog.findMany({
|
||||
where,
|
||||
orderBy: { createdAt: 'desc' },
|
||||
take: filter?.limit ?? 100,
|
||||
skip: filter?.offset ?? 0,
|
||||
});
|
||||
}
|
||||
|
||||
async findById(id: string): Promise<AuditLog | null> {
|
||||
return this.prisma.auditLog.findUnique({ where: { id } });
|
||||
}
|
||||
|
||||
async create(data: {
|
||||
userId: string;
|
||||
action: string;
|
||||
resource: string;
|
||||
resourceId?: string;
|
||||
details?: Record<string, unknown>;
|
||||
}): Promise<AuditLog> {
|
||||
const createData: Prisma.AuditLogUncheckedCreateInput = {
|
||||
userId: data.userId,
|
||||
action: data.action,
|
||||
resource: data.resource,
|
||||
details: (data.details ?? {}) as Prisma.InputJsonValue,
|
||||
};
|
||||
if (data.resourceId !== undefined) {
|
||||
createData.resourceId = data.resourceId;
|
||||
}
|
||||
return this.prisma.auditLog.create({ data: createData });
|
||||
}
|
||||
|
||||
async count(filter?: AuditLogFilter): Promise<number> {
|
||||
const where = buildWhere(filter);
|
||||
return this.prisma.auditLog.count({ where });
|
||||
}
|
||||
|
||||
async deleteOlderThan(date: Date): Promise<number> {
|
||||
const result = await this.prisma.auditLog.deleteMany({
|
||||
where: { createdAt: { lt: date } },
|
||||
});
|
||||
return result.count;
|
||||
}
|
||||
}
|
||||
|
||||
function buildWhere(filter?: AuditLogFilter): Prisma.AuditLogWhereInput {
|
||||
const where: Prisma.AuditLogWhereInput = {};
|
||||
if (!filter) return where;
|
||||
|
||||
if (filter.userId !== undefined) where.userId = filter.userId;
|
||||
if (filter.action !== undefined) where.action = filter.action;
|
||||
if (filter.resource !== undefined) where.resource = filter.resource;
|
||||
if (filter.resourceId !== undefined) where.resourceId = filter.resourceId;
|
||||
|
||||
if (filter.since !== undefined || filter.until !== undefined) {
|
||||
const createdAt: Prisma.DateTimeFilter = {};
|
||||
if (filter.since !== undefined) createdAt.gte = filter.since;
|
||||
if (filter.until !== undefined) createdAt.lte = filter.until;
|
||||
where.createdAt = createdAt;
|
||||
}
|
||||
|
||||
return where;
|
||||
}
|
||||
9
src/mcpd/src/repositories/index.ts
Normal file
9
src/mcpd/src/repositories/index.ts
Normal file
@@ -0,0 +1,9 @@
|
||||
export type { IMcpServerRepository, IMcpInstanceRepository, ISecretRepository, IAuditLogRepository, AuditLogFilter } from './interfaces.js';
|
||||
export { McpServerRepository } from './mcp-server.repository.js';
|
||||
export { SecretRepository } from './secret.repository.js';
|
||||
export type { IProjectRepository } from './project.repository.js';
|
||||
export { ProjectRepository } from './project.repository.js';
|
||||
export { McpInstanceRepository } from './mcp-instance.repository.js';
|
||||
export { AuditLogRepository } from './audit-log.repository.js';
|
||||
export type { ITemplateRepository } from './template.repository.js';
|
||||
export { TemplateRepository } from './template.repository.js';
|
||||
49
src/mcpd/src/repositories/interfaces.ts
Normal file
49
src/mcpd/src/repositories/interfaces.ts
Normal file
@@ -0,0 +1,49 @@
|
||||
import type { McpServer, McpInstance, AuditLog, Secret, InstanceStatus } from '@prisma/client';
|
||||
import type { CreateMcpServerInput, UpdateMcpServerInput } from '../validation/mcp-server.schema.js';
|
||||
import type { CreateSecretInput, UpdateSecretInput } from '../validation/secret.schema.js';
|
||||
|
||||
/** CRUD contract for MCP server definitions. */
export interface IMcpServerRepository {
  findAll(): Promise<McpServer[]>;
  findById(id: string): Promise<McpServer | null>;
  // Implementations use a unique lookup — at most one row per name.
  findByName(name: string): Promise<McpServer | null>;
  create(data: CreateMcpServerInput): Promise<McpServer>;
  update(id: string, data: UpdateMcpServerInput): Promise<McpServer>;
  delete(id: string): Promise<void>;
}

/** Persistence contract for running/stopped instances of MCP servers. */
export interface IMcpInstanceRepository {
  // All instances, optionally restricted to one server.
  findAll(serverId?: string): Promise<McpInstance[]>;
  findById(id: string): Promise<McpInstance | null>;
  // Reverse lookup from a container id to its instance row.
  findByContainerId(containerId: string): Promise<McpInstance | null>;
  create(data: { serverId: string; containerId?: string; status?: InstanceStatus; port?: number; metadata?: Record<string, unknown> }): Promise<McpInstance>;
  // Transitions the status and optionally patches runtime fields in the same write.
  updateStatus(id: string, status: InstanceStatus, fields?: { containerId?: string; port?: number; metadata?: Record<string, unknown>; healthStatus?: string; lastHealthCheck?: Date; events?: unknown[] }): Promise<McpInstance>;
  delete(id: string): Promise<void>;
}

/** CRUD contract for secrets. */
export interface ISecretRepository {
  findAll(): Promise<Secret[]>;
  findById(id: string): Promise<Secret | null>;
  findByName(name: string): Promise<Secret | null>;
  create(data: CreateSecretInput): Promise<Secret>;
  update(id: string, data: UpdateSecretInput): Promise<Secret>;
  delete(id: string): Promise<void>;
}

/**
 * Query filter for audit logs. All criteria are optional and combined with
 * AND; since/until bound createdAt inclusively (gte/lte).
 */
export interface AuditLogFilter {
  userId?: string;
  action?: string;
  resource?: string;
  resourceId?: string;
  since?: Date;
  until?: Date;
  // Page size (repository default: 100).
  limit?: number;
  // Rows to skip (repository default: 0).
  offset?: number;
}

/** Append-oriented store for audit entries, with retention cleanup. */
export interface IAuditLogRepository {
  findAll(filter?: AuditLogFilter): Promise<AuditLog[]>;
  findById(id: string): Promise<AuditLog | null>;
  create(data: { userId: string; action: string; resource: string; resourceId?: string; details?: Record<string, unknown> }): Promise<AuditLog>;
  count(filter?: AuditLogFilter): Promise<number>;
  // Deletes entries created before `date`; resolves to the number deleted.
  deleteOlderThan(date: Date): Promise<number>;
}
|
||||
84
src/mcpd/src/repositories/mcp-instance.repository.ts
Normal file
84
src/mcpd/src/repositories/mcp-instance.repository.ts
Normal file
@@ -0,0 +1,84 @@
|
||||
import type { PrismaClient, McpInstance, InstanceStatus, Prisma } from '@prisma/client';
|
||||
import type { IMcpInstanceRepository } from './interfaces.js';
|
||||
|
||||
export class McpInstanceRepository implements IMcpInstanceRepository {
|
||||
constructor(private prisma: PrismaClient) {}
|
||||
|
||||
async findAll(serverId?: string): Promise<McpInstance[]> {
|
||||
const where: Prisma.McpInstanceWhereInput = {};
|
||||
if (serverId) {
|
||||
where.serverId = serverId;
|
||||
}
|
||||
return this.prisma.mcpInstance.findMany({
|
||||
where,
|
||||
include: { server: { select: { name: true } } },
|
||||
orderBy: { createdAt: 'desc' },
|
||||
});
|
||||
}
|
||||
|
||||
async findById(id: string): Promise<McpInstance | null> {
|
||||
return this.prisma.mcpInstance.findUnique({
|
||||
where: { id },
|
||||
include: { server: { select: { name: true } } },
|
||||
});
|
||||
}
|
||||
|
||||
async findByContainerId(containerId: string): Promise<McpInstance | null> {
|
||||
return this.prisma.mcpInstance.findFirst({ where: { containerId } });
|
||||
}
|
||||
|
||||
async create(data: {
|
||||
serverId: string;
|
||||
containerId?: string;
|
||||
status?: InstanceStatus;
|
||||
port?: number;
|
||||
metadata?: Record<string, unknown>;
|
||||
}): Promise<McpInstance> {
|
||||
return this.prisma.mcpInstance.create({
|
||||
data: {
|
||||
serverId: data.serverId,
|
||||
containerId: data.containerId ?? null,
|
||||
status: data.status ?? 'STOPPED',
|
||||
port: data.port ?? null,
|
||||
metadata: (data.metadata ?? {}) as Prisma.InputJsonValue,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
async updateStatus(
|
||||
id: string,
|
||||
status: InstanceStatus,
|
||||
fields?: { containerId?: string; port?: number; metadata?: Record<string, unknown>; healthStatus?: string; lastHealthCheck?: Date; events?: unknown[] },
|
||||
): Promise<McpInstance> {
|
||||
const updateData: Prisma.McpInstanceUpdateInput = {
|
||||
status,
|
||||
version: { increment: 1 },
|
||||
};
|
||||
if (fields?.containerId !== undefined) {
|
||||
updateData.containerId = fields.containerId;
|
||||
}
|
||||
if (fields?.port !== undefined) {
|
||||
updateData.port = fields.port;
|
||||
}
|
||||
if (fields?.metadata !== undefined) {
|
||||
updateData.metadata = fields.metadata as Prisma.InputJsonValue;
|
||||
}
|
||||
if (fields?.healthStatus !== undefined) {
|
||||
updateData.healthStatus = fields.healthStatus;
|
||||
}
|
||||
if (fields?.lastHealthCheck !== undefined) {
|
||||
updateData.lastHealthCheck = fields.lastHealthCheck;
|
||||
}
|
||||
if (fields?.events !== undefined) {
|
||||
updateData.events = fields.events as unknown as Prisma.InputJsonValue;
|
||||
}
|
||||
return this.prisma.mcpInstance.update({
|
||||
where: { id },
|
||||
data: updateData,
|
||||
});
|
||||
}
|
||||
|
||||
async delete(id: string): Promise<void> {
|
||||
await this.prisma.mcpInstance.delete({ where: { id } });
|
||||
}
|
||||
}
|
||||
59
src/mcpd/src/repositories/mcp-server.repository.ts
Normal file
59
src/mcpd/src/repositories/mcp-server.repository.ts
Normal file
@@ -0,0 +1,59 @@
|
||||
import { type PrismaClient, type McpServer, Prisma } from '@prisma/client';
|
||||
import type { IMcpServerRepository } from './interfaces.js';
|
||||
import type { CreateMcpServerInput, UpdateMcpServerInput } from '../validation/mcp-server.schema.js';
|
||||
|
||||
/** Prisma-backed persistence for MCP server definitions. */
export class McpServerRepository implements IMcpServerRepository {
  constructor(private readonly prisma: PrismaClient) {}

  /** All servers ordered by name. */
  async findAll(): Promise<McpServer[]> {
    return this.prisma.mcpServer.findMany({ orderBy: { name: 'asc' } });
  }

  /** Lookup by primary key, or null. */
  async findById(id: string): Promise<McpServer | null> {
    return this.prisma.mcpServer.findUnique({ where: { id } });
  }

  /** Lookup by unique server name, or null. */
  async findByName(name: string): Promise<McpServer | null> {
    return this.prisma.mcpServer.findUnique({ where: { name } });
  }

  /**
   * Creates a server row; omitted optional scalars are stored as SQL NULL.
   * NOTE(review): `command` defaults to Prisma.DbNull (SQL NULL) while
   * `healthCheck` defaults to Prisma.JsonNull (JSON null) — confirm this
   * asymmetry between the two JSON columns is intentional.
   */
  async create(data: CreateMcpServerInput): Promise<McpServer> {
    return this.prisma.mcpServer.create({
      data: {
        name: data.name,
        description: data.description,
        packageName: data.packageName ?? null,
        dockerImage: data.dockerImage ?? null,
        transport: data.transport,
        repositoryUrl: data.repositoryUrl ?? null,
        externalUrl: data.externalUrl ?? null,
        command: data.command ?? Prisma.DbNull,
        containerPort: data.containerPort ?? null,
        replicas: data.replicas,
        env: data.env,
        healthCheck: (data.healthCheck ?? Prisma.JsonNull) as Prisma.InputJsonValue,
      },
    });
  }

  /**
   * Partial update: only fields present (not undefined) in the input are
   * written, so omitted fields keep their current values. `name` is not
   * patched by this method.
   * NOTE(review): unlike create(), a `command` value reaching here is passed
   * through without DbNull conversion — verify a null never arrives, or map it.
   */
  async update(id: string, data: UpdateMcpServerInput): Promise<McpServer> {
    const updateData: Record<string, unknown> = {};
    if (data.description !== undefined) updateData['description'] = data.description;
    if (data.packageName !== undefined) updateData['packageName'] = data.packageName;
    if (data.dockerImage !== undefined) updateData['dockerImage'] = data.dockerImage;
    if (data.transport !== undefined) updateData['transport'] = data.transport;
    if (data.repositoryUrl !== undefined) updateData['repositoryUrl'] = data.repositoryUrl;
    if (data.externalUrl !== undefined) updateData['externalUrl'] = data.externalUrl;
    if (data.command !== undefined) updateData['command'] = data.command;
    if (data.containerPort !== undefined) updateData['containerPort'] = data.containerPort;
    if (data.replicas !== undefined) updateData['replicas'] = data.replicas;
    if (data.env !== undefined) updateData['env'] = data.env;
    if (data.healthCheck !== undefined) updateData['healthCheck'] = (data.healthCheck ?? Prisma.JsonNull) as Prisma.InputJsonValue;

    return this.prisma.mcpServer.update({ where: { id }, data: updateData });
  }

  /** Hard-deletes the server row. */
  async delete(id: string): Promise<void> {
    await this.prisma.mcpServer.delete({ where: { id } });
  }
}
|
||||
49
src/mcpd/src/repositories/project.repository.ts
Normal file
49
src/mcpd/src/repositories/project.repository.ts
Normal file
@@ -0,0 +1,49 @@
|
||||
import type { PrismaClient, Project } from '@prisma/client';
|
||||
import type { CreateProjectInput, UpdateProjectInput } from '../validation/project.schema.js';
|
||||
|
||||
/** Persistence contract for projects; creation always records an owner. */
export interface IProjectRepository {
  // All projects, optionally restricted to one owner.
  findAll(ownerId?: string): Promise<Project[]>;
  findById(id: string): Promise<Project | null>;
  // Implementations use a unique lookup — at most one row per name.
  findByName(name: string): Promise<Project | null>;
  create(data: CreateProjectInput & { ownerId: string }): Promise<Project>;
  update(id: string, data: UpdateProjectInput): Promise<Project>;
  delete(id: string): Promise<void>;
}
|
||||
|
||||
export class ProjectRepository implements IProjectRepository {
|
||||
constructor(private readonly prisma: PrismaClient) {}
|
||||
|
||||
async findAll(ownerId?: string): Promise<Project[]> {
|
||||
const where = ownerId !== undefined ? { ownerId } : {};
|
||||
return this.prisma.project.findMany({ where, orderBy: { name: 'asc' } });
|
||||
}
|
||||
|
||||
async findById(id: string): Promise<Project | null> {
|
||||
return this.prisma.project.findUnique({ where: { id } });
|
||||
}
|
||||
|
||||
async findByName(name: string): Promise<Project | null> {
|
||||
return this.prisma.project.findUnique({ where: { name } });
|
||||
}
|
||||
|
||||
async create(data: CreateProjectInput & { ownerId: string }): Promise<Project> {
|
||||
return this.prisma.project.create({
|
||||
data: {
|
||||
name: data.name,
|
||||
description: data.description,
|
||||
ownerId: data.ownerId,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
async update(id: string, data: UpdateProjectInput): Promise<Project> {
|
||||
const updateData: Record<string, unknown> = {};
|
||||
if (data.description !== undefined) updateData['description'] = data.description;
|
||||
return this.prisma.project.update({ where: { id }, data: updateData });
|
||||
}
|
||||
|
||||
async delete(id: string): Promise<void> {
|
||||
await this.prisma.project.delete({ where: { id } });
|
||||
}
|
||||
|
||||
}
|
||||
39
src/mcpd/src/repositories/secret.repository.ts
Normal file
39
src/mcpd/src/repositories/secret.repository.ts
Normal file
@@ -0,0 +1,39 @@
|
||||
import { type PrismaClient, type Secret } from '@prisma/client';
|
||||
import type { ISecretRepository } from './interfaces.js';
|
||||
import type { CreateSecretInput, UpdateSecretInput } from '../validation/secret.schema.js';
|
||||
|
||||
export class SecretRepository implements ISecretRepository {
|
||||
constructor(private readonly prisma: PrismaClient) {}
|
||||
|
||||
async findAll(): Promise<Secret[]> {
|
||||
return this.prisma.secret.findMany({ orderBy: { name: 'asc' } });
|
||||
}
|
||||
|
||||
async findById(id: string): Promise<Secret | null> {
|
||||
return this.prisma.secret.findUnique({ where: { id } });
|
||||
}
|
||||
|
||||
async findByName(name: string): Promise<Secret | null> {
|
||||
return this.prisma.secret.findUnique({ where: { name } });
|
||||
}
|
||||
|
||||
async create(data: CreateSecretInput): Promise<Secret> {
|
||||
return this.prisma.secret.create({
|
||||
data: {
|
||||
name: data.name,
|
||||
data: data.data,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
async update(id: string, data: UpdateSecretInput): Promise<Secret> {
|
||||
return this.prisma.secret.update({
|
||||
where: { id },
|
||||
data: { data: data.data },
|
||||
});
|
||||
}
|
||||
|
||||
async delete(id: string): Promise<void> {
|
||||
await this.prisma.secret.delete({ where: { id } });
|
||||
}
|
||||
}
|
||||
82
src/mcpd/src/repositories/template.repository.ts
Normal file
82
src/mcpd/src/repositories/template.repository.ts
Normal file
@@ -0,0 +1,82 @@
|
||||
import { type PrismaClient, type McpTemplate, Prisma } from '@prisma/client';
|
||||
import type { CreateTemplateInput, UpdateTemplateInput } from '../validation/template.schema.js';
|
||||
|
||||
/** Persistence contract for MCP server templates. */
export interface ITemplateRepository {
  findAll(): Promise<McpTemplate[]>;
  findById(id: string): Promise<McpTemplate | null>;
  // Implementations use a unique lookup — at most one row per name.
  findByName(name: string): Promise<McpTemplate | null>;
  // Case-insensitive name search; pattern may contain '*' wildcards.
  search(pattern: string): Promise<McpTemplate[]>;
  create(data: CreateTemplateInput): Promise<McpTemplate>;
  update(id: string, data: UpdateTemplateInput): Promise<McpTemplate>;
  delete(id: string): Promise<void>;
}
|
||||
|
||||
export class TemplateRepository implements ITemplateRepository {
|
||||
constructor(private readonly prisma: PrismaClient) {}
|
||||
|
||||
async findAll(): Promise<McpTemplate[]> {
|
||||
return this.prisma.mcpTemplate.findMany({ orderBy: { name: 'asc' } });
|
||||
}
|
||||
|
||||
async findById(id: string): Promise<McpTemplate | null> {
|
||||
return this.prisma.mcpTemplate.findUnique({ where: { id } });
|
||||
}
|
||||
|
||||
async findByName(name: string): Promise<McpTemplate | null> {
|
||||
return this.prisma.mcpTemplate.findUnique({ where: { name } });
|
||||
}
|
||||
|
||||
async search(pattern: string): Promise<McpTemplate[]> {
|
||||
// Convert glob * to SQL %
|
||||
const sqlPattern = pattern.replace(/\*/g, '%');
|
||||
return this.prisma.mcpTemplate.findMany({
|
||||
where: { name: { contains: sqlPattern.replace(/%/g, ''), mode: 'insensitive' } },
|
||||
orderBy: { name: 'asc' },
|
||||
});
|
||||
}
|
||||
|
||||
async create(data: CreateTemplateInput): Promise<McpTemplate> {
|
||||
return this.prisma.mcpTemplate.create({
|
||||
data: {
|
||||
name: data.name,
|
||||
version: data.version,
|
||||
description: data.description,
|
||||
packageName: data.packageName ?? null,
|
||||
dockerImage: data.dockerImage ?? null,
|
||||
transport: data.transport,
|
||||
repositoryUrl: data.repositoryUrl ?? null,
|
||||
externalUrl: data.externalUrl ?? null,
|
||||
command: (data.command ?? Prisma.JsonNull) as Prisma.InputJsonValue,
|
||||
containerPort: data.containerPort ?? null,
|
||||
replicas: data.replicas,
|
||||
env: (data.env ?? []) as unknown as Prisma.InputJsonValue,
|
||||
healthCheck: (data.healthCheck ?? Prisma.JsonNull) as Prisma.InputJsonValue,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
async update(id: string, data: UpdateTemplateInput): Promise<McpTemplate> {
|
||||
const updateData: Record<string, unknown> = {};
|
||||
if (data.version !== undefined) updateData.version = data.version;
|
||||
if (data.description !== undefined) updateData.description = data.description;
|
||||
if (data.packageName !== undefined) updateData.packageName = data.packageName;
|
||||
if (data.dockerImage !== undefined) updateData.dockerImage = data.dockerImage;
|
||||
if (data.transport !== undefined) updateData.transport = data.transport;
|
||||
if (data.repositoryUrl !== undefined) updateData.repositoryUrl = data.repositoryUrl;
|
||||
if (data.externalUrl !== undefined) updateData.externalUrl = data.externalUrl;
|
||||
if (data.command !== undefined) updateData.command = (data.command ?? Prisma.JsonNull) as Prisma.InputJsonValue;
|
||||
if (data.containerPort !== undefined) updateData.containerPort = data.containerPort;
|
||||
if (data.replicas !== undefined) updateData.replicas = data.replicas;
|
||||
if (data.env !== undefined) updateData.env = (data.env ?? []) as Prisma.InputJsonValue;
|
||||
if (data.healthCheck !== undefined) updateData.healthCheck = (data.healthCheck ?? Prisma.JsonNull) as Prisma.InputJsonValue;
|
||||
|
||||
return this.prisma.mcpTemplate.update({
|
||||
where: { id },
|
||||
data: updateData,
|
||||
});
|
||||
}
|
||||
|
||||
async delete(id: string): Promise<void> {
|
||||
await this.prisma.mcpTemplate.delete({ where: { id } });
|
||||
}
|
||||
}
|
||||
39
src/mcpd/src/routes/audit-logs.ts
Normal file
39
src/mcpd/src/routes/audit-logs.ts
Normal file
@@ -0,0 +1,39 @@
|
||||
import type { FastifyInstance } from 'fastify';
|
||||
import type { AuditLogService } from '../services/audit-log.service.js';
|
||||
|
||||
/**
 * Raw query-string parameters for GET /api/v1/audit-logs.
 * Everything arrives as a string; numeric and date fields are parsed in the
 * route handler before reaching the service.
 */
interface AuditLogQuery {
  userId?: string;
  action?: string;
  resource?: string;
  resourceId?: string;
  // Lower bound on createdAt (date string).
  since?: string;
  // Upper bound on createdAt (date string).
  until?: string;
  // Page size, as a decimal string.
  limit?: string;
  // Rows to skip, as a decimal string.
  offset?: string;
}
|
||||
|
||||
export function registerAuditLogRoutes(app: FastifyInstance, service: AuditLogService): void {
|
||||
app.get<{ Querystring: AuditLogQuery }>('/api/v1/audit-logs', async (request) => {
|
||||
const q = request.query;
|
||||
const params: Record<string, unknown> = {};
|
||||
if (q.userId !== undefined) params.userId = q.userId;
|
||||
if (q.action !== undefined) params.action = q.action;
|
||||
if (q.resource !== undefined) params.resource = q.resource;
|
||||
if (q.resourceId !== undefined) params.resourceId = q.resourceId;
|
||||
if (q.since !== undefined) params.since = q.since;
|
||||
if (q.until !== undefined) params.until = q.until;
|
||||
if (q.limit !== undefined) params.limit = parseInt(q.limit, 10);
|
||||
if (q.offset !== undefined) params.offset = parseInt(q.offset, 10);
|
||||
return service.list(params);
|
||||
});
|
||||
|
||||
app.get<{ Params: { id: string } }>('/api/v1/audit-logs/:id', async (request) => {
|
||||
return service.getById(request.params.id);
|
||||
});
|
||||
|
||||
app.post('/api/v1/audit-logs/purge', async (_request, reply) => {
|
||||
const deleted = await service.purgeExpired();
|
||||
reply.code(200);
|
||||
return { deleted };
|
||||
});
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user