All checks were successful
CI/CD / lint (pull_request) Successful in 1m40s
CI/CD / typecheck (pull_request) Successful in 1m35s
CI/CD / test (pull_request) Successful in 2m16s
CI/CD / build (pull_request) Successful in 2m17s
CI/CD / smoke (pull_request) Successful in 4m37s
CI/CD / publish (pull_request) Has been skipped
- status.ts + api-client.ts now dispatch on URL scheme so an https mcpd URL no longer crashes with "Protocol https: not supported". Caught by fulldeploy smoke runs — status.ts had `import http` only and was synchronously throwing against https://mcpctl.ad.itaz.eu. Each http.get call is wrapped so future scheme-mismatch errors also degrade to "unreachable" instead of a stack trace. - .dockerignore no longer excludes src/mcplocal/ (the new Dockerfile.mcplocal needs those files). - scripts/demo-mcp-call.py: standalone, stdlib-only Python demo that makes an MCP request (initialize + tools/list, optional tools/call) using an mcpctl_pat_ bearer. Counterpart to `mcpctl test mcp` for showing external (e.g. vLLM) clients how the bearer flow works. Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
355 lines
13 KiB
TypeScript
import { Command } from 'commander';
|
|
import http from 'node:http';
|
|
import https from 'node:https';
|
|
|
|
/** Pick the http or https driver based on the URL scheme. */
|
|
function httpDriverFor(url: string): typeof http | typeof https {
|
|
return new URL(url).protocol === 'https:' ? https : http;
|
|
}
|
|
import { loadConfig } from '../config/index.js';
|
|
import type { ConfigLoaderDeps } from '../config/index.js';
|
|
import { loadCredentials } from '../auth/index.js';
|
|
import type { CredentialsDeps } from '../auth/index.js';
|
|
import { formatJson, formatYaml } from '../formatters/index.js';
|
|
import { APP_VERSION } from '@mcpctl/shared';
|
|
|
|
// ANSI helpers (escape sequences for colored/animated terminal output)
const GREEN = '\x1b[32m';  // healthy / success marks
const RED = '\x1b[31m';    // failures / unreachable
const YELLOW = '\x1b[33m'; // transitional states (e.g. "starting")
const DIM = '\x1b[2m';     // de-emphasized text (spinner, model list)
const RESET = '\x1b[0m';   // reset all attributes
const CLEAR_LINE = '\x1b[2K\r'; // erase current line + carriage return (spinner redraw)
|
|
|
|
/** Per-provider detail as reported by mcplocal's /llm/providers endpoint. */
interface ProviderDetail {
  /** True when mcplocal manages the provider's lifecycle. */
  managed: boolean;
  /** Lifecycle state for managed providers; formatProviderStatus renders
   *  'running' | 'stopped' | 'starting' | 'error' (anything else falls back
   *  to the plain health mark). */
  state?: string;
  /** Last error message, shown when state === 'error'. */
  lastError?: string;
}
|
|
|
|
/** Payload shape of mcplocal's /llm/providers endpoint. */
interface ProvidersInfo {
  /** All configured provider names. */
  providers: string[];
  /** Provider names grouped into the fast and heavy tiers. */
  tiers: { fast: string[]; heavy: string[] };
  /** Health-check result keyed by provider name. */
  health: Record<string, boolean>;
  /** Optional per-provider lifecycle detail (present for managed providers). */
  details?: Record<string, ProviderDetail>;
}
|
|
|
|
/**
 * Injectable dependencies for the status command. Production code uses
 * `defaultDeps`; tests override individual entries via createStatusCommand().
 */
export interface StatusCommandDeps {
  /** Overrides forwarded to loadConfig. */
  configDeps: Partial<ConfigLoaderDeps>;
  /** Overrides forwarded to loadCredentials. */
  credentialsDeps: Partial<CredentialsDeps>;
  /** Line-oriented output sink (console.log by default). */
  log: (...args: string[]) => void;
  /** Raw output without an implied newline — used for the spinner redraws. */
  write: (text: string) => void;
  /** Probe `${url}/health`; resolves true when the service answers 2xx/3xx. */
  checkHealth: (url: string) => Promise<boolean>;
  /** Check LLM health via mcplocal's /llm/health endpoint */
  checkLlm: (mcplocalUrl: string) => Promise<string>;
  /** Fetch available models from mcplocal's /llm/models endpoint */
  fetchModels: (mcplocalUrl: string) => Promise<string[]>;
  /** Fetch provider tier info from mcplocal's /llm/providers endpoint */
  fetchProviders: (mcplocalUrl: string) => Promise<ProvidersInfo | null>;
  /** Whether stdout is a TTY; enables the animated spinner display. */
  isTTY: boolean;
}
|
|
|
|
/**
 * Probe `${url}/health` and resolve true for any 2xx/3xx response.
 * Resolves false (never rejects) on connect errors, a 3s timeout, or a
 * URL whose scheme the driver cannot handle — the synchronous try/catch
 * around `.get` turns scheme-mismatch throws into "unreachable".
 */
function defaultCheckHealth(url: string): Promise<boolean> {
  return new Promise((resolve) => {
    let req: http.ClientRequest;
    try {
      req = httpDriverFor(url).get(`${url}/health`, { timeout: 3000 }, (res) => {
        resolve(res.statusCode !== undefined && res.statusCode >= 200 && res.statusCode < 400);
        res.resume(); // drain the body so the socket is released
      });
    } catch {
      // Invalid URL or unsupported scheme — degrade to "unreachable".
      resolve(false);
      return;
    }
    req.on('error', () => resolve(false));
    req.on('timeout', () => {
      req.destroy();
      resolve(false);
    });
  });
}
|
|
|
|
/**
|
|
* Check LLM health by querying mcplocal's /llm/health endpoint.
|
|
* This tests the actual provider running inside the daemon (uses persistent ACP for gemini, etc.)
|
|
*/
|
|
function defaultCheckLlm(mcplocalUrl: string): Promise<string> {
|
|
return new Promise((resolve) => {
|
|
let req: http.ClientRequest;
|
|
try {
|
|
req = httpDriverFor(mcplocalUrl).get(`${mcplocalUrl}/llm/health`, { timeout: 45000 }, (res) => {
|
|
const chunks: Buffer[] = [];
|
|
res.on('data', (chunk: Buffer) => chunks.push(chunk));
|
|
res.on('end', () => {
|
|
try {
|
|
const body = JSON.parse(Buffer.concat(chunks).toString('utf-8')) as { status: string; error?: string };
|
|
if (body.status === 'ok') {
|
|
resolve('ok');
|
|
} else if (body.status === 'not configured') {
|
|
resolve('not configured');
|
|
} else if (body.error) {
|
|
resolve(body.error.slice(0, 80));
|
|
} else {
|
|
resolve(body.status);
|
|
}
|
|
} catch {
|
|
resolve('invalid response');
|
|
}
|
|
});
|
|
});
|
|
} catch {
|
|
resolve('mcplocal unreachable');
|
|
return;
|
|
}
|
|
req.on('error', () => resolve('mcplocal unreachable'));
|
|
req.on('timeout', () => { req.destroy(); resolve('timeout'); });
|
|
});
|
|
}
|
|
|
|
function defaultFetchModels(mcplocalUrl: string): Promise<string[]> {
|
|
return new Promise((resolve) => {
|
|
let req: http.ClientRequest;
|
|
try {
|
|
req = httpDriverFor(mcplocalUrl).get(`${mcplocalUrl}/llm/models`, { timeout: 5000 }, (res) => {
|
|
const chunks: Buffer[] = [];
|
|
res.on('data', (chunk: Buffer) => chunks.push(chunk));
|
|
res.on('end', () => {
|
|
try {
|
|
const body = JSON.parse(Buffer.concat(chunks).toString('utf-8')) as { models?: string[] };
|
|
resolve(body.models ?? []);
|
|
} catch {
|
|
resolve([]);
|
|
}
|
|
});
|
|
});
|
|
} catch {
|
|
resolve([]);
|
|
return;
|
|
}
|
|
req.on('error', () => resolve([]));
|
|
req.on('timeout', () => { req.destroy(); resolve([]); });
|
|
});
|
|
}
|
|
|
|
function defaultFetchProviders(mcplocalUrl: string): Promise<ProvidersInfo | null> {
|
|
return new Promise((resolve) => {
|
|
let req: http.ClientRequest;
|
|
try {
|
|
req = httpDriverFor(mcplocalUrl).get(`${mcplocalUrl}/llm/providers`, { timeout: 5000 }, (res) => {
|
|
const chunks: Buffer[] = [];
|
|
res.on('data', (chunk: Buffer) => chunks.push(chunk));
|
|
res.on('end', () => {
|
|
try {
|
|
const body = JSON.parse(Buffer.concat(chunks).toString('utf-8')) as ProvidersInfo;
|
|
resolve(body);
|
|
} catch {
|
|
resolve(null);
|
|
}
|
|
});
|
|
});
|
|
} catch {
|
|
resolve(null);
|
|
return;
|
|
}
|
|
req.on('error', () => resolve(null));
|
|
req.on('timeout', () => { req.destroy(); resolve(null); });
|
|
});
|
|
}
|
|
|
|
// Braille-dot spinner frames, cycled every 80ms on the LLM line while the
// health check runs (TTY output only).
const SPINNER_FRAMES = ['⠋', '⠙', '⠹', '⠸', '⠼', '⠴', '⠦', '⠧', '⠇', '⠏'];
|
|
|
|
// Production wiring for StatusCommandDeps. createStatusCommand() spreads
// caller overrides on top, so tests can replace any single entry.
const defaultDeps: StatusCommandDeps = {
  configDeps: {},
  credentialsDeps: {},
  log: (...args) => console.log(...args),
  write: (text) => process.stdout.write(text),
  checkHealth: defaultCheckHealth,
  checkLlm: defaultCheckLlm,
  fetchModels: defaultFetchModels,
  fetchProviders: defaultFetchProviders,
  // isTTY is undefined when stdout is piped — treat that as non-TTY.
  isTTY: process.stdout.isTTY ?? false,
};
|
|
|
|
/** Determine LLM label from config (handles both legacy and multi-provider formats). */
|
|
function getLlmLabel(llm: unknown): string | null {
|
|
if (!llm || typeof llm !== 'object') return null;
|
|
// Legacy format: { provider, model }
|
|
if ('provider' in llm) {
|
|
const legacy = llm as { provider: string; model?: string };
|
|
if (legacy.provider === 'none') return null;
|
|
return `${legacy.provider}${legacy.model ? ` / ${legacy.model}` : ''}`;
|
|
}
|
|
// Multi-provider format: { providers: [...] }
|
|
if ('providers' in llm) {
|
|
const multi = llm as { providers: Array<{ name: string; type: string; tier?: string }> };
|
|
if (multi.providers.length === 0) return null;
|
|
return multi.providers.map((p) => `${p.name}${p.tier ? ` (${p.tier})` : ''}`).join(', ');
|
|
}
|
|
return null;
|
|
}
|
|
|
|
/** Check if config uses multi-provider format. */
|
|
function isMultiProvider(llm: unknown): boolean {
|
|
return !!llm && typeof llm === 'object' && 'providers' in llm;
|
|
}
|
|
|
|
/**
|
|
* Format a single provider's status string for display.
|
|
* Managed providers show lifecycle state; regular providers show health check result.
|
|
*/
|
|
function formatProviderStatus(name: string, info: ProvidersInfo, ansi: boolean): string {
|
|
const detail = info.details?.[name];
|
|
if (detail?.managed) {
|
|
switch (detail.state) {
|
|
case 'running':
|
|
return ansi ? `${name} ${GREEN}✓ running${RESET}` : `${name} ✓ running`;
|
|
case 'stopped':
|
|
return ansi
|
|
? `${name} ${DIM}○ stopped (auto-starts on demand)${RESET}`
|
|
: `${name} ○ stopped (auto-starts on demand)`;
|
|
case 'starting':
|
|
return ansi ? `${name} ${YELLOW}⟳ starting...${RESET}` : `${name} ⟳ starting...`;
|
|
case 'error':
|
|
return ansi
|
|
? `${name} ${RED}✗ error: ${detail.lastError ?? 'unknown'}${RESET}`
|
|
: `${name} ✗ error: ${detail.lastError ?? 'unknown'}`;
|
|
default: {
|
|
const ok = info.health[name];
|
|
return ansi
|
|
? ok ? `${name} ${GREEN}✓${RESET}` : `${name} ${RED}✗${RESET}`
|
|
: ok ? `${name} ✓` : `${name} ✗`;
|
|
}
|
|
}
|
|
}
|
|
const ok = info.health[name];
|
|
return ansi
|
|
? ok ? `${name} ${GREEN}✓${RESET}` : `${name} ${RED}✗${RESET}`
|
|
: ok ? `${name} ✓` : `${name} ✗`;
|
|
}
|
|
|
|
/**
 * Build the `mcpctl status` command.
 *
 * Reports CLI version, mcplocal/mcpd reachability, auth state, registries,
 * output format, and LLM provider health. Table output prints progressively
 * (with a spinner on a TTY); json/yaml output waits for all checks and emits
 * one document.
 *
 * @param deps - Optional overrides for any StatusCommandDeps entry (tests).
 */
export function createStatusCommand(deps?: Partial<StatusCommandDeps>): Command {
  // Merge caller overrides over the production defaults.
  const { configDeps, credentialsDeps, log, write, checkHealth, checkLlm, fetchModels, fetchProviders, isTTY } = { ...defaultDeps, ...deps };

  return new Command('status')
    .description('Show mcpctl status and connectivity')
    .option('-o, --output <format>', 'output format (table, json, yaml)', 'table')
    .action(async (opts: { output: string }) => {
      const config = loadConfig(configDeps);
      const creds = loadCredentials(credentialsDeps);

      const llmLabel = getLlmLabel(config.llm);
      const multiProvider = isMultiProvider(config.llm);

      if (opts.output !== 'table') {
        // JSON/YAML: run everything in parallel, wait, output at once
        const [mcplocalReachable, mcpdReachable, llmStatus, providersInfo] = await Promise.all([
          checkHealth(config.mcplocalUrl),
          checkHealth(config.mcpdUrl),
          llmLabel ? checkLlm(config.mcplocalUrl) : Promise.resolve(null),
          multiProvider ? fetchProviders(config.mcplocalUrl) : Promise.resolve(null),
        ]);

        // Append the failure status in parens unless the provider is healthy.
        const llm = llmLabel
          ? llmStatus === 'ok' ? llmLabel : `${llmLabel} (${llmStatus})`
          : null;

        const status = {
          version: APP_VERSION,
          mcplocalUrl: config.mcplocalUrl,
          mcplocalReachable,
          mcpdUrl: config.mcpdUrl,
          mcpdReachable,
          auth: creds ? { user: creds.user } : null,
          registries: config.registries,
          outputFormat: config.outputFormat,
          llm,
          llmStatus,
          // Only include the providers key when tier info was fetched.
          ...(providersInfo ? { providers: providersInfo } : {}),
        };

        log(opts.output === 'json' ? formatJson(status) : formatYaml(status));
        return;
      }

      // Table format: print lines progressively, LLM last with spinner

      // Fast health checks first
      const [mcplocalReachable, mcpdReachable] = await Promise.all([
        checkHealth(config.mcplocalUrl),
        checkHealth(config.mcpdUrl),
      ]);

      log(`mcpctl v${APP_VERSION}`);
      log(`mcplocal: ${config.mcplocalUrl} (${mcplocalReachable ? 'connected' : 'unreachable'})`);
      log(`mcpd: ${config.mcpdUrl} (${mcpdReachable ? 'connected' : 'unreachable'})`);
      log(`Auth: ${creds ? `logged in as ${creds.user}` : 'not logged in'}`);
      log(`Registries: ${config.registries.join(', ')}`);
      log(`Output: ${config.outputFormat}`);

      // Without a configured LLM there is nothing further to check.
      if (!llmLabel) {
        log(`LLM: not configured (run 'mcpctl config setup')`);
        return;
      }

      // LLM check + models + providers fetch in parallel
      const llmPromise = checkLlm(config.mcplocalUrl);
      const modelsPromise = fetchModels(config.mcplocalUrl);
      const providersPromise = multiProvider ? fetchProviders(config.mcplocalUrl) : Promise.resolve(null);

      if (isTTY) {
        // Animate a spinner on the LLM line while the (potentially slow)
        // LLM health check runs; redraw in place every 80ms.
        let frame = 0;
        const interval = setInterval(() => {
          write(`${CLEAR_LINE}LLM: ${DIM}${SPINNER_FRAMES[frame % SPINNER_FRAMES.length]} checking...${RESET}`);
          frame++;
        }, 80);

        const [llmStatus, models, providersInfo] = await Promise.all([llmPromise, modelsPromise, providersPromise]);
        clearInterval(interval);

        if (providersInfo && (providersInfo.tiers.fast.length > 0 || providersInfo.tiers.heavy.length > 0)) {
          // Tiered display with per-provider health
          write(`${CLEAR_LINE}`);
          for (const tier of ['fast', 'heavy'] as const) {
            const names = providersInfo.tiers[tier];
            if (names.length === 0) continue;
            const label = tier === 'fast' ? 'LLM (fast): ' : 'LLM (heavy):';
            const parts = names.map((n) => formatProviderStatus(n, providersInfo, true));
            log(`${label} ${parts.join(', ')}`);
          }
        } else {
          // Legacy single provider display
          if (llmStatus === 'ok' || llmStatus === 'ok (key stored)') {
            write(`${CLEAR_LINE}LLM: ${llmLabel} ${GREEN}✓ ${llmStatus}${RESET}\n`);
          } else {
            write(`${CLEAR_LINE}LLM: ${llmLabel} ${RED}✗ ${llmStatus}${RESET}\n`);
          }
        }
        if (models.length > 0) {
          log(`${DIM} Available: ${models.join(', ')}${RESET}`);
        }
      } else {
        // Non-TTY: no spinner, just wait and print
        const [llmStatus, models, providersInfo] = await Promise.all([llmPromise, modelsPromise, providersPromise]);

        if (providersInfo && (providersInfo.tiers.fast.length > 0 || providersInfo.tiers.heavy.length > 0)) {
          // Tiered display with per-provider health (no ANSI colors here).
          for (const tier of ['fast', 'heavy'] as const) {
            const names = providersInfo.tiers[tier];
            if (names.length === 0) continue;
            const label = tier === 'fast' ? 'LLM (fast): ' : 'LLM (heavy):';
            const parts = names.map((n) => formatProviderStatus(n, providersInfo, false));
            log(`${label} ${parts.join(', ')}`);
          }
        } else {
          // Legacy single provider display.
          if (llmStatus === 'ok' || llmStatus === 'ok (key stored)') {
            log(`LLM: ${llmLabel} ✓ ${llmStatus}`);
          } else {
            log(`LLM: ${llmLabel} ✗ ${llmStatus}`);
          }
        }
        if (models.length > 0) {
          log(`${DIM} Available: ${models.join(', ')}${RESET}`);
        }
      }
    });
}
|