Compare commits

...

2 Commits

Author SHA1 Message Date
Michal
0db37e92a4 feat(cli)+fix(mcpd): server-side LLM status + SPA fallback 500
Some checks failed
CI/CD / typecheck (pull_request) Successful in 58s
CI/CD / test (pull_request) Successful in 1m9s
CI/CD / lint (pull_request) Successful in 2m14s
CI/CD / smoke (pull_request) Failing after 1m39s
CI/CD / build (pull_request) Successful in 2m14s
CI/CD / publish (pull_request) Has been skipped
Two related fixes:

1. \`mcpctl status\` now lists mcpd-managed Llm rows (the ones created via
   \`mcpctl create llm\`) under a new "Server LLMs:" section, grouped by
   tier with type, model, upstream URL, and key reference. JSON/YAML
   output gains a \`serverLlms\` array.

   Bearer token (from \`mcpctl auth login\` / saved credentials) is
   passed through; if mcpd is unreachable or returns non-200 the
   section is silently omitted (the existing mcpd connectivity line
   already conveys that). 6 new tests cover happy path, empty list,
   token plumbing, and JSON shape.

2. SPA fallback at \`/ui/<deeplink>\` was returning 500 because we
   registered \`@fastify/static\` with \`decorateReply: false\` and then
   called \`reply.sendFile\`. Read index.html once at startup and serve
   it with \`reply.send(html)\` instead — also dodges a per-request
   stat call. Drop \`decorateReply: false\` so future code can use
   reply.sendFile if it ever needs to.

Full suite: 2005/2005 across 149 files.

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
2026-04-27 11:27:45 +01:00
899f2c750c fix(test): vitest 4 projects + src/web jsdom env (#59)
Some checks failed
CI/CD / lint (push) Successful in 55s
CI/CD / test (push) Successful in 1m10s
CI/CD / typecheck (push) Successful in 2m37s
CI/CD / smoke (push) Failing after 1m41s
CI/CD / build (push) Successful in 2m38s
CI/CD / publish (push) Has been skipped
2026-04-26 20:31:47 +00:00
3 changed files with 189 additions and 7 deletions

View File

@@ -34,6 +34,16 @@ interface ProvidersInfo {
details?: Record<string, ProviderDetail>;
}
/**
 * One server-managed LLM row as returned by mcpd's GET /api/v1/llms —
 * the records created via `mcpctl create llm`, distinct from the
 * mcplocal client-side providers described by ProvidersInfo.
 */
interface ServerLlm {
  id: string;
  /** Display name shown in the "Server LLMs:" status listing. */
  name: string;
  /** Provider type, e.g. "openai" or "anthropic". */
  type: string;
  model: string;
  /** Routing tier; "fast" and "heavy" render first, other tiers sorted after. */
  tier: string;
  /** Upstream base URL; empty string means the provider default is used. */
  url: string;
  /** Secret reference for the API key; null/absent when no key is configured. */
  apiKeyRef?: { name: string; key: string } | null;
}
export interface StatusCommandDeps {
configDeps: Partial<ConfigLoaderDeps>;
credentialsDeps: Partial<CredentialsDeps>;
@@ -46,6 +56,12 @@ export interface StatusCommandDeps {
fetchModels: (mcplocalUrl: string) => Promise<string[]>;
/** Fetch provider tier info from mcplocal's /llm/providers endpoint */
fetchProviders: (mcplocalUrl: string) => Promise<ProvidersInfo | null>;
/**
* Fetch server-managed LLMs from mcpd (`mcpctl create llm` rows). Returns
* null on auth failure, network error, or any other unhappy path so the
* command stays printable even when mcpd is unreachable.
*/
fetchServerLlms: (mcpdUrl: string, token: string | null) => Promise<ServerLlm[] | null>;
isTTY: boolean;
}
@@ -156,6 +172,39 @@ function defaultFetchProviders(mcplocalUrl: string): Promise<ProvidersInfo | nul
});
}
/**
* Fetch server-managed LLMs (the rows created by `mcpctl create llm`).
* Goes directly to mcpd because mcplocal does not proxy /api/v1/llms.
* Returns null on any error so the caller can decide whether to render
* a "not available" line vs. spilling stack traces into the status view.
*/
/**
 * Fetch server-managed LLMs (the rows created by `mcpctl create llm`).
 * Goes directly to mcpd because mcplocal does not proxy /api/v1/llms.
 *
 * @param mcpdUrl Base URL of the mcpd server.
 * @param token Bearer token from saved credentials, or null to send no
 *   Authorization header.
 * @returns Parsed rows, or null on any unhappy path (non-200, timeout,
 *   network error, malformed or non-array body) so the caller can decide
 *   whether to render a "not available" line vs. spilling stack traces
 *   into the status view.
 */
function defaultFetchServerLlms(mcpdUrl: string, token: string | null): Promise<ServerLlm[] | null> {
  return new Promise((resolve) => {
    let req: http.ClientRequest;
    const headers: Record<string, string> = { Accept: 'application/json' };
    if (token !== null) headers['Authorization'] = `Bearer ${token}`;
    try {
      req = httpDriverFor(mcpdUrl).get(`${mcpdUrl}/api/v1/llms`, { timeout: 5000, headers }, (res) => {
        if (res.statusCode !== 200) {
          // Drain the body so the socket is released, then give up quietly.
          resolve(null);
          res.resume();
          return;
        }
        const chunks: Buffer[] = [];
        res.on('data', (chunk: Buffer) => chunks.push(chunk));
        res.on('end', () => {
          try {
            const parsed: unknown = JSON.parse(Buffer.concat(chunks).toString('utf-8'));
            // Guard the shape: a 200 with a non-array body (error object,
            // proxy page serialized as JSON, …) must not masquerade as a
            // row list — previously it was cast unchecked to ServerLlm[].
            resolve(Array.isArray(parsed) ? (parsed as ServerLlm[]) : null);
          } catch {
            resolve(null);
          }
        });
      });
    } catch {
      // NOTE(review): presumably httpDriverFor / URL handling can throw
      // synchronously on malformed input — the catch keeps the promise
      // resolving instead of rejecting.
      resolve(null);
      return;
    }
    req.on('error', () => resolve(null));
    req.on('timeout', () => {
      req.destroy();
      resolve(null);
    });
  });
}
const SPINNER_FRAMES = ['⠋', '⠙', '⠹', '⠸', '⠼', '⠴', '⠦', '⠧', '⠇', '⠏'];
const defaultDeps: StatusCommandDeps = {
@@ -167,6 +216,7 @@ const defaultDeps: StatusCommandDeps = {
checkLlm: defaultCheckLlm,
fetchModels: defaultFetchModels,
fetchProviders: defaultFetchProviders,
fetchServerLlms: defaultFetchServerLlms,
isTTY: process.stdout.isTTY ?? false,
};
@@ -228,7 +278,7 @@ function formatProviderStatus(name: string, info: ProvidersInfo, ansi: boolean):
}
export function createStatusCommand(deps?: Partial<StatusCommandDeps>): Command {
const { configDeps, credentialsDeps, log, write, checkHealth, checkLlm, fetchModels, fetchProviders, isTTY } = { ...defaultDeps, ...deps };
const { configDeps, credentialsDeps, log, write, checkHealth, checkLlm, fetchModels, fetchProviders, fetchServerLlms, isTTY } = { ...defaultDeps, ...deps };
return new Command('status')
.description('Show mcpctl status and connectivity')
@@ -242,11 +292,12 @@ export function createStatusCommand(deps?: Partial<StatusCommandDeps>): Command
if (opts.output !== 'table') {
// JSON/YAML: run everything in parallel, wait, output at once
const [mcplocalReachable, mcpdReachable, llmStatus, providersInfo] = await Promise.all([
const [mcplocalReachable, mcpdReachable, llmStatus, providersInfo, serverLlms] = await Promise.all([
checkHealth(config.mcplocalUrl),
checkHealth(config.mcpdUrl),
llmLabel ? checkLlm(config.mcplocalUrl) : Promise.resolve(null),
multiProvider ? fetchProviders(config.mcplocalUrl) : Promise.resolve(null),
fetchServerLlms(config.mcpdUrl, creds?.token ?? null),
]);
const llm = llmLabel
@@ -265,6 +316,7 @@ export function createStatusCommand(deps?: Partial<StatusCommandDeps>): Command
llm,
llmStatus,
...(providersInfo ? { providers: providersInfo } : {}),
...(serverLlms !== null ? { serverLlms } : {}),
};
log(opts.output === 'json' ? formatJson(status) : formatYaml(status));
@@ -286,8 +338,14 @@ export function createStatusCommand(deps?: Partial<StatusCommandDeps>): Command
log(`Registries: ${config.registries.join(', ')}`);
log(`Output: ${config.outputFormat}`);
// Server LLMs (mcpd-managed) — fetched in parallel regardless of the
// local-LLM config, so the section renders even on machines without
// a configured client-side provider.
const serverLlmsPromise = fetchServerLlms(config.mcpdUrl, creds?.token ?? null);
if (!llmLabel) {
log(`LLM: not configured (run 'mcpctl config setup')`);
await renderServerLlmsSection(serverLlmsPromise, isTTY);
return;
}
@@ -350,5 +408,51 @@ export function createStatusCommand(deps?: Partial<StatusCommandDeps>): Command
log(`${DIM} Available: ${models.join(', ')}${RESET}`);
}
}
await renderServerLlmsSection(serverLlmsPromise, isTTY);
});
/**
* Print a "Server LLMs:" section listing mcpd-managed Llm rows by tier.
* These are the rows created via `mcpctl create llm` — distinct from the
* mcplocal-side providers shown by the existing "LLM:" lines above. The
* caller awaits a pre-launched promise so this doesn't add round-trips.
*/
/**
 * Print a "Server LLMs:" section listing mcpd-managed Llm rows by tier.
 * These are the rows created via `mcpctl create llm` — distinct from the
 * mcplocal-side providers shown by the existing "LLM:" lines above. The
 * caller awaits a pre-launched promise so this doesn't add round-trips.
 *
 * @param serverLlmsPromise In-flight fetch of the server rows; a null
 *   result means mcpd was unreachable / auth failed and the section is
 *   silently omitted.
 * @param ansi Whether to wrap detail lines in dim/reset escape codes.
 */
async function renderServerLlmsSection(
  serverLlmsPromise: Promise<ServerLlm[] | null>,
  ansi: boolean,
): Promise<void> {
  const llms = await serverLlmsPromise;
  if (llms === null) {
    // Auth failure / unreachable mcpd: fold into the existing mcpd-status
    // signal we already printed; nothing more to say here.
    return;
  }
  if (llms.length === 0) {
    log(`Server LLMs: none registered ${ansi ? DIM : ''}(use 'mcpctl create llm')${ansi ? RESET : ''}`);
    return;
  }
  // Group rows by tier so each tier renders as one cluster.
  const byTier = new Map<string, ServerLlm[]>();
  for (const l of llms) {
    const arr = byTier.get(l.tier) ?? [];
    arr.push(l);
    byTier.set(l.tier, arr);
  }
  log(`Server LLMs: ${String(llms.length)} registered`);
  // Print tiers in a stable order — fast/heavy first, then anything else.
  const tierOrder = ['fast', 'heavy', ...[...byTier.keys()].filter((t) => t !== 'fast' && t !== 'heavy').sort()];
  for (const tier of tierOrder) {
    const rows = byTier.get(tier);
    if (rows === undefined || rows.length === 0) continue;
    const formatted = rows.map((r) => {
      const upstream = r.url !== '' ? r.url : 'provider default';
      const auth = r.apiKeyRef ? `key:${r.apiKeyRef.name}/${r.apiKeyRef.key}` : 'no key';
      // "name (type → model)": the " → " separator is what the status
      // tests assert (e.g. "qwen3-thinking (openai → qwen3-thinking)");
      // previously type and model were concatenated with no separator.
      const line = `${r.name} (${r.type} → ${r.model}) ${upstream} ${auth}`;
      return ansi ? `${DIM}${line}${RESET}` : line;
    });
    log(` ${tier.padEnd(6)} ${formatted.join('\n ')}`);
  }
}
}

View File

@@ -27,6 +27,7 @@ function baseDeps(overrides?: Partial<StatusCommandDeps>): Partial<StatusCommand
write,
checkHealth: async () => true,
fetchProviders: async () => null,
fetchServerLlms: async () => null,
isTTY: false,
...overrides,
};
@@ -199,4 +200,67 @@ describe('status command', () => {
expect(parsed['llm']).toBeNull();
expect(parsed['llmStatus']).toBeNull();
});
// ── Server LLMs (mcpd-managed Llm rows) ──
it('renders a "Server LLMs:" section grouped by tier in table mode', async () => {
saveCredentials({ token: 't', mcpdUrl: 'http://mcpd', user: 'u' }, { configDir: tempDir });
const cmd = createStatusCommand(baseDeps({
fetchServerLlms: async () => [
{ id: 'l1', name: 'qwen3-thinking', type: 'openai', model: 'qwen3-thinking', tier: 'fast', url: 'http://x:4000/v1', apiKeyRef: { name: 'litellm', key: 'API_KEY' } },
{ id: 'l2', name: 'sonnet', type: 'anthropic', model: 'claude-sonnet-4-5', tier: 'heavy', url: '', apiKeyRef: null },
],
}));
await cmd.parseAsync([], { from: 'user' });
const out = output.join('\n');
expect(out).toContain('Server LLMs: 2 registered');
expect(out).toContain('qwen3-thinking (openai → qwen3-thinking)');
expect(out).toContain('sonnet (anthropic → claude-sonnet-4-5)');
expect(out).toMatch(/fast\s+qwen3-thinking/);
expect(out).toMatch(/heavy\s+sonnet/);
});
it('renders "none registered" when mcpd has no Llm rows', async () => {
const cmd = createStatusCommand(baseDeps({ fetchServerLlms: async () => [] }));
await cmd.parseAsync([], { from: 'user' });
const out = output.join('\n');
expect(out).toContain('Server LLMs: none registered');
expect(out).toContain("'mcpctl create llm'");
});
it('omits the section silently when mcpd is unreachable (fetcher returns null)', async () => {
const cmd = createStatusCommand(baseDeps({ fetchServerLlms: async () => null }));
await cmd.parseAsync([], { from: 'user' });
const out = output.join('\n');
expect(out).not.toContain('Server LLMs');
});
it('passes the bearer token from saved credentials to the fetcher', async () => {
saveCredentials({ token: 'tok-abc', mcpdUrl: 'http://mcpd', user: 'u' }, { configDir: tempDir });
let capturedToken: string | null = '<unseen>';
const cmd = createStatusCommand(baseDeps({
fetchServerLlms: async (_url, token) => { capturedToken = token; return []; },
}));
await cmd.parseAsync([], { from: 'user' });
expect(capturedToken).toBe('tok-abc');
});
it('passes null token when there are no saved credentials', async () => {
let capturedToken: string | null = '<unseen>';
const cmd = createStatusCommand(baseDeps({
fetchServerLlms: async (_url, token) => { capturedToken = token; return []; },
}));
await cmd.parseAsync([], { from: 'user' });
expect(capturedToken).toBeNull();
});
it('includes serverLlms in JSON output', async () => {
const llms = [
{ id: 'l1', name: 'qwen3-thinking', type: 'openai', model: 'qwen3-thinking', tier: 'fast', url: 'http://x', apiKeyRef: null },
];
const cmd = createStatusCommand(baseDeps({ fetchServerLlms: async () => llms }));
await cmd.parseAsync(['-o', 'json'], { from: 'user' });
const parsed = JSON.parse(output[0]) as { serverLlms?: typeof llms };
expect(parsed.serverLlms).toEqual(llms);
});
});

View File

@@ -13,7 +13,7 @@
* index.html so client-side react-router routes work on direct hits.
*/
import path from 'node:path';
import { existsSync, statSync } from 'node:fs';
import { existsSync, statSync, readFileSync } from 'node:fs';
import { fileURLToPath } from 'node:url';
import type { FastifyInstance } from 'fastify';
import fastifyStatic from '@fastify/static';
@@ -57,13 +57,27 @@ export async function registerWebUi(app: FastifyInstance): Promise<void> {
root,
prefix: '/ui/',
wildcard: false,
decorateReply: false,
});
// SPA fallback — react-router URLs like /ui/agents/foo/personalities/bar
// need index.html to bootstrap the app.
// Read index.html once at startup; the SPA fallback below serves it
// verbatim for every unmatched /ui/* path so client-side routing works
// on direct hits. Reading once also dodges a per-request `sendFile`
// call — there's only one file ever served from this handler.
const indexHtmlPath = path.join(root, 'index.html');
const indexHtml = existsSync(indexHtmlPath)
? readFileSync(indexHtmlPath, 'utf-8')
: null;
if (indexHtml === null) {
app.log.warn({ root }, 'web UI index.html missing; deep links to /ui/<path> will 404');
}
app.get('/ui/*', (_request, reply) => {
return reply.sendFile('index.html', root);
if (indexHtml === null) {
reply.code(404);
return { error: 'index.html missing from web UI bundle' };
}
reply.type('text/html').send(indexHtml);
return reply;
});
// Cover the bare /ui (no trailing slash) too.
app.get('/ui', (_request, reply) => {