feat(cli)+fix(mcpd): server-side LLM status + SPA fallback 500
Some checks failed
CI/CD / typecheck (pull_request) Successful in 58s
CI/CD / test (pull_request) Successful in 1m9s
CI/CD / lint (pull_request) Successful in 2m14s
CI/CD / smoke (pull_request) Failing after 1m39s
CI/CD / build (pull_request) Successful in 2m14s
CI/CD / publish (pull_request) Has been skipped

Two related fixes:

1. `mcpctl status` now lists mcpd-managed Llm rows (the ones created via
   `mcpctl create llm`) under a new "Server LLMs:" section, grouped by
   tier with type, model, upstream URL, and key reference. JSON/YAML
   output gains a `serverLlms` array.
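
   For reference, the row shape the new section renders, reconstructed from
   the test fixtures in the diff below (the interface name itself is
   illustrative, not part of this change):

   ```ts
   // One mcpd-managed Llm row as surfaced in the new `serverLlms` array.
   // Field names mirror the test fixtures; the interface name is illustrative.
   interface ServerLlm {
     id: string;
     name: string;
     type: string;      // provider type, e.g. 'openai' or 'anthropic'
     model: string;
     tier: string;      // grouping key in the table, e.g. 'fast' or 'heavy'
     url: string;       // upstream URL; may be empty
     apiKeyRef: { name: string; key: string } | null;
   }
   ```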

   The bearer token (from `mcpctl auth login` / saved credentials) is
   passed through; if mcpd is unreachable or returns a non-200, the
   section is silently omitted (the existing mcpd connectivity line
   already conveys that). 6 new tests cover the happy path, empty list,
   token plumbing, and JSON shape.
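
   A minimal sketch of the fetcher behind that behaviour, using the
   ServerLlm shape above (the `/llms` path and the use of global fetch are
   assumptions, not taken from this diff; only the null-on-failure contract
   is asserted by the new tests):

   ```ts
   // Hedged sketch: returns null on any failure so `mcpctl status` can
   // silently omit the "Server LLMs:" section, per the behaviour above.
   async function fetchServerLlms(
     mcpdUrl: string,
     token: string | null,
   ): Promise<ServerLlm[] | null> {
     try {
       const res = await fetch(`${mcpdUrl}/llms`, {
         headers: token ? { Authorization: `Bearer ${token}` } : {},
       });
       if (!res.ok) return null;   // non-200: omit the section
       return (await res.json()) as ServerLlm[];
     } catch {
       return null;                // mcpd unreachable: omit the section
     }
   }
   ```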

2. SPA fallback at `/ui/<deeplink>` was returning 500 because we
   registered `@fastify/static` with `decorateReply: false` and then
   called `reply.sendFile`. Read index.html once at startup and serve
   it with `reply.send(html)` instead; this also dodges a per-request
   stat call. Drop `decorateReply: false` so future code can use
   `reply.sendFile` if it ever needs to.
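
   Roughly the shape of the fix (variable names, the asset directory, and
   the not-found fallback are illustrative; the actual wiring in the repo
   may differ):

   ```ts
   import { readFileSync } from 'node:fs';
   import { join } from 'node:path';
   import Fastify from 'fastify';
   import fastifyStatic from '@fastify/static';

   const app = Fastify();
   const uiDir = join(__dirname, 'ui');                          // assumed asset dir
   const html = readFileSync(join(uiDir, 'index.html'), 'utf8'); // read once at startup

   // No `decorateReply: false`, so reply.sendFile stays available if needed later.
   app.register(fastifyStatic, { root: uiDir, prefix: '/ui/' });

   // Deep links under /ui/ fall back to the cached index.html: no per-request
   // stat call and no dependency on the sendFile decorator.
   app.setNotFoundHandler((req, reply) => {
     if (req.url.startsWith('/ui/')) {
       return reply.type('text/html').send(html);
     }
     return reply.code(404).send();
   });
   ```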

Full suite: 2005/2005 across 149 files.

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
Michal
2026-04-27 11:27:45 +01:00
parent 899f2c750c
commit 0db37e92a4
3 changed files with 189 additions and 7 deletions


@@ -27,6 +27,7 @@ function baseDeps(overrides?: Partial<StatusCommandDeps>): Partial<StatusCommand
    write,
    checkHealth: async () => true,
    fetchProviders: async () => null,
    fetchServerLlms: async () => null,
    isTTY: false,
    ...overrides,
  };
@@ -199,4 +200,67 @@ describe('status command', () => {
    expect(parsed['llm']).toBeNull();
    expect(parsed['llmStatus']).toBeNull();
  });

  // ── Server LLMs (mcpd-managed Llm rows) ──

  it('renders a "Server LLMs:" section grouped by tier in table mode', async () => {
    saveCredentials({ token: 't', mcpdUrl: 'http://mcpd', user: 'u' }, { configDir: tempDir });
    const cmd = createStatusCommand(baseDeps({
      fetchServerLlms: async () => [
        { id: 'l1', name: 'qwen3-thinking', type: 'openai', model: 'qwen3-thinking', tier: 'fast', url: 'http://x:4000/v1', apiKeyRef: { name: 'litellm', key: 'API_KEY' } },
        { id: 'l2', name: 'sonnet', type: 'anthropic', model: 'claude-sonnet-4-5', tier: 'heavy', url: '', apiKeyRef: null },
      ],
    }));
    await cmd.parseAsync([], { from: 'user' });
    const out = output.join('\n');
    expect(out).toContain('Server LLMs: 2 registered');
    expect(out).toContain('qwen3-thinking (openai → qwen3-thinking)');
    expect(out).toContain('sonnet (anthropic → claude-sonnet-4-5)');
    expect(out).toMatch(/fast\s+qwen3-thinking/);
    expect(out).toMatch(/heavy\s+sonnet/);
  });

  it('renders "none registered" when mcpd has no Llm rows', async () => {
    const cmd = createStatusCommand(baseDeps({ fetchServerLlms: async () => [] }));
    await cmd.parseAsync([], { from: 'user' });
    const out = output.join('\n');
    expect(out).toContain('Server LLMs: none registered');
    expect(out).toContain("'mcpctl create llm'");
  });

  it('omits the section silently when mcpd is unreachable (fetcher returns null)', async () => {
    const cmd = createStatusCommand(baseDeps({ fetchServerLlms: async () => null }));
    await cmd.parseAsync([], { from: 'user' });
    const out = output.join('\n');
    expect(out).not.toContain('Server LLMs');
  });

  it('passes the bearer token from saved credentials to the fetcher', async () => {
    saveCredentials({ token: 'tok-abc', mcpdUrl: 'http://mcpd', user: 'u' }, { configDir: tempDir });
    let capturedToken: string | null = '<unseen>';
    const cmd = createStatusCommand(baseDeps({
      fetchServerLlms: async (_url, token) => { capturedToken = token; return []; },
    }));
    await cmd.parseAsync([], { from: 'user' });
    expect(capturedToken).toBe('tok-abc');
  });

  it('passes null token when there are no saved credentials', async () => {
    let capturedToken: string | null = '<unseen>';
    const cmd = createStatusCommand(baseDeps({
      fetchServerLlms: async (_url, token) => { capturedToken = token; return []; },
    }));
    await cmd.parseAsync([], { from: 'user' });
    expect(capturedToken).toBeNull();
  });

  it('includes serverLlms in JSON output', async () => {
    const llms = [
      { id: 'l1', name: 'qwen3-thinking', type: 'openai', model: 'qwen3-thinking', tier: 'fast', url: 'http://x', apiKeyRef: null },
    ];
    const cmd = createStatusCommand(baseDeps({ fetchServerLlms: async () => llms }));
    await cmd.parseAsync(['-o', 'json'], { from: 'user' });
    const parsed = JSON.parse(output[0]) as { serverLlms?: typeof llms };
    expect(parsed.serverLlms).toEqual(llms);
  });
});