feat: auto-detect gemini binary path; add LLM health check to status

- Setup wizard auto-detects gemini binary via `which`, saves full path
  so systemd service can find it without user PATH
- `mcpctl status` tests LLM provider health (gemini: quick prompt test,
  ollama: health check, API providers: key stored confirmation)
- Shows error details inline: "gemini-cli / gemini-2.5-flash (not authenticated)"

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Michal
2026-02-24 23:24:31 +00:00
parent 3ff39ff1ee
commit 36cd0bbec4
4 changed files with 158 additions and 26 deletions

View File

@@ -141,18 +141,48 @@ describe('status command', () => {
expect(out).toContain('mcpctl config setup');
});
it('shows configured LLM provider and model', async () => {
it('shows configured LLM provider and model when healthy', async () => {
saveConfig({ ...DEFAULT_CONFIG, llm: { provider: 'anthropic', model: 'claude-haiku-3-5-20241022' } }, { configDir: tempDir });
const cmd = createStatusCommand({
configDeps: { configDir: tempDir },
credentialsDeps: { configDir: tempDir },
log,
checkHealth: async () => true,
checkLlm: async () => 'ok',
});
await cmd.parseAsync([], { from: 'user' });
const out = output.join('\n');
expect(out).toContain('LLM:');
expect(out).toContain('anthropic / claude-haiku-3-5-20241022');
// Should NOT show error status when ok
expect(out).not.toContain('(ok)');
});
it('shows LLM error status when check fails', async () => {
saveConfig({ ...DEFAULT_CONFIG, llm: { provider: 'gemini-cli', model: 'gemini-2.5-flash' } }, { configDir: tempDir });
const cmd = createStatusCommand({
configDeps: { configDir: tempDir },
credentialsDeps: { configDir: tempDir },
log,
checkHealth: async () => true,
checkLlm: async () => 'not authenticated',
});
await cmd.parseAsync([], { from: 'user' });
const out = output.join('\n');
expect(out).toContain('gemini-cli / gemini-2.5-flash (not authenticated)');
});
it('shows binary not found status', async () => {
saveConfig({ ...DEFAULT_CONFIG, llm: { provider: 'gemini-cli', model: 'gemini-2.5-flash' } }, { configDir: tempDir });
const cmd = createStatusCommand({
configDeps: { configDir: tempDir },
credentialsDeps: { configDir: tempDir },
log,
checkHealth: async () => true,
checkLlm: async () => 'binary not found',
});
await cmd.parseAsync([], { from: 'user' });
expect(output.join('\n')).toContain('(binary not found)');
});
it('shows not configured when LLM provider is none', async () => {
@@ -167,17 +197,19 @@ describe('status command', () => {
expect(output.join('\n')).toContain('not configured');
});
it('includes llm field in JSON output', async () => {
it('includes llm and llmStatus in JSON output', async () => {
saveConfig({ ...DEFAULT_CONFIG, llm: { provider: 'gemini-cli', model: 'gemini-2.5-flash' } }, { configDir: tempDir });
const cmd = createStatusCommand({
configDeps: { configDir: tempDir },
credentialsDeps: { configDir: tempDir },
log,
checkHealth: async () => true,
checkLlm: async () => 'ok',
});
await cmd.parseAsync(['-o', 'json'], { from: 'user' });
const parsed = JSON.parse(output[0]) as Record<string, unknown>;
expect(parsed['llm']).toBe('gemini-cli / gemini-2.5-flash');
expect(parsed['llmStatus']).toBe('ok');
});
it('includes null llm in JSON output when not configured', async () => {
@@ -190,5 +222,6 @@ describe('status command', () => {
await cmd.parseAsync(['-o', 'json'], { from: 'user' });
const parsed = JSON.parse(output[0]) as Record<string, unknown>;
expect(parsed['llm']).toBeNull();
expect(parsed['llmStatus']).toBeNull();
});
});