feat: auto-detect gemini binary path, LLM health check in status
Some checks are pending
CI / lint (push) Waiting to run
CI / typecheck (push) Waiting to run
CI / test (push) Waiting to run
CI / build (push) Blocked by required conditions
CI / package (push) Blocked by required conditions

- Setup wizard auto-detects gemini binary via `which`, saves full path
  so systemd service can find it without user PATH
- `mcpctl status` tests LLM provider health (gemini: quick prompt test,
  ollama: health check, API providers: key stored confirmation)
- Shows error details inline: "gemini-cli / gemini-2.5-flash (not authenticated)"

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Michal
2026-02-24 23:24:31 +00:00
parent 869217a07a
commit 848868d45f
4 changed files with 158 additions and 26 deletions

View File

@@ -1,11 +1,15 @@
import { Command } from 'commander';
import http from 'node:http';
import https from 'node:https';
import { execFile } from 'node:child_process';
import { promisify } from 'node:util';
import { loadConfig, saveConfig } from '../config/index.js';
import type { ConfigLoaderDeps, McpctlConfig, LlmConfig, LlmProviderName } from '../config/index.js';
import type { SecretStore } from '@mcpctl/shared';
import { createSecretStore } from '@mcpctl/shared';
const execFileAsync = promisify(execFile);
export interface ConfigSetupPrompt {
select<T>(message: string, choices: Array<{ name: string; value: T; description?: string }>): Promise<T>;
input(message: string, defaultValue?: string): Promise<string>;
@@ -19,6 +23,7 @@ export interface ConfigSetupDeps {
log: (...args: string[]) => void;
prompt: ConfigSetupPrompt;
fetchModels: (url: string, path: string) => Promise<string[]>;
whichBinary: (name: string) => Promise<string | null>;
}
interface ProviderChoice {
@@ -130,6 +135,16 @@ const defaultPrompt: ConfigSetupPrompt = {
confirm: defaultConfirm,
};
async function defaultWhichBinary(name: string): Promise<string | null> {
  // Resolve a binary to its absolute path via `which`, so environments that
  // lack the user's interactive PATH (e.g. a systemd unit) can still run it.
  // Returns null when the lookup fails, times out, or produces no output.
  try {
    const result = await execFileAsync('which', [name], { timeout: 3000 });
    const resolved = result.stdout.trim();
    return resolved.length > 0 ? resolved : null;
  } catch {
    // `which` itself missing, the binary not found, or the 3s timeout hit.
    return null;
  }
}
export function createConfigSetupCommand(deps?: Partial<ConfigSetupDeps>): Command {
return new Command('setup')
.description('Interactive LLM provider setup wizard')
@@ -138,6 +153,7 @@ export function createConfigSetupCommand(deps?: Partial<ConfigSetupDeps>): Comma
const log = deps?.log ?? ((...args: string[]) => console.log(...args));
const prompt = deps?.prompt ?? defaultPrompt;
const fetchModels = deps?.fetchModels ?? defaultFetchModels;
const whichBinary = deps?.whichBinary ?? defaultWhichBinary;
const secretStore = deps?.secretStore ?? await createSecretStore();
const config = loadConfig(configDeps);
@@ -164,7 +180,7 @@ export function createConfigSetupCommand(deps?: Partial<ConfigSetupDeps>): Comma
switch (provider) {
case 'gemini-cli':
llmConfig = await setupGeminiCli(prompt, currentLlm);
llmConfig = await setupGeminiCli(prompt, log, whichBinary, currentLlm);
break;
case 'ollama':
llmConfig = await setupOllama(prompt, fetchModels, currentLlm);
@@ -192,7 +208,12 @@ export function createConfigSetupCommand(deps?: Partial<ConfigSetupDeps>): Comma
});
}
async function setupGeminiCli(prompt: ConfigSetupPrompt, current?: LlmConfig): Promise<LlmConfig> {
async function setupGeminiCli(
prompt: ConfigSetupPrompt,
log: (...args: string[]) => void,
whichBinary: (name: string) => Promise<string | null>,
current?: LlmConfig,
): Promise<LlmConfig> {
const model = await prompt.select<string>('Select model:', [
...GEMINI_MODELS.map((m) => ({
name: m === current?.model ? `${m} (current)` : m,
@@ -205,10 +226,17 @@ async function setupGeminiCli(prompt: ConfigSetupPrompt, current?: LlmConfig): P
? await prompt.input('Model name:', current?.model)
: model;
const customBinary = await prompt.confirm('Use custom binary path?', false);
const binaryPath = customBinary
? await prompt.input('Binary path:', current?.binaryPath ?? 'gemini')
: undefined;
// Auto-detect gemini binary path
let binaryPath: string | undefined;
const detected = await whichBinary('gemini');
if (detected) {
log(`Found gemini at: ${detected}`);
binaryPath = detected;
} else {
log('Warning: gemini binary not found in PATH');
const manualPath = await prompt.input('Binary path (or install with: npm i -g @google/gemini-cli):');
if (manualPath) binaryPath = manualPath;
}
return { provider: 'gemini-cli', model: finalModel, binaryPath };
}

View File

@@ -1,17 +1,22 @@
import { Command } from 'commander';
import http from 'node:http';
import { execFile } from 'node:child_process';
import { promisify } from 'node:util';
import { loadConfig } from '../config/index.js';
import type { ConfigLoaderDeps } from '../config/index.js';
import type { ConfigLoaderDeps, LlmConfig } from '../config/index.js';
import { loadCredentials } from '../auth/index.js';
import type { CredentialsDeps } from '../auth/index.js';
import { formatJson, formatYaml } from '../formatters/index.js';
import { APP_VERSION } from '@mcpctl/shared';
const execFileAsync = promisify(execFile);
export interface StatusCommandDeps {
configDeps: Partial<ConfigLoaderDeps>;
credentialsDeps: Partial<CredentialsDeps>;
log: (...args: string[]) => void;
checkHealth: (url: string) => Promise<boolean>;
checkLlm: (llm: LlmConfig) => Promise<string>;
}
function defaultCheckHealth(url: string): Promise<boolean> {
@@ -28,15 +33,47 @@ function defaultCheckHealth(url: string): Promise<boolean> {
});
}
/**
* Quick LLM health check. Returns 'ok', 'binary not found', 'auth error', etc.
*/
/**
 * Quick LLM health check. Never throws; returns a short status string:
 * 'ok', 'binary not found', 'not authenticated', 'unreachable',
 * 'unexpected response', 'ok (key stored)', or 'error: <truncated message>'.
 */
async function defaultCheckLlm(llm: LlmConfig): Promise<string> {
  if (llm.provider === 'gemini-cli') {
    const bin = llm.binaryPath ?? 'gemini';
    try {
      // Cheap round-trip prompt to verify the binary runs and is authenticated.
      const { stdout } = await execFileAsync(
        bin,
        ['-p', 'respond with exactly: ok', '-m', llm.model ?? 'gemini-2.5-flash', '-o', 'text'],
        { timeout: 15000 },
      );
      return stdout.trim().toLowerCase().includes('ok') ? 'ok' : 'unexpected response';
    } catch (err: unknown) {
      const message = err instanceof Error ? err.message : String(err);
      // Promisified execFile rejections also carry the child's stderr; auth
      // failures are typically printed there. Match case-insensitively so
      // e.g. "Authentication failed" or "Login required" are detected.
      const stderr =
        typeof (err as { stderr?: unknown }).stderr === 'string'
          ? (err as { stderr: string }).stderr
          : '';
      const haystack = `${message}\n${stderr}`.toLowerCase();
      if (haystack.includes('enoent')) return 'binary not found';
      if (['auth', 'token', 'login', '401'].some((needle) => haystack.includes(needle))) {
        return 'not authenticated';
      }
      return `error: ${message.slice(0, 80)}`;
    }
  }
  if (llm.provider === 'ollama') {
    const url = llm.url ?? 'http://localhost:11434';
    try {
      const ok = await defaultCheckHealth(url);
      return ok ? 'ok' : 'unreachable';
    } catch {
      return 'unreachable';
    }
  }
  // For API-key providers, we don't want to make a billable call on every status check
  return 'ok (key stored)';
}
const defaultDeps: StatusCommandDeps = {
configDeps: {},
credentialsDeps: {},
log: (...args) => console.log(...args),
checkHealth: defaultCheckHealth,
checkLlm: defaultCheckLlm,
};
export function createStatusCommand(deps?: Partial<StatusCommandDeps>): Command {
const { configDeps, credentialsDeps, log, checkHealth } = { ...defaultDeps, ...deps };
const { configDeps, credentialsDeps, log, checkHealth, checkLlm } = { ...defaultDeps, ...deps };
return new Command('status')
.description('Show mcpctl status and connectivity')
@@ -45,13 +82,22 @@ export function createStatusCommand(deps?: Partial<StatusCommandDeps>): Command
const config = loadConfig(configDeps);
const creds = loadCredentials(credentialsDeps);
const [mcplocalReachable, mcpdReachable] = await Promise.all([
const llmLabel = config.llm && config.llm.provider !== 'none'
? `${config.llm.provider}${config.llm.model ? ` / ${config.llm.model}` : ''}`
: null;
// Run health checks in parallel (include LLM check if configured)
const healthPromises: [Promise<boolean>, Promise<boolean>, Promise<string | null>] = [
checkHealth(config.mcplocalUrl),
checkHealth(config.mcpdUrl),
]);
config.llm && config.llm.provider !== 'none'
? checkLlm(config.llm)
: Promise.resolve(null),
];
const [mcplocalReachable, mcpdReachable, llmStatus] = await Promise.all(healthPromises);
const llm = config.llm && config.llm.provider !== 'none'
? `${config.llm.provider}${config.llm.model ? ` / ${config.llm.model}` : ''}`
const llm = llmLabel
? llmStatus === 'ok' ? llmLabel : `${llmLabel} (${llmStatus})`
: null;
const status = {
@@ -64,6 +110,7 @@ export function createStatusCommand(deps?: Partial<StatusCommandDeps>): Command
registries: config.registries,
outputFormat: config.outputFormat,
llm,
llmStatus,
};
if (opts.output === 'json') {
@@ -78,7 +125,6 @@ export function createStatusCommand(deps?: Partial<StatusCommandDeps>): Command
log(`Registries: ${status.registries.join(', ')}`);
log(`Output: ${status.outputFormat}`);
log(`LLM: ${status.llm ?? "not configured (run 'mcpctl config setup')"}`);
}
});
}

View File

@@ -42,6 +42,7 @@ function buildDeps(overrides: {
secrets?: Record<string, string>;
answers?: unknown[];
fetchModels?: ConfigSetupDeps['fetchModels'];
whichBinary?: ConfigSetupDeps['whichBinary'];
} = {}): ConfigSetupDeps {
return {
configDeps: { configDir: tempDir },
@@ -49,6 +50,7 @@ function buildDeps(overrides: {
log: (...args: string[]) => logs.push(args.join(' ')),
prompt: mockPrompt(overrides.answers ?? []),
fetchModels: overrides.fetchModels ?? vi.fn(async () => []),
whichBinary: overrides.whichBinary ?? vi.fn(async () => '/usr/bin/gemini'),
};
}
@@ -76,26 +78,49 @@ describe('config setup wizard', () => {
});
describe('provider: gemini-cli', () => {
it('saves gemini-cli with selected model', async () => {
// Answers: select provider, select model, confirm custom binary=false
const deps = buildDeps({ answers: ['gemini-cli', 'gemini-2.5-flash', false] });
it('auto-detects binary path and saves config', async () => {
// Answers: select provider, select model (no binary prompt — auto-detected)
const deps = buildDeps({
answers: ['gemini-cli', 'gemini-2.5-flash'],
whichBinary: vi.fn(async () => '/home/user/.npm-global/bin/gemini'),
});
await runSetup(deps);
const config = readConfig();
expect((config.llm as Record<string, unknown>).provider).toBe('gemini-cli');
expect((config.llm as Record<string, unknown>).model).toBe('gemini-2.5-flash');
const llm = config.llm as Record<string, unknown>;
expect(llm.provider).toBe('gemini-cli');
expect(llm.model).toBe('gemini-2.5-flash');
expect(llm.binaryPath).toBe('/home/user/.npm-global/bin/gemini');
expect(logs.some((l) => l.includes('Found gemini at'))).toBe(true);
cleanup();
});
it('saves gemini-cli with custom model and binary path', async () => {
// Answers: select provider, select custom, enter model name, confirm custom binary=true, enter path
const deps = buildDeps({ answers: ['gemini-cli', '__custom__', 'gemini-3.0-flash', true, '/opt/gemini'] });
it('prompts for manual path when binary not found', async () => {
// Answers: select provider, select model, enter manual path
const deps = buildDeps({
answers: ['gemini-cli', 'gemini-2.5-flash', '/opt/gemini'],
whichBinary: vi.fn(async () => null),
});
await runSetup(deps);
const config = readConfig();
const llm = config.llm as Record<string, unknown>;
expect(llm.binaryPath).toBe('/opt/gemini');
expect(logs.some((l) => l.includes('not found'))).toBe(true);
cleanup();
});
it('saves gemini-cli with custom model', async () => {
  // Answers: select provider, select custom, enter model name
  const deps = buildDeps({
    answers: ['gemini-cli', '__custom__', 'gemini-3.0-flash'],
    whichBinary: vi.fn(async () => '/usr/bin/gemini'),
  });
  await runSetup(deps);
  const config = readConfig();
  const llm = config.llm as Record<string, unknown>;
  expect(llm.model).toBe('gemini-3.0-flash');
  // binaryPath comes from the whichBinary stub (auto-detect path); the old
  // manual-entry value '/opt/gemini' no longer applies to this flow.
  expect(llm.binaryPath).toBe('/usr/bin/gemini');
  cleanup();
});
});
@@ -250,7 +275,7 @@ describe('config setup wizard', () => {
describe('output messages', () => {
it('shows restart instruction', async () => {
const deps = buildDeps({ answers: ['gemini-cli', 'gemini-2.5-flash', false] });
const deps = buildDeps({ answers: ['gemini-cli', 'gemini-2.5-flash'] });
await runSetup(deps);
expect(logs.some((l) => l.includes('systemctl --user restart mcplocal'))).toBe(true);
@@ -258,7 +283,7 @@ describe('config setup wizard', () => {
});
it('shows configured provider and model', async () => {
const deps = buildDeps({ answers: ['gemini-cli', 'gemini-2.5-flash', false] });
const deps = buildDeps({ answers: ['gemini-cli', 'gemini-2.5-flash'] });
await runSetup(deps);
expect(logs.some((l) => l.includes('gemini-cli') && l.includes('gemini-2.5-flash'))).toBe(true);

View File

@@ -141,18 +141,48 @@ describe('status command', () => {
expect(out).toContain('mcpctl config setup');
});
it('shows configured LLM provider and model', async () => {
it('shows configured LLM provider and model when healthy', async () => {
saveConfig({ ...DEFAULT_CONFIG, llm: { provider: 'anthropic', model: 'claude-haiku-3-5-20241022' } }, { configDir: tempDir });
const cmd = createStatusCommand({
configDeps: { configDir: tempDir },
credentialsDeps: { configDir: tempDir },
log,
checkHealth: async () => true,
checkLlm: async () => 'ok',
});
await cmd.parseAsync([], { from: 'user' });
const out = output.join('\n');
expect(out).toContain('LLM:');
expect(out).toContain('anthropic / claude-haiku-3-5-20241022');
// Should NOT show error status when ok
expect(out).not.toContain('(ok)');
});
it('shows LLM error status when check fails', async () => {
saveConfig({ ...DEFAULT_CONFIG, llm: { provider: 'gemini-cli', model: 'gemini-2.5-flash' } }, { configDir: tempDir });
const cmd = createStatusCommand({
configDeps: { configDir: tempDir },
credentialsDeps: { configDir: tempDir },
log,
checkHealth: async () => true,
checkLlm: async () => 'not authenticated',
});
await cmd.parseAsync([], { from: 'user' });
const out = output.join('\n');
expect(out).toContain('gemini-cli / gemini-2.5-flash (not authenticated)');
});
it('shows binary not found status', async () => {
saveConfig({ ...DEFAULT_CONFIG, llm: { provider: 'gemini-cli', model: 'gemini-2.5-flash' } }, { configDir: tempDir });
const cmd = createStatusCommand({
configDeps: { configDir: tempDir },
credentialsDeps: { configDir: tempDir },
log,
checkHealth: async () => true,
checkLlm: async () => 'binary not found',
});
await cmd.parseAsync([], { from: 'user' });
expect(output.join('\n')).toContain('(binary not found)');
});
it('shows not configured when LLM provider is none', async () => {
@@ -167,17 +197,19 @@ describe('status command', () => {
expect(output.join('\n')).toContain('not configured');
});
it('includes llm field in JSON output', async () => {
it('includes llm and llmStatus in JSON output', async () => {
saveConfig({ ...DEFAULT_CONFIG, llm: { provider: 'gemini-cli', model: 'gemini-2.5-flash' } }, { configDir: tempDir });
const cmd = createStatusCommand({
configDeps: { configDir: tempDir },
credentialsDeps: { configDir: tempDir },
log,
checkHealth: async () => true,
checkLlm: async () => 'ok',
});
await cmd.parseAsync(['-o', 'json'], { from: 'user' });
const parsed = JSON.parse(output[0]) as Record<string, unknown>;
expect(parsed['llm']).toBe('gemini-cli / gemini-2.5-flash');
expect(parsed['llmStatus']).toBe('ok');
});
it('includes null llm in JSON output when not configured', async () => {
@@ -190,5 +222,6 @@ describe('status command', () => {
await cmd.parseAsync(['-o', 'json'], { from: 'user' });
const parsed = JSON.parse(output[0]) as Record<string, unknown>;
expect(parsed['llm']).toBeNull();
expect(parsed['llmStatus']).toBeNull();
});
});