diff --git a/src/cli/src/commands/config-setup.ts b/src/cli/src/commands/config-setup.ts new file mode 100644 index 0000000..2e1cb3a --- /dev/null +++ b/src/cli/src/commands/config-setup.ts @@ -0,0 +1,319 @@ +import { Command } from 'commander'; +import http from 'node:http'; +import https from 'node:https'; +import { loadConfig, saveConfig } from '../config/index.js'; +import type { ConfigLoaderDeps, McpctlConfig, LlmConfig, LlmProviderName } from '../config/index.js'; +import type { SecretStore } from '@mcpctl/shared'; +import { createSecretStore } from '@mcpctl/shared'; + +export interface ConfigSetupPrompt { + select(message: string, choices: Array<{ name: string; value: T; description?: string }>): Promise; + input(message: string, defaultValue?: string): Promise; + password(message: string): Promise; + confirm(message: string, defaultValue?: boolean): Promise; +} + +export interface ConfigSetupDeps { + configDeps: Partial; + secretStore: SecretStore; + log: (...args: string[]) => void; + prompt: ConfigSetupPrompt; + fetchModels: (url: string, path: string) => Promise; +} + +interface ProviderChoice { + name: string; + value: LlmProviderName; + description: string; +} + +const PROVIDER_CHOICES: ProviderChoice[] = [ + { name: 'Gemini CLI', value: 'gemini-cli', description: 'Google Gemini via local CLI (free, no API key)' }, + { name: 'Ollama', value: 'ollama', description: 'Local models via Ollama' }, + { name: 'Anthropic (Claude)', value: 'anthropic', description: 'Claude API (requires API key)' }, + { name: 'vLLM', value: 'vllm', description: 'Self-hosted vLLM (OpenAI-compatible)' }, + { name: 'OpenAI', value: 'openai', description: 'OpenAI API (requires API key)' }, + { name: 'DeepSeek', value: 'deepseek', description: 'DeepSeek API (requires API key)' }, + { name: 'None (disable)', value: 'none', description: 'Disable LLM features' }, +]; + +const GEMINI_MODELS = ['gemini-2.5-flash', 'gemini-2.5-pro', 'gemini-2.0-flash']; +const ANTHROPIC_MODELS = 
['claude-3-5-haiku-20241022', 'claude-sonnet-4-20250514', 'claude-opus-4-20250514']; +const DEEPSEEK_MODELS = ['deepseek-chat', 'deepseek-reasoner']; + +function defaultFetchModels(baseUrl: string, path: string): Promise { + return new Promise((resolve) => { + const url = new URL(path, baseUrl); + const isHttps = url.protocol === 'https:'; + const transport = isHttps ? https : http; + + const req = transport.get({ + hostname: url.hostname, + port: url.port || (isHttps ? 443 : 80), + path: url.pathname, + timeout: 5000, + }, (res) => { + const chunks: Buffer[] = []; + res.on('data', (chunk: Buffer) => chunks.push(chunk)); + res.on('end', () => { + try { + const raw = Buffer.concat(chunks).toString('utf-8'); + const data = JSON.parse(raw) as { models?: Array<{ name: string }>; data?: Array<{ id: string }> }; + // Ollama format: { models: [{ name }] } + if (data.models) { + resolve(data.models.map((m) => m.name)); + return; + } + // OpenAI/vLLM format: { data: [{ id }] } + if (data.data) { + resolve(data.data.map((m) => m.id)); + return; + } + resolve([]); + } catch { + resolve([]); + } + }); + }); + req.on('error', () => resolve([])); + req.on('timeout', () => { req.destroy(); resolve([]); }); + }); +} + +async function defaultSelect(message: string, choices: Array<{ name: string; value: T; description?: string }>): Promise { + const { default: inquirer } = await import('inquirer'); + const { answer } = await inquirer.prompt([{ + type: 'list', + name: 'answer', + message, + choices: choices.map((c) => ({ + name: c.description ? 
`${c.name} — ${c.description}` : c.name, + value: c.value, + short: c.name, + })), + }]); + return answer as T; +} + +async function defaultInput(message: string, defaultValue?: string): Promise { + const { default: inquirer } = await import('inquirer'); + const { answer } = await inquirer.prompt([{ + type: 'input', + name: 'answer', + message, + default: defaultValue, + }]); + return answer as string; +} + +async function defaultPassword(message: string): Promise { + const { default: inquirer } = await import('inquirer'); + const { answer } = await inquirer.prompt([{ type: 'password', name: 'answer', message }]); + return answer as string; +} + +async function defaultConfirm(message: string, defaultValue?: boolean): Promise { + const { default: inquirer } = await import('inquirer'); + const { answer } = await inquirer.prompt([{ + type: 'confirm', + name: 'answer', + message, + default: defaultValue ?? true, + }]); + return answer as boolean; +} + +const defaultPrompt: ConfigSetupPrompt = { + select: defaultSelect, + input: defaultInput, + password: defaultPassword, + confirm: defaultConfirm, +}; + +export function createConfigSetupCommand(deps?: Partial): Command { + return new Command('setup') + .description('Interactive LLM provider setup wizard') + .action(async () => { + const configDeps = deps?.configDeps ?? {}; + const log = deps?.log ?? ((...args: string[]) => console.log(...args)); + const prompt = deps?.prompt ?? defaultPrompt; + const fetchModels = deps?.fetchModels ?? defaultFetchModels; + const secretStore = deps?.secretStore ?? 
await createSecretStore(); + + const config = loadConfig(configDeps); + const currentLlm = config.llm; + + // Annotate current provider in choices + const choices = PROVIDER_CHOICES.map((c) => { + if (currentLlm?.provider === c.value) { + return { ...c, name: `${c.name} (current)` }; + } + return c; + }); + + const provider = await prompt.select('Select LLM provider:', choices); + + if (provider === 'none') { + const updated: McpctlConfig = { ...config, llm: { provider: 'none' } }; + saveConfig(updated, configDeps); + log('LLM disabled. Restart mcplocal: systemctl --user restart mcplocal'); + return; + } + + let llmConfig: LlmConfig; + + switch (provider) { + case 'gemini-cli': + llmConfig = await setupGeminiCli(prompt, currentLlm); + break; + case 'ollama': + llmConfig = await setupOllama(prompt, fetchModels, currentLlm); + break; + case 'anthropic': + llmConfig = await setupApiKeyProvider(prompt, secretStore, 'anthropic', 'anthropic-api-key', ANTHROPIC_MODELS, currentLlm); + break; + case 'vllm': + llmConfig = await setupVllm(prompt, fetchModels, currentLlm); + break; + case 'openai': + llmConfig = await setupApiKeyProvider(prompt, secretStore, 'openai', 'openai-api-key', [], currentLlm); + break; + case 'deepseek': + llmConfig = await setupApiKeyProvider(prompt, secretStore, 'deepseek', 'deepseek-api-key', DEEPSEEK_MODELS, currentLlm); + break; + default: + return; + } + + const updated: McpctlConfig = { ...config, llm: llmConfig }; + saveConfig(updated, configDeps); + log(`\nLLM configured: ${llmConfig.provider}${llmConfig.model ? ` / ${llmConfig.model}` : ''}`); + log('Restart mcplocal: systemctl --user restart mcplocal'); + }); +} + +async function setupGeminiCli(prompt: ConfigSetupPrompt, current?: LlmConfig): Promise { + const model = await prompt.select('Select model:', [ + ...GEMINI_MODELS.map((m) => ({ + name: m === current?.model ? 
`${m} (current)` : m, + value: m, + })), + { name: 'Custom...', value: '__custom__' }, + ]); + + const finalModel = model === '__custom__' + ? await prompt.input('Model name:', current?.model) + : model; + + const customBinary = await prompt.confirm('Use custom binary path?', false); + const binaryPath = customBinary + ? await prompt.input('Binary path:', current?.binaryPath ?? 'gemini') + : undefined; + + return { provider: 'gemini-cli', model: finalModel, binaryPath }; +} + +async function setupOllama(prompt: ConfigSetupPrompt, fetchModels: ConfigSetupDeps['fetchModels'], current?: LlmConfig): Promise { + const url = await prompt.input('Ollama URL:', current?.url ?? 'http://localhost:11434'); + + // Try to fetch models from Ollama + const models = await fetchModels(url, '/api/tags'); + let model: string; + + if (models.length > 0) { + const choices = models.map((m) => ({ + name: m === current?.model ? `${m} (current)` : m, + value: m, + })); + choices.push({ name: 'Custom...', value: '__custom__' }); + model = await prompt.select('Select model:', choices); + if (model === '__custom__') { + model = await prompt.input('Model name:', current?.model); + } + } else { + model = await prompt.input('Model name (could not fetch models):', current?.model ?? 'llama3.2'); + } + + return { provider: 'ollama', model, url }; +} + +async function setupVllm(prompt: ConfigSetupPrompt, fetchModels: ConfigSetupDeps['fetchModels'], current?: LlmConfig): Promise { + const url = await prompt.input('vLLM URL:', current?.url ?? 'http://localhost:8000'); + + // Try to fetch models from vLLM (OpenAI-compatible) + const models = await fetchModels(url, '/v1/models'); + let model: string; + + if (models.length > 0) { + const choices = models.map((m) => ({ + name: m === current?.model ? 
`${m} (current)` : m, + value: m, + })); + choices.push({ name: 'Custom...', value: '__custom__' }); + model = await prompt.select('Select model:', choices); + if (model === '__custom__') { + model = await prompt.input('Model name:', current?.model); + } + } else { + model = await prompt.input('Model name (could not fetch models):', current?.model ?? 'default'); + } + + return { provider: 'vllm', model, url }; +} + +async function setupApiKeyProvider( + prompt: ConfigSetupPrompt, + secretStore: SecretStore, + provider: LlmProviderName, + secretKey: string, + hardcodedModels: string[], + current?: LlmConfig, +): Promise { + // Check for existing API key + const existingKey = await secretStore.get(secretKey); + let apiKey: string; + + if (existingKey) { + const masked = `****${existingKey.slice(-4)}`; + const changeKey = await prompt.confirm(`API key stored (${masked}). Change it?`, false); + if (changeKey) { + apiKey = await prompt.password('API key:'); + } else { + apiKey = existingKey; + } + } else { + apiKey = await prompt.password('API key:'); + } + + // Store API key + if (apiKey !== existingKey) { + await secretStore.set(secretKey, apiKey); + } + + // Model selection + let model: string; + if (hardcodedModels.length > 0) { + const choices = hardcodedModels.map((m) => ({ + name: m === current?.model ? `${m} (current)` : m, + value: m, + })); + choices.push({ name: 'Custom...', value: '__custom__' }); + model = await prompt.select('Select model:', choices); + if (model === '__custom__') { + model = await prompt.input('Model name:', current?.model); + } + } else { + model = await prompt.input('Model name:', current?.model ?? 'gpt-4o'); + } + + // Optional custom URL for openai + let url: string | undefined; + if (provider === 'openai') { + const customUrl = await prompt.confirm('Use custom API endpoint?', false); + if (customUrl) { + url = await prompt.input('API URL:', current?.url ?? 
'https://api.openai.com'); + } + } + + return { provider, model, url }; +} diff --git a/src/cli/src/commands/config.ts b/src/cli/src/commands/config.ts index 7524776..099ae13 100644 --- a/src/cli/src/commands/config.ts +++ b/src/cli/src/commands/config.ts @@ -6,6 +6,7 @@ import { loadConfig, saveConfig, mergeConfig, getConfigPath, DEFAULT_CONFIG } fr import type { McpctlConfig, ConfigLoaderDeps } from '../config/index.js'; import { formatJson, formatYaml } from '../formatters/index.js'; import { saveCredentials, loadCredentials } from '../auth/index.js'; +import { createConfigSetupCommand } from './config-setup.js'; import type { CredentialsDeps, StoredCredentials } from '../auth/index.js'; import type { ApiClient } from '../api-client.js'; @@ -138,6 +139,8 @@ export function createConfigCommand(deps?: Partial, apiDeps?: registerClaudeCommand('claude', false); registerClaudeCommand('claude-generate', true); // backward compat + config.addCommand(createConfigSetupCommand({ configDeps })); + if (apiDeps) { const { client, credentialsDeps, log: apiLog } = apiDeps; diff --git a/src/cli/src/commands/status.ts b/src/cli/src/commands/status.ts index 6ea3e10..34c7e17 100644 --- a/src/cli/src/commands/status.ts +++ b/src/cli/src/commands/status.ts @@ -50,6 +50,10 @@ export function createStatusCommand(deps?: Partial): Command checkHealth(config.mcpdUrl), ]); + const llm = config.llm && config.llm.provider !== 'none' + ? `${config.llm.provider}${config.llm.model ? ` / ${config.llm.model}` : ''}` + : null; + const status = { version: APP_VERSION, mcplocalUrl: config.mcplocalUrl, @@ -59,6 +63,7 @@ export function createStatusCommand(deps?: Partial): Command auth: creds ? { user: creds.user } : null, registries: config.registries, outputFormat: config.outputFormat, + llm, }; if (opts.output === 'json') { @@ -72,6 +77,8 @@ export function createStatusCommand(deps?: Partial): Command log(`Auth: ${creds ? 
`logged in as ${creds.user}` : 'not logged in'}`); log(`Registries: ${status.registries.join(', ')}`); log(`Output: ${status.outputFormat}`); + log(`LLM: ${status.llm ?? "not configured (run 'mcpctl config setup')"}`); + } }); } diff --git a/src/cli/src/config/index.ts b/src/cli/src/config/index.ts index 8765cf8..b7d856a 100644 --- a/src/cli/src/config/index.ts +++ b/src/cli/src/config/index.ts @@ -1,4 +1,4 @@ -export { McpctlConfigSchema, DEFAULT_CONFIG } from './schema.js'; -export type { McpctlConfig } from './schema.js'; +export { McpctlConfigSchema, LlmConfigSchema, LLM_PROVIDERS, DEFAULT_CONFIG } from './schema.js'; +export type { McpctlConfig, LlmConfig, LlmProviderName } from './schema.js'; export { loadConfig, saveConfig, mergeConfig, getConfigPath } from './loader.js'; export type { ConfigLoaderDeps } from './loader.js'; diff --git a/src/cli/src/config/schema.ts b/src/cli/src/config/schema.ts index b398406..e923d8f 100644 --- a/src/cli/src/config/schema.ts +++ b/src/cli/src/config/schema.ts @@ -1,5 +1,21 @@ import { z } from 'zod'; +export const LLM_PROVIDERS = ['gemini-cli', 'ollama', 'anthropic', 'openai', 'deepseek', 'vllm', 'none'] as const; +export type LlmProviderName = typeof LLM_PROVIDERS[number]; + +export const LlmConfigSchema = z.object({ + /** LLM provider name */ + provider: z.enum(LLM_PROVIDERS), + /** Model name */ + model: z.string().optional(), + /** Provider URL (for ollama, vllm, openai with custom endpoint) */ + url: z.string().optional(), + /** Binary path override (for gemini-cli) */ + binaryPath: z.string().optional(), +}).strict(); + +export type LlmConfig = z.infer; + export const McpctlConfigSchema = z.object({ /** mcplocal daemon endpoint (local LLM pre-processing proxy) */ mcplocalUrl: z.string().default('http://localhost:3200'), @@ -19,6 +35,8 @@ export const McpctlConfigSchema = z.object({ outputFormat: z.enum(['table', 'json', 'yaml']).default('table'), /** Smithery API key */ smitheryApiKey: z.string().optional(), + /** LLM 
provider configuration for smart features (pagination summaries, etc.) */ + llm: LlmConfigSchema.optional(), }).transform((cfg) => { // Backward compatibility: if old daemonUrl is set but mcplocalUrl wasn't explicitly changed, // use daemonUrl as mcplocalUrl diff --git a/src/cli/tests/commands/config-setup.test.ts b/src/cli/tests/commands/config-setup.test.ts new file mode 100644 index 0000000..53347b4 --- /dev/null +++ b/src/cli/tests/commands/config-setup.test.ts @@ -0,0 +1,268 @@ +import { describe, it, expect, vi, beforeEach } from 'vitest'; +import { createConfigSetupCommand } from '../../src/commands/config-setup.js'; +import type { ConfigSetupDeps, ConfigSetupPrompt } from '../../src/commands/config-setup.js'; +import type { SecretStore } from '@mcpctl/shared'; +import { mkdtempSync, rmSync, readFileSync } from 'node:fs'; +import { join } from 'node:path'; +import { tmpdir } from 'node:os'; + +let tempDir: string; +let logs: string[]; + +beforeEach(() => { + tempDir = mkdtempSync(join(tmpdir(), 'mcpctl-config-setup-test-')); + logs = []; +}); + +function cleanup() { + rmSync(tempDir, { recursive: true, force: true }); +} + +function mockSecretStore(secrets: Record = {}): SecretStore { + const store: Record = { ...secrets }; + return { + get: vi.fn(async (key: string) => store[key] ?? 
null), + set: vi.fn(async (key: string, value: string) => { store[key] = value; }), + delete: vi.fn(async () => true), + backend: () => 'mock', + }; +} + +function mockPrompt(answers: unknown[]): ConfigSetupPrompt { + let callIndex = 0; + return { + select: vi.fn(async () => answers[callIndex++]), + input: vi.fn(async () => answers[callIndex++] as string), + password: vi.fn(async () => answers[callIndex++] as string), + confirm: vi.fn(async () => answers[callIndex++] as boolean), + }; +} + +function buildDeps(overrides: { + secrets?: Record; + answers?: unknown[]; + fetchModels?: ConfigSetupDeps['fetchModels']; +} = {}): ConfigSetupDeps { + return { + configDeps: { configDir: tempDir }, + secretStore: mockSecretStore(overrides.secrets), + log: (...args: string[]) => logs.push(args.join(' ')), + prompt: mockPrompt(overrides.answers ?? []), + fetchModels: overrides.fetchModels ?? vi.fn(async () => []), + }; +} + +function readConfig(): Record { + const raw = readFileSync(join(tempDir, 'config.json'), 'utf-8'); + return JSON.parse(raw) as Record; +} + +async function runSetup(deps: ConfigSetupDeps): Promise { + const cmd = createConfigSetupCommand(deps); + await cmd.parseAsync([], { from: 'user' }); +} + +describe('config setup wizard', () => { + describe('provider: none', () => { + it('disables LLM and saves config', async () => { + const deps = buildDeps({ answers: ['none'] }); + await runSetup(deps); + + const config = readConfig(); + expect(config.llm).toEqual({ provider: 'none' }); + expect(logs.some((l) => l.includes('LLM disabled'))).toBe(true); + cleanup(); + }); + }); + + describe('provider: gemini-cli', () => { + it('saves gemini-cli with selected model', async () => { + // Answers: select provider, select model, confirm custom binary=false + const deps = buildDeps({ answers: ['gemini-cli', 'gemini-2.5-flash', false] }); + await runSetup(deps); + + const config = readConfig(); + expect((config.llm as Record).provider).toBe('gemini-cli'); + expect((config.llm 
as Record).model).toBe('gemini-2.5-flash'); + cleanup(); + }); + + it('saves gemini-cli with custom model and binary path', async () => { + // Answers: select provider, select custom, enter model name, confirm custom binary=true, enter path + const deps = buildDeps({ answers: ['gemini-cli', '__custom__', 'gemini-3.0-flash', true, '/opt/gemini'] }); + await runSetup(deps); + + const config = readConfig(); + const llm = config.llm as Record; + expect(llm.model).toBe('gemini-3.0-flash'); + expect(llm.binaryPath).toBe('/opt/gemini'); + cleanup(); + }); + }); + + describe('provider: ollama', () => { + it('fetches models and allows selection', async () => { + const fetchModels = vi.fn(async () => ['llama3.2', 'codellama', 'mistral']); + // Answers: select provider, enter URL, select model + const deps = buildDeps({ + answers: ['ollama', 'http://localhost:11434', 'codellama'], + fetchModels, + }); + await runSetup(deps); + + expect(fetchModels).toHaveBeenCalledWith('http://localhost:11434', '/api/tags'); + const config = readConfig(); + const llm = config.llm as Record; + expect(llm.provider).toBe('ollama'); + expect(llm.model).toBe('codellama'); + expect(llm.url).toBe('http://localhost:11434'); + cleanup(); + }); + + it('falls back to manual input when fetch fails', async () => { + const fetchModels = vi.fn(async () => []); + // Answers: select provider, enter URL, enter model manually + const deps = buildDeps({ + answers: ['ollama', 'http://localhost:11434', 'llama3.2'], + fetchModels, + }); + await runSetup(deps); + + const config = readConfig(); + expect((config.llm as Record).model).toBe('llama3.2'); + cleanup(); + }); + }); + + describe('provider: anthropic', () => { + it('prompts for API key and saves to secret store', async () => { + // Answers: select provider, enter API key, select model + const deps = buildDeps({ + answers: ['anthropic', 'sk-ant-new-key', 'claude-haiku-3-5-20241022'], + }); + await runSetup(deps); + + 
expect(deps.secretStore.set).toHaveBeenCalledWith('anthropic-api-key', 'sk-ant-new-key'); + const config = readConfig(); + const llm = config.llm as Record; + expect(llm.provider).toBe('anthropic'); + expect(llm.model).toBe('claude-haiku-3-5-20241022'); + // API key should NOT be in config file + expect(llm).not.toHaveProperty('apiKey'); + cleanup(); + }); + + it('shows existing key masked and allows keeping it', async () => { + // Answers: select provider, confirm change=false, select model + const deps = buildDeps({ + secrets: { 'anthropic-api-key': 'sk-ant-existing-key-1234' }, + answers: ['anthropic', false, 'claude-sonnet-4-20250514'], + }); + await runSetup(deps); + + // Should NOT have called set (kept existing key) + expect(deps.secretStore.set).not.toHaveBeenCalled(); + const config = readConfig(); + expect((config.llm as Record).model).toBe('claude-sonnet-4-20250514'); + cleanup(); + }); + + it('allows replacing existing key', async () => { + // Answers: select provider, confirm change=true, enter new key, select model + const deps = buildDeps({ + secrets: { 'anthropic-api-key': 'sk-ant-old' }, + answers: ['anthropic', true, 'sk-ant-new', 'claude-haiku-3-5-20241022'], + }); + await runSetup(deps); + + expect(deps.secretStore.set).toHaveBeenCalledWith('anthropic-api-key', 'sk-ant-new'); + cleanup(); + }); + }); + + describe('provider: vllm', () => { + it('fetches models from vLLM and allows selection', async () => { + const fetchModels = vi.fn(async () => ['my-model', 'llama-70b']); + // Answers: select provider, enter URL, select model + const deps = buildDeps({ + answers: ['vllm', 'http://gpu:8000', 'llama-70b'], + fetchModels, + }); + await runSetup(deps); + + expect(fetchModels).toHaveBeenCalledWith('http://gpu:8000', '/v1/models'); + const config = readConfig(); + const llm = config.llm as Record; + expect(llm.provider).toBe('vllm'); + expect(llm.url).toBe('http://gpu:8000'); + expect(llm.model).toBe('llama-70b'); + cleanup(); + }); + }); + + 
describe('provider: openai', () => { + it('prompts for key, model, and optional custom endpoint', async () => { + // Answers: select provider, enter key, enter model, confirm custom URL=true, enter URL + const deps = buildDeps({ + answers: ['openai', 'sk-openai-key', 'gpt-4o', true, 'https://custom.api.com'], + }); + await runSetup(deps); + + expect(deps.secretStore.set).toHaveBeenCalledWith('openai-api-key', 'sk-openai-key'); + const config = readConfig(); + const llm = config.llm as Record; + expect(llm.provider).toBe('openai'); + expect(llm.model).toBe('gpt-4o'); + expect(llm.url).toBe('https://custom.api.com'); + cleanup(); + }); + + it('skips custom URL when not requested', async () => { + // Answers: select provider, enter key, enter model, confirm custom URL=false + const deps = buildDeps({ + answers: ['openai', 'sk-openai-key', 'gpt-4o-mini', false], + }); + await runSetup(deps); + + const config = readConfig(); + const llm = config.llm as Record; + expect(llm.url).toBeUndefined(); + cleanup(); + }); + }); + + describe('provider: deepseek', () => { + it('prompts for key and model', async () => { + // Answers: select provider, enter key, select model + const deps = buildDeps({ + answers: ['deepseek', 'sk-ds-key', 'deepseek-chat'], + }); + await runSetup(deps); + + expect(deps.secretStore.set).toHaveBeenCalledWith('deepseek-api-key', 'sk-ds-key'); + const config = readConfig(); + const llm = config.llm as Record; + expect(llm.provider).toBe('deepseek'); + expect(llm.model).toBe('deepseek-chat'); + cleanup(); + }); + }); + + describe('output messages', () => { + it('shows restart instruction', async () => { + const deps = buildDeps({ answers: ['gemini-cli', 'gemini-2.5-flash', false] }); + await runSetup(deps); + + expect(logs.some((l) => l.includes('systemctl --user restart mcplocal'))).toBe(true); + cleanup(); + }); + + it('shows configured provider and model', async () => { + const deps = buildDeps({ answers: ['gemini-cli', 'gemini-2.5-flash', false] }); + 
await runSetup(deps); + + expect(logs.some((l) => l.includes('gemini-cli') && l.includes('gemini-2.5-flash'))).toBe(true); + cleanup(); + }); + }); +}); diff --git a/src/cli/tests/commands/status.test.ts b/src/cli/tests/commands/status.test.ts index fc97dae..8616efa 100644 --- a/src/cli/tests/commands/status.test.ts +++ b/src/cli/tests/commands/status.test.ts @@ -126,4 +126,69 @@ describe('status command', () => { expect(output.join('\n')).toContain('official'); expect(output.join('\n')).not.toContain('glama'); }); + + it('shows LLM not configured hint when no LLM is set', async () => { + const cmd = createStatusCommand({ + configDeps: { configDir: tempDir }, + credentialsDeps: { configDir: tempDir }, + log, + checkHealth: async () => true, + }); + await cmd.parseAsync([], { from: 'user' }); + const out = output.join('\n'); + expect(out).toContain('LLM:'); + expect(out).toContain('not configured'); + expect(out).toContain('mcpctl config setup'); + }); + + it('shows configured LLM provider and model', async () => { + saveConfig({ ...DEFAULT_CONFIG, llm: { provider: 'anthropic', model: 'claude-haiku-3-5-20241022' } }, { configDir: tempDir }); + const cmd = createStatusCommand({ + configDeps: { configDir: tempDir }, + credentialsDeps: { configDir: tempDir }, + log, + checkHealth: async () => true, + }); + await cmd.parseAsync([], { from: 'user' }); + const out = output.join('\n'); + expect(out).toContain('LLM:'); + expect(out).toContain('anthropic / claude-haiku-3-5-20241022'); + }); + + it('shows not configured when LLM provider is none', async () => { + saveConfig({ ...DEFAULT_CONFIG, llm: { provider: 'none' } }, { configDir: tempDir }); + const cmd = createStatusCommand({ + configDeps: { configDir: tempDir }, + credentialsDeps: { configDir: tempDir }, + log, + checkHealth: async () => true, + }); + await cmd.parseAsync([], { from: 'user' }); + expect(output.join('\n')).toContain('not configured'); + }); + + it('includes llm field in JSON output', async () => { + 
saveConfig({ ...DEFAULT_CONFIG, llm: { provider: 'gemini-cli', model: 'gemini-2.5-flash' } }, { configDir: tempDir }); + const cmd = createStatusCommand({ + configDeps: { configDir: tempDir }, + credentialsDeps: { configDir: tempDir }, + log, + checkHealth: async () => true, + }); + await cmd.parseAsync(['-o', 'json'], { from: 'user' }); + const parsed = JSON.parse(output[0]) as Record; + expect(parsed['llm']).toBe('gemini-cli / gemini-2.5-flash'); + }); + + it('includes null llm in JSON output when not configured', async () => { + const cmd = createStatusCommand({ + configDeps: { configDir: tempDir }, + credentialsDeps: { configDir: tempDir }, + log, + checkHealth: async () => true, + }); + await cmd.parseAsync(['-o', 'json'], { from: 'user' }); + const parsed = JSON.parse(output[0]) as Record; + expect(parsed['llm']).toBeNull(); + }); }); diff --git a/src/mcplocal/src/http/config.ts b/src/mcplocal/src/http/config.ts index d298d9f..60a401b 100644 --- a/src/mcplocal/src/http/config.ts +++ b/src/mcplocal/src/http/config.ts @@ -37,6 +37,30 @@ function loadUserToken(): string { } } +export interface LlmFileConfig { + provider: string; + model?: string; + url?: string; + binaryPath?: string; +} + +/** + * Load LLM configuration from ~/.mcpctl/config.json. + * Returns undefined if no LLM section is configured. + */ +export function loadLlmConfig(): LlmFileConfig | undefined { + try { + const configPath = join(homedir(), '.mcpctl', 'config.json'); + if (!existsSync(configPath)) return undefined; + const raw = readFileSync(configPath, 'utf-8'); + const parsed = JSON.parse(raw) as { llm?: LlmFileConfig }; + if (!parsed.llm?.provider || parsed.llm.provider === 'none') return undefined; + return parsed.llm; + } catch { + return undefined; + } +} + export function loadHttpConfig(env: Record = process.env): HttpConfig { const portStr = env['MCPLOCAL_HTTP_PORT']; const port = portStr !== undefined ? 
parseInt(portStr, 10) : DEFAULT_HTTP_PORT; diff --git a/src/mcplocal/src/http/project-mcp-endpoint.ts b/src/mcplocal/src/http/project-mcp-endpoint.ts index eae0a91..dea9c2c 100644 --- a/src/mcplocal/src/http/project-mcp-endpoint.ts +++ b/src/mcplocal/src/http/project-mcp-endpoint.ts @@ -15,6 +15,7 @@ import { McpRouter } from '../router.js'; import { ResponsePaginator } from '../llm/pagination.js'; import { refreshProjectUpstreams } from '../discovery.js'; import type { McpdClient } from './mcpd-client.js'; +import type { ProviderRegistry } from '../providers/registry.js'; import type { JsonRpcRequest } from '../types.js'; interface ProjectCacheEntry { @@ -29,7 +30,7 @@ interface SessionEntry { const CACHE_TTL_MS = 60_000; // 60 seconds -export function registerProjectMcpEndpoint(app: FastifyInstance, mcpdClient: McpdClient): void { +export function registerProjectMcpEndpoint(app: FastifyInstance, mcpdClient: McpdClient, providerRegistry?: ProviderRegistry | null): void { const projectCache = new Map(); const sessions = new Map(); @@ -45,8 +46,8 @@ export function registerProjectMcpEndpoint(app: FastifyInstance, mcpdClient: Mcp const router = existing?.router ?? new McpRouter(); await refreshProjectUpstreams(router, mcpdClient, projectName, authToken); - // Wire pagination support (no LLM provider for now — simple index fallback) - router.setPaginator(new ResponsePaginator(null)); + // Wire pagination support with LLM provider if configured + router.setPaginator(new ResponsePaginator(providerRegistry?.getActive() ?? 
null)); // Configure prompt resources with SA-scoped client for RBAC const saClient = mcpdClient.withHeaders({ 'X-Service-Account': `project:${projectName}` }); diff --git a/src/mcplocal/src/http/server.ts b/src/mcplocal/src/http/server.ts index 01ab7fc..d8380bc 100644 --- a/src/mcplocal/src/http/server.ts +++ b/src/mcplocal/src/http/server.ts @@ -10,11 +10,13 @@ import { registerProjectMcpEndpoint } from './project-mcp-endpoint.js'; import type { McpRouter } from '../router.js'; import type { HealthMonitor } from '../health.js'; import type { TieredHealthMonitor } from '../health/tiered.js'; +import type { ProviderRegistry } from '../providers/registry.js'; export interface HttpServerDeps { router: McpRouter; healthMonitor?: HealthMonitor | undefined; tieredHealthMonitor?: TieredHealthMonitor | undefined; + providerRegistry?: ProviderRegistry | null | undefined; } export async function createHttpServer( @@ -87,7 +89,7 @@ export async function createHttpServer( registerMcpEndpoint(app, deps.router); // Project-scoped MCP endpoint at /projects/:projectName/mcp - registerProjectMcpEndpoint(app, mcpdClient); + registerProjectMcpEndpoint(app, mcpdClient, deps.providerRegistry); return app; } diff --git a/src/mcplocal/src/llm-config.ts b/src/mcplocal/src/llm-config.ts new file mode 100644 index 0000000..31f03c9 --- /dev/null +++ b/src/mcplocal/src/llm-config.ts @@ -0,0 +1,93 @@ +import type { SecretStore } from '@mcpctl/shared'; +import type { LlmFileConfig } from './http/config.js'; +import { ProviderRegistry } from './providers/registry.js'; +import { GeminiCliProvider } from './providers/gemini-cli.js'; +import { OllamaProvider } from './providers/ollama.js'; +import { AnthropicProvider } from './providers/anthropic.js'; +import { OpenAiProvider } from './providers/openai.js'; +import { DeepSeekProvider } from './providers/deepseek.js'; + +/** + * Create a ProviderRegistry from user config + secret store. 
+ * Returns an empty registry if config is undefined or provider is 'none'. + */ +export async function createProviderFromConfig( + config: LlmFileConfig | undefined, + secretStore: SecretStore, +): Promise { + const registry = new ProviderRegistry(); + if (!config?.provider || config.provider === 'none') return registry; + + switch (config.provider) { + case 'gemini-cli': + registry.register(new GeminiCliProvider({ + binaryPath: config.binaryPath, + defaultModel: config.model, + })); + break; + + case 'ollama': + registry.register(new OllamaProvider({ + baseUrl: config.url, + defaultModel: config.model, + })); + break; + + case 'anthropic': { + const apiKey = await secretStore.get('anthropic-api-key'); + if (!apiKey) { + process.stderr.write('Warning: Anthropic API key not found in secret store. Run "mcpctl config setup" to configure.\n'); + return registry; + } + registry.register(new AnthropicProvider({ + apiKey, + defaultModel: config.model, + })); + break; + } + + case 'openai': { + const apiKey = await secretStore.get('openai-api-key'); + if (!apiKey) { + process.stderr.write('Warning: OpenAI API key not found in secret store. Run "mcpctl config setup" to configure.\n'); + return registry; + } + registry.register(new OpenAiProvider({ + apiKey, + baseUrl: config.url, + defaultModel: config.model, + })); + break; + } + + case 'deepseek': { + const apiKey = await secretStore.get('deepseek-api-key'); + if (!apiKey) { + process.stderr.write('Warning: DeepSeek API key not found in secret store. Run "mcpctl config setup" to configure.\n'); + return registry; + } + registry.register(new DeepSeekProvider({ + apiKey, + baseUrl: config.url, + defaultModel: config.model, + })); + break; + } + + case 'vllm': { + // vLLM uses OpenAI-compatible API + if (!config.url) { + process.stderr.write('Warning: vLLM URL not configured. 
Run "mcpctl config setup" to configure.\n'); + return registry; + } + registry.register(new OpenAiProvider({ + apiKey: 'unused', + baseUrl: config.url, + defaultModel: config.model ?? 'default', + })); + break; + } + } + + return registry; +} diff --git a/src/mcplocal/src/main.ts b/src/mcplocal/src/main.ts index 0e74edb..5473e74 100644 --- a/src/mcplocal/src/main.ts +++ b/src/mcplocal/src/main.ts @@ -7,8 +7,11 @@ import { StdioProxyServer } from './server.js'; import { StdioUpstream } from './upstream/stdio.js'; import { HttpUpstream } from './upstream/http.js'; import { createHttpServer } from './http/server.js'; -import { loadHttpConfig } from './http/config.js'; +import { loadHttpConfig, loadLlmConfig } from './http/config.js'; import type { HttpConfig } from './http/config.js'; +import { createProviderFromConfig } from './llm-config.js'; +import { createSecretStore } from '@mcpctl/shared'; +import type { ProviderRegistry } from './providers/registry.js'; interface ParsedArgs { configPath: string | undefined; @@ -55,12 +58,22 @@ export interface MainResult { server: StdioProxyServer; httpServer: FastifyInstance | undefined; httpConfig: HttpConfig; + providerRegistry: ProviderRegistry; } export async function main(argv: string[] = process.argv): Promise { const args = parseArgs(argv); const httpConfig = loadHttpConfig(); + // Load LLM provider from user config + secret store + const llmConfig = loadLlmConfig(); + const secretStore = await createSecretStore(); + const providerRegistry = await createProviderFromConfig(llmConfig, secretStore); + const activeLlm = providerRegistry.getActive(); + if (activeLlm) { + process.stderr.write(`LLM provider: ${activeLlm.name}\n`); + } + let upstreamConfigs: UpstreamConfig[] = []; if (args.configPath) { @@ -115,7 +128,7 @@ export async function main(argv: string[] = process.argv): Promise { // Start HTTP server unless disabled let httpServer: FastifyInstance | undefined; if (!args.noHttp) { - httpServer = await 
// Unit tests for loadLlmConfig. node:fs is mocked so no real config files are
// read; each test drives the loader purely through existsSync/readFileSync.
import { describe, it, expect, vi, afterEach } from 'vitest';
import { loadLlmConfig } from '../../src/http/config.js';
import { existsSync, readFileSync } from 'node:fs';

// Mock only the two fs functions the loader uses; everything else stays real.
vi.mock('node:fs', async () => {
  const actual = await vi.importActual('node:fs');
  return {
    ...actual,
    existsSync: vi.fn(),
    readFileSync: vi.fn(),
  };
});

afterEach(() => {
  vi.restoreAllMocks();
});

describe('loadLlmConfig', () => {
  it('returns undefined when config file does not exist', () => {
    vi.mocked(existsSync).mockReturnValue(false);
    expect(loadLlmConfig()).toBeUndefined();
  });

  it('returns undefined when config has no llm section', () => {
    vi.mocked(existsSync).mockReturnValue(true);
    vi.mocked(readFileSync).mockReturnValue(JSON.stringify({ mcplocalUrl: 'http://localhost:3200' }));
    expect(loadLlmConfig()).toBeUndefined();
  });

  // provider 'none' is normalized to "no LLM config" rather than returned.
  it('returns undefined when provider is none', () => {
    vi.mocked(existsSync).mockReturnValue(true);
    vi.mocked(readFileSync).mockReturnValue(JSON.stringify({ llm: { provider: 'none' } }));
    expect(loadLlmConfig()).toBeUndefined();
  });

  it('returns LLM config when provider is configured', () => {
    vi.mocked(existsSync).mockReturnValue(true);
    vi.mocked(readFileSync).mockReturnValue(JSON.stringify({
      llm: { provider: 'anthropic', model: 'claude-haiku-3-5-20241022' },
    }));
    const result = loadLlmConfig();
    expect(result).toEqual({ provider: 'anthropic', model: 'claude-haiku-3-5-20241022' });
  });

  it('returns full LLM config with all fields', () => {
    vi.mocked(existsSync).mockReturnValue(true);
    vi.mocked(readFileSync).mockReturnValue(JSON.stringify({
      llm: { provider: 'vllm', model: 'my-model', url: 'http://gpu:8000' },
    }));
    const result = loadLlmConfig();
    expect(result).toEqual({ provider: 'vllm', model: 'my-model', url: 'http://gpu:8000' });
  });

  // The loader is expected to fail soft on unreadable config, never throw.
  it('returns undefined on malformed JSON', () => {
    vi.mocked(existsSync).mockReturnValue(true);
    vi.mocked(readFileSync).mockReturnValue('NOT JSON!!!');
    expect(loadLlmConfig()).toBeUndefined();
  });

  it('returns undefined on read error', () => {
    vi.mocked(existsSync).mockReturnValue(true);
    vi.mocked(readFileSync).mockImplementation(() => { throw new Error('EACCES'); });
    expect(loadLlmConfig()).toBeUndefined();
  });
});
// Tests for createProviderFromConfig: registry population per provider,
// secret-store key lookup, and soft-failure (empty registry + stderr warning)
// when credentials or URLs are missing.
import { describe, it, expect, vi } from 'vitest';
import { createProviderFromConfig } from '../src/llm-config.js';
import type { SecretStore } from '@mcpctl/shared';

// In-memory SecretStore double: `get` resolves from the given map,
// mutators are inert. Spies allow asserting which key was requested.
function mockSecretStore(secrets: Record<string, string> = {}): SecretStore {
  return {
    get: vi.fn(async (key: string) => secrets[key] ?? null),
    set: vi.fn(async () => {}),
    delete: vi.fn(async () => true),
    backend: () => 'mock',
  };
}

describe('createProviderFromConfig', () => {
  it('returns empty registry for undefined config', async () => {
    const store = mockSecretStore();
    const registry = await createProviderFromConfig(undefined, store);
    expect(registry.getActive()).toBeNull();
    expect(registry.list()).toEqual([]);
  });

  it('returns empty registry for provider=none', async () => {
    const store = mockSecretStore();
    const registry = await createProviderFromConfig({ provider: 'none' }, store);
    expect(registry.getActive()).toBeNull();
  });

  it('creates gemini-cli provider', async () => {
    const store = mockSecretStore();
    const registry = await createProviderFromConfig(
      { provider: 'gemini-cli', model: 'gemini-2.5-flash', binaryPath: '/usr/bin/gemini' },
      store,
    );
    expect(registry.getActive()).not.toBeNull();
    expect(registry.getActive()!.name).toBe('gemini-cli');
  });

  it('creates ollama provider', async () => {
    const store = mockSecretStore();
    const registry = await createProviderFromConfig(
      { provider: 'ollama', model: 'llama3.2', url: 'http://localhost:11434' },
      store,
    );
    expect(registry.getActive()!.name).toBe('ollama');
  });

  it('creates anthropic provider with API key from secret store', async () => {
    const store = mockSecretStore({ 'anthropic-api-key': 'sk-ant-test' });
    const registry = await createProviderFromConfig(
      { provider: 'anthropic', model: 'claude-haiku-3-5-20241022' },
      store,
    );
    expect(registry.getActive()!.name).toBe('anthropic');
    expect(store.get).toHaveBeenCalledWith('anthropic-api-key');
  });

  // A missing key must degrade to "LLM disabled" plus a stderr hint, not throw.
  it('returns empty registry when anthropic API key is missing', async () => {
    const store = mockSecretStore();
    const stderrSpy = vi.spyOn(process.stderr, 'write').mockImplementation(() => true);
    const registry = await createProviderFromConfig(
      { provider: 'anthropic', model: 'claude-haiku-3-5-20241022' },
      store,
    );
    expect(registry.getActive()).toBeNull();
    expect(stderrSpy).toHaveBeenCalledWith(expect.stringContaining('Anthropic API key not found'));
    stderrSpy.mockRestore();
  });

  it('creates openai provider with API key from secret store', async () => {
    const store = mockSecretStore({ 'openai-api-key': 'sk-test' });
    const registry = await createProviderFromConfig(
      { provider: 'openai', model: 'gpt-4o', url: 'https://api.openai.com' },
      store,
    );
    expect(registry.getActive()!.name).toBe('openai');
    expect(store.get).toHaveBeenCalledWith('openai-api-key');
  });

  it('returns empty registry when openai API key is missing', async () => {
    const store = mockSecretStore();
    const stderrSpy = vi.spyOn(process.stderr, 'write').mockImplementation(() => true);
    const registry = await createProviderFromConfig(
      { provider: 'openai' },
      store,
    );
    expect(registry.getActive()).toBeNull();
    stderrSpy.mockRestore();
  });

  it('creates deepseek provider with API key from secret store', async () => {
    const store = mockSecretStore({ 'deepseek-api-key': 'sk-ds-test' });
    const registry = await createProviderFromConfig(
      { provider: 'deepseek', model: 'deepseek-chat' },
      store,
    );
    expect(registry.getActive()!.name).toBe('deepseek');
    expect(store.get).toHaveBeenCalledWith('deepseek-api-key');
  });

  it('returns empty registry when deepseek API key is missing', async () => {
    const store = mockSecretStore();
    const stderrSpy = vi.spyOn(process.stderr, 'write').mockImplementation(() => true);
    const registry = await createProviderFromConfig(
      { provider: 'deepseek' },
      store,
    );
    expect(registry.getActive()).toBeNull();
    stderrSpy.mockRestore();
  });

  it('creates vllm provider using OpenAI provider', async () => {
    const store = mockSecretStore();
    const registry = await createProviderFromConfig(
      { provider: 'vllm', model: 'my-model', url: 'http://gpu-server:8000' },
      store,
    );
    // vLLM reuses OpenAI provider under the hood
    expect(registry.getActive()).not.toBeNull();
    expect(registry.getActive()!.name).toBe('openai');
  });

  it('returns empty registry when vllm URL is missing', async () => {
    const store = mockSecretStore();
    const stderrSpy = vi.spyOn(process.stderr, 'write').mockImplementation(() => true);
    const registry = await createProviderFromConfig(
      { provider: 'vllm' },
      store,
    );
    expect(registry.getActive()).toBeNull();
    expect(stderrSpy).toHaveBeenCalledWith(expect.stringContaining('vLLM URL not configured'));
    stderrSpy.mockRestore();
  });
});
under the hood + expect(registry.getActive()).not.toBeNull(); + expect(registry.getActive()!.name).toBe('openai'); + }); + + it('returns empty registry when vllm URL is missing', async () => { + const store = mockSecretStore(); + const stderrSpy = vi.spyOn(process.stderr, 'write').mockImplementation(() => true); + const registry = await createProviderFromConfig( + { provider: 'vllm' }, + store, + ); + expect(registry.getActive()).toBeNull(); + expect(stderrSpy).toHaveBeenCalledWith(expect.stringContaining('vLLM URL not configured')); + stderrSpy.mockRestore(); + }); +}); diff --git a/src/shared/src/index.ts b/src/shared/src/index.ts index 9f512e7..384dd73 100644 --- a/src/shared/src/index.ts +++ b/src/shared/src/index.ts @@ -2,3 +2,4 @@ export * from './types/index.js'; export * from './validation/index.js'; export * from './constants/index.js'; export * from './utils/index.js'; +export * from './secrets/index.js'; diff --git a/src/shared/src/secrets/file-store.ts b/src/shared/src/secrets/file-store.ts new file mode 100644 index 0000000..53d375f --- /dev/null +++ b/src/shared/src/secrets/file-store.ts @@ -0,0 +1,63 @@ +import { existsSync, mkdirSync, readFileSync, writeFileSync, chmodSync } from 'node:fs'; +import { join } from 'node:path'; +import { homedir } from 'node:os'; +import type { SecretStore, SecretStoreDeps } from './types.js'; + +function defaultConfigDir(): string { + return join(homedir(), '.mcpctl'); +} + +function secretsPath(configDir: string): string { + return join(configDir, 'secrets'); +} + +export class FileSecretStore implements SecretStore { + private readonly configDir: string; + + constructor(deps?: SecretStoreDeps) { + this.configDir = deps?.configDir ?? defaultConfigDir(); + } + + backend(): string { + return 'file'; + } + + async get(key: string): Promise { + const data = this.readAll(); + return data[key] ?? 
null; + } + + async set(key: string, value: string): Promise { + const data = this.readAll(); + data[key] = value; + this.writeAll(data); + } + + async delete(key: string): Promise { + const data = this.readAll(); + if (!(key in data)) return false; + delete data[key]; + this.writeAll(data); + return true; + } + + private readAll(): Record { + const path = secretsPath(this.configDir); + if (!existsSync(path)) return {}; + try { + const raw = readFileSync(path, 'utf-8'); + return JSON.parse(raw) as Record; + } catch { + return {}; + } + } + + private writeAll(data: Record): void { + if (!existsSync(this.configDir)) { + mkdirSync(this.configDir, { recursive: true }); + } + const path = secretsPath(this.configDir); + writeFileSync(path, JSON.stringify(data, null, 2) + '\n', 'utf-8'); + chmodSync(path, 0o600); + } +} diff --git a/src/shared/src/secrets/gnome-keyring.ts b/src/shared/src/secrets/gnome-keyring.ts new file mode 100644 index 0000000..810a3ed --- /dev/null +++ b/src/shared/src/secrets/gnome-keyring.ts @@ -0,0 +1,97 @@ +import { spawn } from 'node:child_process'; +import { execFile } from 'node:child_process'; +import { promisify } from 'node:util'; +import type { SecretStore } from './types.js'; + +const execFileAsync = promisify(execFile); +const SERVICE = 'mcpctl'; + +export type RunCommand = (cmd: string, args: string[], stdin?: string) => Promise<{ stdout: string; code: number }>; + +function defaultRunCommand(cmd: string, args: string[], stdin?: string): Promise<{ stdout: string; code: number }> { + return new Promise((resolve, reject) => { + const child = spawn(cmd, args, { + stdio: ['pipe', 'pipe', 'pipe'], + timeout: 5000, + }); + + const stdoutChunks: Buffer[] = []; + child.stdout.on('data', (chunk: Buffer) => stdoutChunks.push(chunk)); + + child.on('error', reject); + child.on('close', (code) => { + const stdout = Buffer.concat(stdoutChunks).toString('utf-8'); + resolve({ stdout, code: code ?? 
1 }); + }); + + if (stdin !== undefined) { + child.stdin.write(stdin); + child.stdin.end(); + } else { + child.stdin.end(); + } + }); +} + +export interface GnomeKeyringDeps { + run?: RunCommand; +} + +export class GnomeKeyringStore implements SecretStore { + private readonly run: RunCommand; + + constructor(deps?: GnomeKeyringDeps) { + this.run = deps?.run ?? defaultRunCommand; + } + + backend(): string { + return 'gnome-keyring'; + } + + async get(key: string): Promise { + try { + const { stdout, code } = await this.run( + 'secret-tool', ['lookup', 'service', SERVICE, 'key', key], + ); + if (code !== 0 || !stdout) return null; + return stdout; + } catch { + return null; + } + } + + async set(key: string, value: string): Promise { + const { code } = await this.run( + 'secret-tool', + ['store', '--label', `mcpctl: ${key}`, 'service', SERVICE, 'key', key], + value, + ); + if (code !== 0) { + throw new Error(`secret-tool store exited with code ${code}`); + } + } + + async delete(key: string): Promise { + try { + const { code } = await this.run( + 'secret-tool', ['clear', 'service', SERVICE, 'key', key], + ); + return code === 0; + } catch { + return false; + } + } + + static async isAvailable(deps?: { run?: RunCommand }): Promise { + try { + if (deps?.run) { + const { code } = await deps.run('secret-tool', ['--version']); + return code === 0; + } + await execFileAsync('secret-tool', ['--version'], { timeout: 3000 }); + return true; + } catch { + return false; + } + } +} diff --git a/src/shared/src/secrets/index.ts b/src/shared/src/secrets/index.ts new file mode 100644 index 0000000..599bcab --- /dev/null +++ b/src/shared/src/secrets/index.ts @@ -0,0 +1,15 @@ +export type { SecretStore, SecretStoreDeps } from './types.js'; +export { FileSecretStore } from './file-store.js'; +export { GnomeKeyringStore } from './gnome-keyring.js'; +export type { GnomeKeyringDeps, RunCommand } from './gnome-keyring.js'; + +import { GnomeKeyringStore } from './gnome-keyring.js'; +import 
{ FileSecretStore } from './file-store.js'; +import type { SecretStore, SecretStoreDeps } from './types.js'; + +export async function createSecretStore(deps?: SecretStoreDeps): Promise { + if (await GnomeKeyringStore.isAvailable()) { + return new GnomeKeyringStore(); + } + return new FileSecretStore(deps); +} diff --git a/src/shared/src/secrets/types.ts b/src/shared/src/secrets/types.ts new file mode 100644 index 0000000..083e5f3 --- /dev/null +++ b/src/shared/src/secrets/types.ts @@ -0,0 +1,10 @@ +export interface SecretStore { + get(key: string): Promise; + set(key: string, value: string): Promise; + delete(key: string): Promise; + backend(): string; +} + +export interface SecretStoreDeps { + configDir?: string; +} diff --git a/src/shared/tests/secrets/factory.test.ts b/src/shared/tests/secrets/factory.test.ts new file mode 100644 index 0000000..124e2eb --- /dev/null +++ b/src/shared/tests/secrets/factory.test.ts @@ -0,0 +1,24 @@ +import { describe, it, expect, vi, afterEach } from 'vitest'; +import { createSecretStore } from '../../src/secrets/index.js'; +import { GnomeKeyringStore } from '../../src/secrets/gnome-keyring.js'; +import { FileSecretStore } from '../../src/secrets/file-store.js'; + +afterEach(() => { + vi.restoreAllMocks(); +}); + +describe('createSecretStore', () => { + it('returns GnomeKeyringStore when secret-tool is available', async () => { + vi.spyOn(GnomeKeyringStore, 'isAvailable').mockResolvedValue(true); + const store = await createSecretStore(); + expect(store.backend()).toBe('gnome-keyring'); + expect(store).toBeInstanceOf(GnomeKeyringStore); + }); + + it('returns FileSecretStore when secret-tool is not available', async () => { + vi.spyOn(GnomeKeyringStore, 'isAvailable').mockResolvedValue(false); + const store = await createSecretStore(); + expect(store.backend()).toBe('file'); + expect(store).toBeInstanceOf(FileSecretStore); + }); +}); diff --git a/src/shared/tests/secrets/file-store.test.ts 
// Tests for FileSecretStore against a real temp directory — exercises the
// actual filesystem behavior (permissions, dir creation, corruption recovery).
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import { mkdtempSync, rmSync, statSync, existsSync, writeFileSync } from 'node:fs';
import { join } from 'node:path';
import { tmpdir } from 'node:os';
import { FileSecretStore } from '../../src/secrets/file-store.js';

let tempDir: string;

// Fresh isolated config dir per test; removed afterwards.
beforeEach(() => {
  tempDir = mkdtempSync(join(tmpdir(), 'mcpctl-secrets-test-'));
});

afterEach(() => {
  rmSync(tempDir, { recursive: true, force: true });
});

describe('FileSecretStore', () => {
  it('returns null for missing key', async () => {
    const store = new FileSecretStore({ configDir: tempDir });
    expect(await store.get('nonexistent')).toBeNull();
  });

  it('stores and retrieves a secret', async () => {
    const store = new FileSecretStore({ configDir: tempDir });
    await store.set('api-key', 'sk-12345');
    expect(await store.get('api-key')).toBe('sk-12345');
  });

  it('overwrites existing values', async () => {
    const store = new FileSecretStore({ configDir: tempDir });
    await store.set('api-key', 'old-value');
    await store.set('api-key', 'new-value');
    expect(await store.get('api-key')).toBe('new-value');
  });

  it('stores multiple keys', async () => {
    const store = new FileSecretStore({ configDir: tempDir });
    await store.set('key-a', 'value-a');
    await store.set('key-b', 'value-b');
    expect(await store.get('key-a')).toBe('value-a');
    expect(await store.get('key-b')).toBe('value-b');
  });

  it('deletes a key', async () => {
    const store = new FileSecretStore({ configDir: tempDir });
    await store.set('api-key', 'sk-12345');
    expect(await store.delete('api-key')).toBe(true);
    expect(await store.get('api-key')).toBeNull();
  });

  it('returns false when deleting nonexistent key', async () => {
    const store = new FileSecretStore({ configDir: tempDir });
    expect(await store.delete('nonexistent')).toBe(false);
  });

  // Security contract: secrets file must be owner-only.
  it('sets 0600 permissions on secrets file', async () => {
    const store = new FileSecretStore({ configDir: tempDir });
    await store.set('api-key', 'sk-12345');
    const stat = statSync(join(tempDir, 'secrets'));
    expect(stat.mode & 0o777).toBe(0o600);
  });

  it('creates config dir if missing', async () => {
    const nested = join(tempDir, 'sub', 'dir');
    const store = new FileSecretStore({ configDir: nested });
    await store.set('api-key', 'sk-12345');
    expect(existsSync(join(nested, 'secrets'))).toBe(true);
  });

  // A corrupted secrets file must behave like an empty store, not crash,
  // and must be recoverable by writing fresh values over it.
  it('recovers from corrupted JSON', async () => {
    writeFileSync(join(tempDir, 'secrets'), 'NOT JSON!!!', 'utf-8');
    const store = new FileSecretStore({ configDir: tempDir });
    // Should not throw, returns null for any key
    expect(await store.get('api-key')).toBeNull();
    // Should be able to write over corrupted file
    await store.set('api-key', 'fresh-value');
    expect(await store.get('api-key')).toBe('fresh-value');
  });

  it('reports file backend', () => {
    const store = new FileSecretStore({ configDir: tempDir });
    expect(store.backend()).toBe('file');
  });

  it('preserves other keys on delete', async () => {
    const store = new FileSecretStore({ configDir: tempDir });
    await store.set('key-a', 'value-a');
    await store.set('key-b', 'value-b');
    await store.delete('key-a');
    expect(await store.get('key-a')).toBeNull();
    expect(await store.get('key-b')).toBe('value-b');
  });
});
Record, +): RunCommand { + return vi.fn(async (cmd: string, args: string[], _stdin?: string) => { + const key = `${cmd} ${args.join(' ')}`; + for (const [pattern, response] of Object.entries(responses)) { + if (key.includes(pattern)) return response; + } + return { stdout: '', code: 1 }; + }); +} + +describe('GnomeKeyringStore', () => { + describe('get', () => { + it('returns value on success', async () => { + const run = mockRun({ 'lookup': { stdout: 'my-secret', code: 0 } }); + const store = new GnomeKeyringStore({ run }); + expect(await store.get('api-key')).toBe('my-secret'); + }); + + it('returns null on exit code 1', async () => { + const run = mockRun({ 'lookup': { stdout: '', code: 1 } }); + const store = new GnomeKeyringStore({ run }); + expect(await store.get('api-key')).toBeNull(); + }); + + it('returns null on empty stdout', async () => { + const run = mockRun({ 'lookup': { stdout: '', code: 0 } }); + const store = new GnomeKeyringStore({ run }); + expect(await store.get('api-key')).toBeNull(); + }); + + it('returns null on error', async () => { + const run = vi.fn().mockRejectedValue(new Error('timeout')); + const store = new GnomeKeyringStore({ run }); + expect(await store.get('api-key')).toBeNull(); + }); + + it('calls secret-tool with correct args', async () => { + const run = vi.fn().mockResolvedValue({ stdout: 'val', code: 0 }); + const store = new GnomeKeyringStore({ run }); + await store.get('my-key'); + expect(run).toHaveBeenCalledWith( + 'secret-tool', + ['lookup', 'service', 'mcpctl', 'key', 'my-key'], + ); + }); + }); + + describe('set', () => { + it('calls secret-tool store with value as stdin', async () => { + const run = vi.fn().mockResolvedValue({ stdout: '', code: 0 }); + const store = new GnomeKeyringStore({ run }); + await store.set('api-key', 'secret-value'); + expect(run).toHaveBeenCalledWith( + 'secret-tool', + ['store', '--label', 'mcpctl: api-key', 'service', 'mcpctl', 'key', 'api-key'], + 'secret-value', + ); + }); + + 
it('throws on non-zero exit code', async () => { + const run = vi.fn().mockResolvedValue({ stdout: '', code: 1 }); + const store = new GnomeKeyringStore({ run }); + await expect(store.set('api-key', 'val')).rejects.toThrow('exited with code 1'); + }); + }); + + describe('delete', () => { + it('returns true on success', async () => { + const run = mockRun({ 'clear': { stdout: '', code: 0 } }); + const store = new GnomeKeyringStore({ run }); + expect(await store.delete('api-key')).toBe(true); + }); + + it('returns false on failure', async () => { + const run = mockRun({ 'clear': { stdout: '', code: 1 } }); + const store = new GnomeKeyringStore({ run }); + expect(await store.delete('api-key')).toBe(false); + }); + + it('returns false on error', async () => { + const run = vi.fn().mockRejectedValue(new Error('fail')); + const store = new GnomeKeyringStore({ run }); + expect(await store.delete('api-key')).toBe(false); + }); + + it('calls secret-tool clear with correct args', async () => { + const run = vi.fn().mockResolvedValue({ stdout: '', code: 0 }); + const store = new GnomeKeyringStore({ run }); + await store.delete('my-key'); + expect(run).toHaveBeenCalledWith( + 'secret-tool', + ['clear', 'service', 'mcpctl', 'key', 'my-key'], + ); + }); + }); + + describe('isAvailable', () => { + it('returns true when secret-tool exists', async () => { + const run = vi.fn().mockResolvedValue({ stdout: '0.20', code: 0 }); + expect(await GnomeKeyringStore.isAvailable({ run })).toBe(true); + }); + + it('returns false when secret-tool not found', async () => { + const run = vi.fn().mockRejectedValue(new Error('ENOENT')); + expect(await GnomeKeyringStore.isAvailable({ run })).toBe(false); + }); + + it('returns false on non-zero exit', async () => { + const run = vi.fn().mockResolvedValue({ stdout: '', code: 127 }); + expect(await GnomeKeyringStore.isAvailable({ run })).toBe(false); + }); + }); + + it('reports gnome-keyring backend', () => { + const run = vi.fn().mockResolvedValue({ 
stdout: '', code: 0 }); + const store = new GnomeKeyringStore({ run }); + expect(store.backend()).toBe('gnome-keyring'); + }); +});