feat: LLM provider configuration, secret store, and setup wizard
Some checks failed
CI / lint (pull_request) Has been cancelled
CI / typecheck (pull_request) Has been cancelled
CI / test (pull_request) Has been cancelled
CI / build (pull_request) Has been cancelled
CI / package (pull_request) Has been cancelled

Add secure credential storage (GNOME Keyring + file fallback),
LLM provider config in ~/.mcpctl/config.json, interactive setup
wizard (mcpctl config setup), and wire configured provider into
mcplocal for smart pagination summaries.

- Secret store: SecretStore interface, GnomeKeyringStore, FileSecretStore
- Config schema: LlmConfigSchema with provider/model/url/binaryPath
- Setup wizard: arrow-key provider/model selection, dynamic model fetch
- Provider factory: creates ProviderRegistry from config + secrets
- Status: shows LLM line with hint when not configured
- 572 tests passing across all packages

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Michal
2026-02-24 22:48:17 +00:00
parent 32b4de4343
commit 7c23da10c6
22 changed files with 1448 additions and 9 deletions

View File

@@ -37,6 +37,30 @@ function loadUserToken(): string {
}
}
/**
 * Shape of the optional `llm` section of ~/.mcpctl/config.json.
 *
 * Only `provider` is required; the remaining fields apply to a subset of
 * providers (e.g. `url` for ollama/vllm/openai-compatible endpoints,
 * `binaryPath` for gemini-cli).
 */
export interface LlmFileConfig {
  // Provider id, e.g. 'anthropic' | 'openai' | 'ollama' | 'vllm' | 'gemini-cli' | 'none'.
  provider: string;
  // Model name to use; provider-specific default applies when omitted.
  model?: string;
  // Base URL for HTTP-based providers (ollama, openai-compatible, vllm).
  url?: string;
  // Path to a local CLI binary (used by gemini-cli).
  binaryPath?: string;
}
/**
* Load LLM configuration from ~/.mcpctl/config.json.
* Returns undefined if no LLM section is configured.
*/
/**
 * Load LLM configuration from ~/.mcpctl/config.json.
 *
 * Returns undefined when the file is missing or unreadable, the JSON is
 * malformed, the `llm` section is absent or not an object, the provider is
 * not a string, or the provider is the explicit sentinel 'none'.
 */
export function loadLlmConfig(): LlmFileConfig | undefined {
  try {
    const configPath = join(homedir(), '.mcpctl', 'config.json');
    if (!existsSync(configPath)) return undefined;
    const raw = readFileSync(configPath, 'utf-8');
    // JSON.parse is untyped; narrow from unknown instead of blindly asserting
    // the shape, so a malformed config (e.g. llm as a string, or a numeric
    // provider) cannot leak through as a mistyped LlmFileConfig.
    const parsed: unknown = JSON.parse(raw);
    if (typeof parsed !== 'object' || parsed === null) return undefined;
    const llm = (parsed as { llm?: unknown }).llm;
    if (typeof llm !== 'object' || llm === null) return undefined;
    const candidate = llm as LlmFileConfig;
    if (typeof candidate.provider !== 'string' || candidate.provider === 'none') return undefined;
    return candidate;
  } catch {
    // Best-effort: any I/O or parse failure means "no LLM configured".
    return undefined;
  }
}
export function loadHttpConfig(env: Record<string, string | undefined> = process.env): HttpConfig {
const portStr = env['MCPLOCAL_HTTP_PORT'];
const port = portStr !== undefined ? parseInt(portStr, 10) : DEFAULT_HTTP_PORT;

View File

@@ -15,6 +15,7 @@ import { McpRouter } from '../router.js';
import { ResponsePaginator } from '../llm/pagination.js';
import { refreshProjectUpstreams } from '../discovery.js';
import type { McpdClient } from './mcpd-client.js';
import type { ProviderRegistry } from '../providers/registry.js';
import type { JsonRpcRequest } from '../types.js';
interface ProjectCacheEntry {
@@ -29,7 +30,7 @@ interface SessionEntry {
const CACHE_TTL_MS = 60_000; // 60 seconds
export function registerProjectMcpEndpoint(app: FastifyInstance, mcpdClient: McpdClient): void {
export function registerProjectMcpEndpoint(app: FastifyInstance, mcpdClient: McpdClient, providerRegistry?: ProviderRegistry | null): void {
const projectCache = new Map<string, ProjectCacheEntry>();
const sessions = new Map<string, SessionEntry>();
@@ -45,8 +46,8 @@ export function registerProjectMcpEndpoint(app: FastifyInstance, mcpdClient: Mcp
const router = existing?.router ?? new McpRouter();
await refreshProjectUpstreams(router, mcpdClient, projectName, authToken);
// Wire pagination support (no LLM provider for now — simple index fallback)
router.setPaginator(new ResponsePaginator(null));
// Wire pagination support with LLM provider if configured
router.setPaginator(new ResponsePaginator(providerRegistry?.getActive() ?? null));
// Configure prompt resources with SA-scoped client for RBAC
const saClient = mcpdClient.withHeaders({ 'X-Service-Account': `project:${projectName}` });

View File

@@ -10,11 +10,13 @@ import { registerProjectMcpEndpoint } from './project-mcp-endpoint.js';
import type { McpRouter } from '../router.js';
import type { HealthMonitor } from '../health.js';
import type { TieredHealthMonitor } from '../health/tiered.js';
import type { ProviderRegistry } from '../providers/registry.js';
export interface HttpServerDeps {
router: McpRouter;
healthMonitor?: HealthMonitor | undefined;
tieredHealthMonitor?: TieredHealthMonitor | undefined;
providerRegistry?: ProviderRegistry | null | undefined;
}
export async function createHttpServer(
@@ -87,7 +89,7 @@ export async function createHttpServer(
registerMcpEndpoint(app, deps.router);
// Project-scoped MCP endpoint at /projects/:projectName/mcp
registerProjectMcpEndpoint(app, mcpdClient);
registerProjectMcpEndpoint(app, mcpdClient, deps.providerRegistry);
return app;
}

View File

@@ -0,0 +1,93 @@
import type { SecretStore } from '@mcpctl/shared';
import type { LlmFileConfig } from './http/config.js';
import { ProviderRegistry } from './providers/registry.js';
import { GeminiCliProvider } from './providers/gemini-cli.js';
import { OllamaProvider } from './providers/ollama.js';
import { AnthropicProvider } from './providers/anthropic.js';
import { OpenAiProvider } from './providers/openai.js';
import { DeepSeekProvider } from './providers/deepseek.js';
/**
* Create a ProviderRegistry from user config + secret store.
* Returns an empty registry if config is undefined or provider is 'none'.
*/
/**
 * Create a ProviderRegistry from user config + secret store.
 *
 * Returns an empty registry when config is undefined or the provider is
 * 'none'. For providers that need credentials or a URL, a missing secret or
 * URL yields an empty registry with a warning on stderr; an unrecognized
 * provider name (likely a typo in config.json) also warns instead of
 * failing silently.
 *
 * @param config - Parsed `llm` section of ~/.mcpctl/config.json, if any.
 * @param secretStore - Store used to resolve provider API keys.
 * @returns Registry containing at most one registered provider.
 */
export async function createProviderFromConfig(
  config: LlmFileConfig | undefined,
  secretStore: SecretStore,
): Promise<ProviderRegistry> {
  const registry = new ProviderRegistry();
  if (!config?.provider || config.provider === 'none') return registry;

  switch (config.provider) {
    case 'gemini-cli':
      registry.register(new GeminiCliProvider({
        binaryPath: config.binaryPath,
        defaultModel: config.model,
      }));
      break;

    case 'ollama':
      registry.register(new OllamaProvider({
        baseUrl: config.url,
        defaultModel: config.model,
      }));
      break;

    case 'anthropic': {
      const apiKey = await secretStore.get('anthropic-api-key');
      if (!apiKey) {
        process.stderr.write('Warning: Anthropic API key not found in secret store. Run "mcpctl config setup" to configure.\n');
        return registry;
      }
      registry.register(new AnthropicProvider({
        apiKey,
        defaultModel: config.model,
      }));
      break;
    }

    case 'openai': {
      const apiKey = await secretStore.get('openai-api-key');
      if (!apiKey) {
        process.stderr.write('Warning: OpenAI API key not found in secret store. Run "mcpctl config setup" to configure.\n');
        return registry;
      }
      registry.register(new OpenAiProvider({
        apiKey,
        baseUrl: config.url,
        defaultModel: config.model,
      }));
      break;
    }

    case 'deepseek': {
      const apiKey = await secretStore.get('deepseek-api-key');
      if (!apiKey) {
        process.stderr.write('Warning: DeepSeek API key not found in secret store. Run "mcpctl config setup" to configure.\n');
        return registry;
      }
      registry.register(new DeepSeekProvider({
        apiKey,
        baseUrl: config.url,
        defaultModel: config.model,
      }));
      break;
    }

    case 'vllm': {
      // vLLM exposes an OpenAI-compatible API, so reuse OpenAiProvider.
      if (!config.url) {
        process.stderr.write('Warning: vLLM URL not configured. Run "mcpctl config setup" to configure.\n');
        return registry;
      }
      registry.register(new OpenAiProvider({
        apiKey: 'unused',
        baseUrl: config.url,
        defaultModel: config.model ?? 'default',
      }));
      break;
    }

    default:
      // Unknown provider id (likely a typo in config.json): warn rather than
      // silently returning an empty registry, matching the other failure paths.
      process.stderr.write(`Warning: unknown LLM provider "${config.provider}" in config. Run "mcpctl config setup" to reconfigure.\n`);
      break;
  }

  return registry;
}

View File

@@ -7,8 +7,11 @@ import { StdioProxyServer } from './server.js';
import { StdioUpstream } from './upstream/stdio.js';
import { HttpUpstream } from './upstream/http.js';
import { createHttpServer } from './http/server.js';
import { loadHttpConfig } from './http/config.js';
import { loadHttpConfig, loadLlmConfig } from './http/config.js';
import type { HttpConfig } from './http/config.js';
import { createProviderFromConfig } from './llm-config.js';
import { createSecretStore } from '@mcpctl/shared';
import type { ProviderRegistry } from './providers/registry.js';
interface ParsedArgs {
configPath: string | undefined;
@@ -55,12 +58,22 @@ export interface MainResult {
server: StdioProxyServer;
httpServer: FastifyInstance | undefined;
httpConfig: HttpConfig;
providerRegistry: ProviderRegistry;
}
export async function main(argv: string[] = process.argv): Promise<MainResult> {
const args = parseArgs(argv);
const httpConfig = loadHttpConfig();
// Load LLM provider from user config + secret store
const llmConfig = loadLlmConfig();
const secretStore = await createSecretStore();
const providerRegistry = await createProviderFromConfig(llmConfig, secretStore);
const activeLlm = providerRegistry.getActive();
if (activeLlm) {
process.stderr.write(`LLM provider: ${activeLlm.name}\n`);
}
let upstreamConfigs: UpstreamConfig[] = [];
if (args.configPath) {
@@ -115,7 +128,7 @@ export async function main(argv: string[] = process.argv): Promise<MainResult> {
// Start HTTP server unless disabled
let httpServer: FastifyInstance | undefined;
if (!args.noHttp) {
httpServer = await createHttpServer(httpConfig, { router });
httpServer = await createHttpServer(httpConfig, { router, providerRegistry });
await httpServer.listen({ port: httpConfig.httpPort, host: httpConfig.httpHost });
process.stderr.write(`mcpctl-proxy HTTP server listening on ${httpConfig.httpHost}:${httpConfig.httpPort}\n`);
}
@@ -137,7 +150,7 @@ export async function main(argv: string[] = process.argv): Promise<MainResult> {
process.on('SIGTERM', () => void shutdown());
process.on('SIGINT', () => void shutdown());
return { router, server, httpServer, httpConfig };
return { router, server, httpServer, httpConfig, providerRegistry };
}
// Run when executed directly

View File

@@ -0,0 +1,65 @@
import { describe, it, expect, vi, afterEach } from 'vitest';
import { loadLlmConfig } from '../../src/http/config.js';
import { existsSync, readFileSync } from 'node:fs';
// Mock node:fs so the tests fully control what loadLlmConfig "reads" from
// disk. Vitest hoists vi.mock above the module's imports, so the
// existsSync/readFileSync bindings imported above are the mocked versions.
// All other fs exports are passed through from the real module.
vi.mock('node:fs', async () => {
  const actual = await vi.importActual<typeof import('node:fs')>('node:fs');
  return {
    ...actual,
    existsSync: vi.fn(),
    readFileSync: vi.fn(),
  };
});

// Reset mock implementations after each test so per-test setups don't leak.
afterEach(() => {
  vi.restoreAllMocks();
});
describe('loadLlmConfig', () => {
  /**
   * Arrange the fs mocks: when body is undefined the config file is absent;
   * otherwise it exists and readFileSync yields the given content.
   */
  const givenConfigFile = (body?: string): void => {
    vi.mocked(existsSync).mockReturnValue(body !== undefined);
    if (body !== undefined) {
      vi.mocked(readFileSync).mockReturnValue(body);
    }
  };

  it('returns undefined when config file does not exist', () => {
    givenConfigFile(undefined);
    expect(loadLlmConfig()).toBeUndefined();
  });

  it('returns undefined when config has no llm section', () => {
    givenConfigFile(JSON.stringify({ mcplocalUrl: 'http://localhost:3200' }));
    expect(loadLlmConfig()).toBeUndefined();
  });

  it('returns undefined when provider is none', () => {
    givenConfigFile(JSON.stringify({ llm: { provider: 'none' } }));
    expect(loadLlmConfig()).toBeUndefined();
  });

  it('returns LLM config when provider is configured', () => {
    givenConfigFile(JSON.stringify({
      llm: { provider: 'anthropic', model: 'claude-haiku-3-5-20241022' },
    }));
    expect(loadLlmConfig()).toEqual({ provider: 'anthropic', model: 'claude-haiku-3-5-20241022' });
  });

  it('returns full LLM config with all fields', () => {
    givenConfigFile(JSON.stringify({
      llm: { provider: 'vllm', model: 'my-model', url: 'http://gpu:8000' },
    }));
    expect(loadLlmConfig()).toEqual({ provider: 'vllm', model: 'my-model', url: 'http://gpu:8000' });
  });

  it('returns undefined on malformed JSON', () => {
    givenConfigFile('NOT JSON!!!');
    expect(loadLlmConfig()).toBeUndefined();
  });

  it('returns undefined on read error', () => {
    vi.mocked(existsSync).mockReturnValue(true);
    vi.mocked(readFileSync).mockImplementation(() => { throw new Error('EACCES'); });
    expect(loadLlmConfig()).toBeUndefined();
  });
});

View File

@@ -0,0 +1,133 @@
import { describe, it, expect, vi } from 'vitest';
import { createProviderFromConfig } from '../src/llm-config.js';
import type { SecretStore } from '@mcpctl/shared';
/**
 * Build an in-memory SecretStore test double backed by the given key/value
 * map. All methods are vi.fn spies so tests can assert on calls; get()
 * resolves to null for unknown keys.
 */
function mockSecretStore(secrets: Record<string, string> = {}): SecretStore {
  const lookup = async (key: string): Promise<string | null> => secrets[key] ?? null;
  return {
    get: vi.fn(lookup),
    set: vi.fn(async () => {}),
    delete: vi.fn(async () => true),
    backend: () => 'mock',
  };
}
describe('createProviderFromConfig', () => {
  // --- Empty-config paths: no provider should be registered. ---

  it('returns empty registry for undefined config', async () => {
    const store = mockSecretStore();
    const registry = await createProviderFromConfig(undefined, store);
    expect(registry.getActive()).toBeNull();
    expect(registry.list()).toEqual([]);
  });

  it('returns empty registry for provider=none', async () => {
    const store = mockSecretStore();
    const registry = await createProviderFromConfig({ provider: 'none' }, store);
    expect(registry.getActive()).toBeNull();
  });

  // --- Keyless providers: constructed directly from config fields. ---

  it('creates gemini-cli provider', async () => {
    const store = mockSecretStore();
    const registry = await createProviderFromConfig(
      { provider: 'gemini-cli', model: 'gemini-2.5-flash', binaryPath: '/usr/bin/gemini' },
      store,
    );
    expect(registry.getActive()).not.toBeNull();
    expect(registry.getActive()!.name).toBe('gemini-cli');
  });

  it('creates ollama provider', async () => {
    const store = mockSecretStore();
    const registry = await createProviderFromConfig(
      { provider: 'ollama', model: 'llama3.2', url: 'http://localhost:11434' },
      store,
    );
    expect(registry.getActive()!.name).toBe('ollama');
  });

  // --- API-key providers: key is fetched from the secret store; a missing
  // --- key yields an empty registry plus a stderr warning (spied below so
  // --- the warning does not pollute test output).

  it('creates anthropic provider with API key from secret store', async () => {
    const store = mockSecretStore({ 'anthropic-api-key': 'sk-ant-test' });
    const registry = await createProviderFromConfig(
      { provider: 'anthropic', model: 'claude-haiku-3-5-20241022' },
      store,
    );
    expect(registry.getActive()!.name).toBe('anthropic');
    expect(store.get).toHaveBeenCalledWith('anthropic-api-key');
  });

  it('returns empty registry when anthropic API key is missing', async () => {
    const store = mockSecretStore();
    const stderrSpy = vi.spyOn(process.stderr, 'write').mockImplementation(() => true);
    const registry = await createProviderFromConfig(
      { provider: 'anthropic', model: 'claude-haiku-3-5-20241022' },
      store,
    );
    expect(registry.getActive()).toBeNull();
    expect(stderrSpy).toHaveBeenCalledWith(expect.stringContaining('Anthropic API key not found'));
    stderrSpy.mockRestore();
  });

  it('creates openai provider with API key from secret store', async () => {
    const store = mockSecretStore({ 'openai-api-key': 'sk-test' });
    const registry = await createProviderFromConfig(
      { provider: 'openai', model: 'gpt-4o', url: 'https://api.openai.com' },
      store,
    );
    expect(registry.getActive()!.name).toBe('openai');
    expect(store.get).toHaveBeenCalledWith('openai-api-key');
  });

  it('returns empty registry when openai API key is missing', async () => {
    const store = mockSecretStore();
    const stderrSpy = vi.spyOn(process.stderr, 'write').mockImplementation(() => true);
    const registry = await createProviderFromConfig(
      { provider: 'openai' },
      store,
    );
    expect(registry.getActive()).toBeNull();
    stderrSpy.mockRestore();
  });

  it('creates deepseek provider with API key from secret store', async () => {
    const store = mockSecretStore({ 'deepseek-api-key': 'sk-ds-test' });
    const registry = await createProviderFromConfig(
      { provider: 'deepseek', model: 'deepseek-chat' },
      store,
    );
    expect(registry.getActive()!.name).toBe('deepseek');
    expect(store.get).toHaveBeenCalledWith('deepseek-api-key');
  });

  it('returns empty registry when deepseek API key is missing', async () => {
    const store = mockSecretStore();
    const stderrSpy = vi.spyOn(process.stderr, 'write').mockImplementation(() => true);
    const registry = await createProviderFromConfig(
      { provider: 'deepseek' },
      store,
    );
    expect(registry.getActive()).toBeNull();
    stderrSpy.mockRestore();
  });

  // --- vLLM: no API key, but requires a URL; backed by the OpenAI provider. ---

  it('creates vllm provider using OpenAI provider', async () => {
    const store = mockSecretStore();
    const registry = await createProviderFromConfig(
      { provider: 'vllm', model: 'my-model', url: 'http://gpu-server:8000' },
      store,
    );
    // vLLM reuses OpenAI provider under the hood
    expect(registry.getActive()).not.toBeNull();
    expect(registry.getActive()!.name).toBe('openai');
  });

  it('returns empty registry when vllm URL is missing', async () => {
    const store = mockSecretStore();
    const stderrSpy = vi.spyOn(process.stderr, 'write').mockImplementation(() => true);
    const registry = await createProviderFromConfig(
      { provider: 'vllm' },
      store,
    );
    expect(registry.getActive()).toBeNull();
    expect(stderrSpy).toHaveBeenCalledWith(expect.stringContaining('vLLM URL not configured'));
    stderrSpy.mockRestore();
  });
});