Compare commits
7 Commits
feat/promp
...
feat/llm-c
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
7c23da10c6 | ||
| 32b4de4343 | |||
|
|
e06db9afba | ||
|
|
a25809b84a | ||
| f5a902d3e0 | |||
|
|
9cb0c5ce24 | ||
| 06230ec034 |
319
src/cli/src/commands/config-setup.ts
Normal file
319
src/cli/src/commands/config-setup.ts
Normal file
@@ -0,0 +1,319 @@
|
|||||||
|
import { Command } from 'commander';
|
||||||
|
import http from 'node:http';
|
||||||
|
import https from 'node:https';
|
||||||
|
import { loadConfig, saveConfig } from '../config/index.js';
|
||||||
|
import type { ConfigLoaderDeps, McpctlConfig, LlmConfig, LlmProviderName } from '../config/index.js';
|
||||||
|
import type { SecretStore } from '@mcpctl/shared';
|
||||||
|
import { createSecretStore } from '@mcpctl/shared';
|
||||||
|
|
||||||
|
/**
 * Minimal prompt abstraction used by the setup wizard so tests can inject
 * scripted answers instead of driving a real interactive terminal prompt.
 */
export interface ConfigSetupPrompt {
  /** Pick one value from a list of named choices. */
  select<T>(message: string, choices: Array<{ name: string; value: T; description?: string }>): Promise<T>;
  /** Free-text input with an optional pre-filled default. */
  input(message: string, defaultValue?: string): Promise<string>;
  /** Masked input for secrets such as API keys. */
  password(message: string): Promise<string>;
  /** Yes/no question with an optional default answer. */
  confirm(message: string, defaultValue?: boolean): Promise<boolean>;
}
|
||||||
|
|
||||||
|
/**
 * Injectable dependencies for the `config setup` command; production code
 * fills these with real implementations, tests substitute doubles.
 */
export interface ConfigSetupDeps {
  /** Overrides for config loading/saving (e.g. a custom config dir in tests). */
  configDeps: Partial<ConfigLoaderDeps>;
  /** Storage for API keys — keys never go into the config file. */
  secretStore: SecretStore;
  /** Output sink for user-facing messages. */
  log: (...args: string[]) => void;
  /** Interactive prompt implementation. */
  prompt: ConfigSetupPrompt;
  /** Queries a provider endpoint for its available model names. */
  fetchModels: (url: string, path: string) => Promise<string[]>;
}
|
||||||
|
|
||||||
|
/** One selectable entry in the provider menu shown by the wizard. */
interface ProviderChoice {
  name: string;
  value: LlmProviderName;
  description: string;
}
|
||||||
|
|
||||||
|
// Menu of supported providers, in the order presented to the user.
const PROVIDER_CHOICES: ProviderChoice[] = [
  { name: 'Gemini CLI', value: 'gemini-cli', description: 'Google Gemini via local CLI (free, no API key)' },
  { name: 'Ollama', value: 'ollama', description: 'Local models via Ollama' },
  { name: 'Anthropic (Claude)', value: 'anthropic', description: 'Claude API (requires API key)' },
  { name: 'vLLM', value: 'vllm', description: 'Self-hosted vLLM (OpenAI-compatible)' },
  { name: 'OpenAI', value: 'openai', description: 'OpenAI API (requires API key)' },
  { name: 'DeepSeek', value: 'deepseek', description: 'DeepSeek API (requires API key)' },
  { name: 'None (disable)', value: 'none', description: 'Disable LLM features' },
];
|
||||||
|
|
||||||
|
// Curated model suggestions per provider. The wizard always also offers a
// "Custom..." escape hatch, so these lists do not need to be exhaustive.
const GEMINI_MODELS = ['gemini-2.5-flash', 'gemini-2.5-pro', 'gemini-2.0-flash'];
const ANTHROPIC_MODELS = ['claude-haiku-3-5-20241022', 'claude-sonnet-4-20250514', 'claude-opus-4-20250514'];
const DEEPSEEK_MODELS = ['deepseek-chat', 'deepseek-reasoner'];
|
||||||
|
|
||||||
|
/**
 * Query a local LLM server for its available model names.
 *
 * Understands two response shapes: Ollama (`{ models: [{ name }] }`) and
 * OpenAI/vLLM (`{ data: [{ id }] }`). Deliberately never rejects — any
 * network error, timeout, or unparsable/unknown payload resolves to `[]`,
 * and callers fall back to manual model entry.
 *
 * @param baseUrl - Server origin, e.g. `http://localhost:11434`.
 * @param path - Endpoint path, e.g. `/api/tags` or `/v1/models`.
 * @returns Model names, or `[]` on any failure.
 */
function defaultFetchModels(baseUrl: string, path: string): Promise<string[]> {
  return new Promise((resolve) => {
    const url = new URL(path, baseUrl);
    const isHttps = url.protocol === 'https:';
    // Pick the transport matching the URL scheme.
    const transport = isHttps ? https : http;

    const req = transport.get({
      hostname: url.hostname,
      // url.port is '' when unspecified — fall back to the scheme default.
      port: url.port || (isHttps ? 443 : 80),
      path: url.pathname,
      timeout: 5000,
    }, (res) => {
      // Accumulate the body; parsed only once the stream ends.
      const chunks: Buffer[] = [];
      res.on('data', (chunk: Buffer) => chunks.push(chunk));
      res.on('end', () => {
        try {
          const raw = Buffer.concat(chunks).toString('utf-8');
          const data = JSON.parse(raw) as { models?: Array<{ name: string }>; data?: Array<{ id: string }> };
          // Ollama format: { models: [{ name }] }
          if (data.models) {
            resolve(data.models.map((m) => m.name));
            return;
          }
          // OpenAI/vLLM format: { data: [{ id }] }
          if (data.data) {
            resolve(data.data.map((m) => m.id));
            return;
          }
          // Parsed fine but neither known shape — treat as "no models".
          resolve([]);
        } catch {
          // Invalid JSON — swallow and resolve empty (best-effort by design).
          resolve([]);
        }
      });
    });
    req.on('error', () => resolve([]));
    // On timeout the socket stays open until destroyed; resolve empty.
    req.on('timeout', () => { req.destroy(); resolve([]); });
  });
}
|
||||||
|
|
||||||
|
async function defaultSelect<T>(message: string, choices: Array<{ name: string; value: T; description?: string }>): Promise<T> {
|
||||||
|
const { default: inquirer } = await import('inquirer');
|
||||||
|
const { answer } = await inquirer.prompt([{
|
||||||
|
type: 'list',
|
||||||
|
name: 'answer',
|
||||||
|
message,
|
||||||
|
choices: choices.map((c) => ({
|
||||||
|
name: c.description ? `${c.name} — ${c.description}` : c.name,
|
||||||
|
value: c.value,
|
||||||
|
short: c.name,
|
||||||
|
})),
|
||||||
|
}]);
|
||||||
|
return answer as T;
|
||||||
|
}
|
||||||
|
|
||||||
|
async function defaultInput(message: string, defaultValue?: string): Promise<string> {
|
||||||
|
const { default: inquirer } = await import('inquirer');
|
||||||
|
const { answer } = await inquirer.prompt([{
|
||||||
|
type: 'input',
|
||||||
|
name: 'answer',
|
||||||
|
message,
|
||||||
|
default: defaultValue,
|
||||||
|
}]);
|
||||||
|
return answer as string;
|
||||||
|
}
|
||||||
|
|
||||||
|
async function defaultPassword(message: string): Promise<string> {
|
||||||
|
const { default: inquirer } = await import('inquirer');
|
||||||
|
const { answer } = await inquirer.prompt([{ type: 'password', name: 'answer', message }]);
|
||||||
|
return answer as string;
|
||||||
|
}
|
||||||
|
|
||||||
|
async function defaultConfirm(message: string, defaultValue?: boolean): Promise<boolean> {
|
||||||
|
const { default: inquirer } = await import('inquirer');
|
||||||
|
const { answer } = await inquirer.prompt([{
|
||||||
|
type: 'confirm',
|
||||||
|
name: 'answer',
|
||||||
|
message,
|
||||||
|
default: defaultValue ?? true,
|
||||||
|
}]);
|
||||||
|
return answer as boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Production prompt implementation (inquirer-backed); tests inject their own.
const defaultPrompt: ConfigSetupPrompt = {
  select: defaultSelect,
  input: defaultInput,
  password: defaultPassword,
  confirm: defaultConfirm,
};
|
||||||
|
|
||||||
|
export function createConfigSetupCommand(deps?: Partial<ConfigSetupDeps>): Command {
|
||||||
|
return new Command('setup')
|
||||||
|
.description('Interactive LLM provider setup wizard')
|
||||||
|
.action(async () => {
|
||||||
|
const configDeps = deps?.configDeps ?? {};
|
||||||
|
const log = deps?.log ?? ((...args: string[]) => console.log(...args));
|
||||||
|
const prompt = deps?.prompt ?? defaultPrompt;
|
||||||
|
const fetchModels = deps?.fetchModels ?? defaultFetchModels;
|
||||||
|
const secretStore = deps?.secretStore ?? await createSecretStore();
|
||||||
|
|
||||||
|
const config = loadConfig(configDeps);
|
||||||
|
const currentLlm = config.llm;
|
||||||
|
|
||||||
|
// Annotate current provider in choices
|
||||||
|
const choices = PROVIDER_CHOICES.map((c) => {
|
||||||
|
if (currentLlm?.provider === c.value) {
|
||||||
|
return { ...c, name: `${c.name} (current)` };
|
||||||
|
}
|
||||||
|
return c;
|
||||||
|
});
|
||||||
|
|
||||||
|
const provider = await prompt.select<LlmProviderName>('Select LLM provider:', choices);
|
||||||
|
|
||||||
|
if (provider === 'none') {
|
||||||
|
const updated: McpctlConfig = { ...config, llm: { provider: 'none' } };
|
||||||
|
saveConfig(updated, configDeps);
|
||||||
|
log('LLM disabled. Restart mcplocal: systemctl --user restart mcplocal');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
let llmConfig: LlmConfig;
|
||||||
|
|
||||||
|
switch (provider) {
|
||||||
|
case 'gemini-cli':
|
||||||
|
llmConfig = await setupGeminiCli(prompt, currentLlm);
|
||||||
|
break;
|
||||||
|
case 'ollama':
|
||||||
|
llmConfig = await setupOllama(prompt, fetchModels, currentLlm);
|
||||||
|
break;
|
||||||
|
case 'anthropic':
|
||||||
|
llmConfig = await setupApiKeyProvider(prompt, secretStore, 'anthropic', 'anthropic-api-key', ANTHROPIC_MODELS, currentLlm);
|
||||||
|
break;
|
||||||
|
case 'vllm':
|
||||||
|
llmConfig = await setupVllm(prompt, fetchModels, currentLlm);
|
||||||
|
break;
|
||||||
|
case 'openai':
|
||||||
|
llmConfig = await setupApiKeyProvider(prompt, secretStore, 'openai', 'openai-api-key', [], currentLlm);
|
||||||
|
break;
|
||||||
|
case 'deepseek':
|
||||||
|
llmConfig = await setupApiKeyProvider(prompt, secretStore, 'deepseek', 'deepseek-api-key', DEEPSEEK_MODELS, currentLlm);
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const updated: McpctlConfig = { ...config, llm: llmConfig };
|
||||||
|
saveConfig(updated, configDeps);
|
||||||
|
log(`\nLLM configured: ${llmConfig.provider}${llmConfig.model ? ` / ${llmConfig.model}` : ''}`);
|
||||||
|
log('Restart mcplocal: systemctl --user restart mcplocal');
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
async function setupGeminiCli(prompt: ConfigSetupPrompt, current?: LlmConfig): Promise<LlmConfig> {
|
||||||
|
const model = await prompt.select<string>('Select model:', [
|
||||||
|
...GEMINI_MODELS.map((m) => ({
|
||||||
|
name: m === current?.model ? `${m} (current)` : m,
|
||||||
|
value: m,
|
||||||
|
})),
|
||||||
|
{ name: 'Custom...', value: '__custom__' },
|
||||||
|
]);
|
||||||
|
|
||||||
|
const finalModel = model === '__custom__'
|
||||||
|
? await prompt.input('Model name:', current?.model)
|
||||||
|
: model;
|
||||||
|
|
||||||
|
const customBinary = await prompt.confirm('Use custom binary path?', false);
|
||||||
|
const binaryPath = customBinary
|
||||||
|
? await prompt.input('Binary path:', current?.binaryPath ?? 'gemini')
|
||||||
|
: undefined;
|
||||||
|
|
||||||
|
return { provider: 'gemini-cli', model: finalModel, binaryPath };
|
||||||
|
}
|
||||||
|
|
||||||
|
async function setupOllama(prompt: ConfigSetupPrompt, fetchModels: ConfigSetupDeps['fetchModels'], current?: LlmConfig): Promise<LlmConfig> {
|
||||||
|
const url = await prompt.input('Ollama URL:', current?.url ?? 'http://localhost:11434');
|
||||||
|
|
||||||
|
// Try to fetch models from Ollama
|
||||||
|
const models = await fetchModels(url, '/api/tags');
|
||||||
|
let model: string;
|
||||||
|
|
||||||
|
if (models.length > 0) {
|
||||||
|
const choices = models.map((m) => ({
|
||||||
|
name: m === current?.model ? `${m} (current)` : m,
|
||||||
|
value: m,
|
||||||
|
}));
|
||||||
|
choices.push({ name: 'Custom...', value: '__custom__' });
|
||||||
|
model = await prompt.select<string>('Select model:', choices);
|
||||||
|
if (model === '__custom__') {
|
||||||
|
model = await prompt.input('Model name:', current?.model);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
model = await prompt.input('Model name (could not fetch models):', current?.model ?? 'llama3.2');
|
||||||
|
}
|
||||||
|
|
||||||
|
return { provider: 'ollama', model, url };
|
||||||
|
}
|
||||||
|
|
||||||
|
async function setupVllm(prompt: ConfigSetupPrompt, fetchModels: ConfigSetupDeps['fetchModels'], current?: LlmConfig): Promise<LlmConfig> {
|
||||||
|
const url = await prompt.input('vLLM URL:', current?.url ?? 'http://localhost:8000');
|
||||||
|
|
||||||
|
// Try to fetch models from vLLM (OpenAI-compatible)
|
||||||
|
const models = await fetchModels(url, '/v1/models');
|
||||||
|
let model: string;
|
||||||
|
|
||||||
|
if (models.length > 0) {
|
||||||
|
const choices = models.map((m) => ({
|
||||||
|
name: m === current?.model ? `${m} (current)` : m,
|
||||||
|
value: m,
|
||||||
|
}));
|
||||||
|
choices.push({ name: 'Custom...', value: '__custom__' });
|
||||||
|
model = await prompt.select<string>('Select model:', choices);
|
||||||
|
if (model === '__custom__') {
|
||||||
|
model = await prompt.input('Model name:', current?.model);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
model = await prompt.input('Model name (could not fetch models):', current?.model ?? 'default');
|
||||||
|
}
|
||||||
|
|
||||||
|
return { provider: 'vllm', model, url };
|
||||||
|
}
|
||||||
|
|
||||||
|
async function setupApiKeyProvider(
|
||||||
|
prompt: ConfigSetupPrompt,
|
||||||
|
secretStore: SecretStore,
|
||||||
|
provider: LlmProviderName,
|
||||||
|
secretKey: string,
|
||||||
|
hardcodedModels: string[],
|
||||||
|
current?: LlmConfig,
|
||||||
|
): Promise<LlmConfig> {
|
||||||
|
// Check for existing API key
|
||||||
|
const existingKey = await secretStore.get(secretKey);
|
||||||
|
let apiKey: string;
|
||||||
|
|
||||||
|
if (existingKey) {
|
||||||
|
const masked = `****${existingKey.slice(-4)}`;
|
||||||
|
const changeKey = await prompt.confirm(`API key stored (${masked}). Change it?`, false);
|
||||||
|
if (changeKey) {
|
||||||
|
apiKey = await prompt.password('API key:');
|
||||||
|
} else {
|
||||||
|
apiKey = existingKey;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
apiKey = await prompt.password('API key:');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Store API key
|
||||||
|
if (apiKey !== existingKey) {
|
||||||
|
await secretStore.set(secretKey, apiKey);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Model selection
|
||||||
|
let model: string;
|
||||||
|
if (hardcodedModels.length > 0) {
|
||||||
|
const choices = hardcodedModels.map((m) => ({
|
||||||
|
name: m === current?.model ? `${m} (current)` : m,
|
||||||
|
value: m,
|
||||||
|
}));
|
||||||
|
choices.push({ name: 'Custom...', value: '__custom__' });
|
||||||
|
model = await prompt.select<string>('Select model:', choices);
|
||||||
|
if (model === '__custom__') {
|
||||||
|
model = await prompt.input('Model name:', current?.model);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
model = await prompt.input('Model name:', current?.model ?? 'gpt-4o');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Optional custom URL for openai
|
||||||
|
let url: string | undefined;
|
||||||
|
if (provider === 'openai') {
|
||||||
|
const customUrl = await prompt.confirm('Use custom API endpoint?', false);
|
||||||
|
if (customUrl) {
|
||||||
|
url = await prompt.input('API URL:', current?.url ?? 'https://api.openai.com');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return { provider, model, url };
|
||||||
|
}
|
||||||
@@ -6,6 +6,7 @@ import { loadConfig, saveConfig, mergeConfig, getConfigPath, DEFAULT_CONFIG } fr
|
|||||||
import type { McpctlConfig, ConfigLoaderDeps } from '../config/index.js';
|
import type { McpctlConfig, ConfigLoaderDeps } from '../config/index.js';
|
||||||
import { formatJson, formatYaml } from '../formatters/index.js';
|
import { formatJson, formatYaml } from '../formatters/index.js';
|
||||||
import { saveCredentials, loadCredentials } from '../auth/index.js';
|
import { saveCredentials, loadCredentials } from '../auth/index.js';
|
||||||
|
import { createConfigSetupCommand } from './config-setup.js';
|
||||||
import type { CredentialsDeps, StoredCredentials } from '../auth/index.js';
|
import type { CredentialsDeps, StoredCredentials } from '../auth/index.js';
|
||||||
import type { ApiClient } from '../api-client.js';
|
import type { ApiClient } from '../api-client.js';
|
||||||
|
|
||||||
@@ -98,7 +99,7 @@ export function createConfigCommand(deps?: Partial<ConfigCommandDeps>, apiDeps?:
|
|||||||
mcpServers: {
|
mcpServers: {
|
||||||
[opts.project]: {
|
[opts.project]: {
|
||||||
command: 'mcpctl',
|
command: 'mcpctl',
|
||||||
args: ['mcp', '--project', opts.project],
|
args: ['mcp', '-p', opts.project],
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
@@ -138,6 +139,8 @@ export function createConfigCommand(deps?: Partial<ConfigCommandDeps>, apiDeps?:
|
|||||||
registerClaudeCommand('claude', false);
|
registerClaudeCommand('claude', false);
|
||||||
registerClaudeCommand('claude-generate', true); // backward compat
|
registerClaudeCommand('claude-generate', true); // backward compat
|
||||||
|
|
||||||
|
config.addCommand(createConfigSetupCommand({ configDeps }));
|
||||||
|
|
||||||
if (apiDeps) {
|
if (apiDeps) {
|
||||||
const { client, credentialsDeps, log: apiLog } = apiDeps;
|
const { client, credentialsDeps, log: apiLog } = apiDeps;
|
||||||
|
|
||||||
|
|||||||
@@ -50,6 +50,10 @@ export function createStatusCommand(deps?: Partial<StatusCommandDeps>): Command
|
|||||||
checkHealth(config.mcpdUrl),
|
checkHealth(config.mcpdUrl),
|
||||||
]);
|
]);
|
||||||
|
|
||||||
|
const llm = config.llm && config.llm.provider !== 'none'
|
||||||
|
? `${config.llm.provider}${config.llm.model ? ` / ${config.llm.model}` : ''}`
|
||||||
|
: null;
|
||||||
|
|
||||||
const status = {
|
const status = {
|
||||||
version: APP_VERSION,
|
version: APP_VERSION,
|
||||||
mcplocalUrl: config.mcplocalUrl,
|
mcplocalUrl: config.mcplocalUrl,
|
||||||
@@ -59,6 +63,7 @@ export function createStatusCommand(deps?: Partial<StatusCommandDeps>): Command
|
|||||||
auth: creds ? { user: creds.user } : null,
|
auth: creds ? { user: creds.user } : null,
|
||||||
registries: config.registries,
|
registries: config.registries,
|
||||||
outputFormat: config.outputFormat,
|
outputFormat: config.outputFormat,
|
||||||
|
llm,
|
||||||
};
|
};
|
||||||
|
|
||||||
if (opts.output === 'json') {
|
if (opts.output === 'json') {
|
||||||
@@ -72,6 +77,8 @@ export function createStatusCommand(deps?: Partial<StatusCommandDeps>): Command
|
|||||||
log(`Auth: ${creds ? `logged in as ${creds.user}` : 'not logged in'}`);
|
log(`Auth: ${creds ? `logged in as ${creds.user}` : 'not logged in'}`);
|
||||||
log(`Registries: ${status.registries.join(', ')}`);
|
log(`Registries: ${status.registries.join(', ')}`);
|
||||||
log(`Output: ${status.outputFormat}`);
|
log(`Output: ${status.outputFormat}`);
|
||||||
|
log(`LLM: ${status.llm ?? "not configured (run 'mcpctl config setup')"}`);
|
||||||
|
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
export { McpctlConfigSchema, DEFAULT_CONFIG } from './schema.js';
|
export { McpctlConfigSchema, LlmConfigSchema, LLM_PROVIDERS, DEFAULT_CONFIG } from './schema.js';
|
||||||
export type { McpctlConfig } from './schema.js';
|
export type { McpctlConfig, LlmConfig, LlmProviderName } from './schema.js';
|
||||||
export { loadConfig, saveConfig, mergeConfig, getConfigPath } from './loader.js';
|
export { loadConfig, saveConfig, mergeConfig, getConfigPath } from './loader.js';
|
||||||
export type { ConfigLoaderDeps } from './loader.js';
|
export type { ConfigLoaderDeps } from './loader.js';
|
||||||
|
|||||||
@@ -1,5 +1,21 @@
|
|||||||
import { z } from 'zod';
|
import { z } from 'zod';
|
||||||
|
|
||||||
|
export const LLM_PROVIDERS = ['gemini-cli', 'ollama', 'anthropic', 'openai', 'deepseek', 'vllm', 'none'] as const;
|
||||||
|
export type LlmProviderName = typeof LLM_PROVIDERS[number];
|
||||||
|
|
||||||
|
export const LlmConfigSchema = z.object({
|
||||||
|
/** LLM provider name */
|
||||||
|
provider: z.enum(LLM_PROVIDERS),
|
||||||
|
/** Model name */
|
||||||
|
model: z.string().optional(),
|
||||||
|
/** Provider URL (for ollama, vllm, openai with custom endpoint) */
|
||||||
|
url: z.string().optional(),
|
||||||
|
/** Binary path override (for gemini-cli) */
|
||||||
|
binaryPath: z.string().optional(),
|
||||||
|
}).strict();
|
||||||
|
|
||||||
|
export type LlmConfig = z.infer<typeof LlmConfigSchema>;
|
||||||
|
|
||||||
export const McpctlConfigSchema = z.object({
|
export const McpctlConfigSchema = z.object({
|
||||||
/** mcplocal daemon endpoint (local LLM pre-processing proxy) */
|
/** mcplocal daemon endpoint (local LLM pre-processing proxy) */
|
||||||
mcplocalUrl: z.string().default('http://localhost:3200'),
|
mcplocalUrl: z.string().default('http://localhost:3200'),
|
||||||
@@ -19,6 +35,8 @@ export const McpctlConfigSchema = z.object({
|
|||||||
outputFormat: z.enum(['table', 'json', 'yaml']).default('table'),
|
outputFormat: z.enum(['table', 'json', 'yaml']).default('table'),
|
||||||
/** Smithery API key */
|
/** Smithery API key */
|
||||||
smitheryApiKey: z.string().optional(),
|
smitheryApiKey: z.string().optional(),
|
||||||
|
/** LLM provider configuration for smart features (pagination summaries, etc.) */
|
||||||
|
llm: LlmConfigSchema.optional(),
|
||||||
}).transform((cfg) => {
|
}).transform((cfg) => {
|
||||||
// Backward compatibility: if old daemonUrl is set but mcplocalUrl wasn't explicitly changed,
|
// Backward compatibility: if old daemonUrl is set but mcplocalUrl wasn't explicitly changed,
|
||||||
// use daemonUrl as mcplocalUrl
|
// use daemonUrl as mcplocalUrl
|
||||||
|
|||||||
@@ -45,7 +45,7 @@ describe('config claude', () => {
|
|||||||
const written = JSON.parse(readFileSync(outPath, 'utf-8'));
|
const written = JSON.parse(readFileSync(outPath, 'utf-8'));
|
||||||
expect(written.mcpServers['homeautomation']).toEqual({
|
expect(written.mcpServers['homeautomation']).toEqual({
|
||||||
command: 'mcpctl',
|
command: 'mcpctl',
|
||||||
args: ['mcp', '--project', 'homeautomation'],
|
args: ['mcp', '-p', 'homeautomation'],
|
||||||
});
|
});
|
||||||
expect(output.join('\n')).toContain('1 server(s)');
|
expect(output.join('\n')).toContain('1 server(s)');
|
||||||
});
|
});
|
||||||
@@ -60,7 +60,7 @@ describe('config claude', () => {
|
|||||||
const parsed = JSON.parse(output[0]);
|
const parsed = JSON.parse(output[0]);
|
||||||
expect(parsed.mcpServers['myproj']).toEqual({
|
expect(parsed.mcpServers['myproj']).toEqual({
|
||||||
command: 'mcpctl',
|
command: 'mcpctl',
|
||||||
args: ['mcp', '--project', 'myproj'],
|
args: ['mcp', '-p', 'myproj'],
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -80,7 +80,7 @@ describe('config claude', () => {
|
|||||||
expect(written.mcpServers['existing--server']).toBeDefined();
|
expect(written.mcpServers['existing--server']).toBeDefined();
|
||||||
expect(written.mcpServers['proj-1']).toEqual({
|
expect(written.mcpServers['proj-1']).toEqual({
|
||||||
command: 'mcpctl',
|
command: 'mcpctl',
|
||||||
args: ['mcp', '--project', 'proj-1'],
|
args: ['mcp', '-p', 'proj-1'],
|
||||||
});
|
});
|
||||||
expect(output.join('\n')).toContain('2 server(s)');
|
expect(output.join('\n')).toContain('2 server(s)');
|
||||||
});
|
});
|
||||||
@@ -96,7 +96,7 @@ describe('config claude', () => {
|
|||||||
const written = JSON.parse(readFileSync(outPath, 'utf-8'));
|
const written = JSON.parse(readFileSync(outPath, 'utf-8'));
|
||||||
expect(written.mcpServers['proj-1']).toEqual({
|
expect(written.mcpServers['proj-1']).toEqual({
|
||||||
command: 'mcpctl',
|
command: 'mcpctl',
|
||||||
args: ['mcp', '--project', 'proj-1'],
|
args: ['mcp', '-p', 'proj-1'],
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|||||||
268
src/cli/tests/commands/config-setup.test.ts
Normal file
268
src/cli/tests/commands/config-setup.test.ts
Normal file
@@ -0,0 +1,268 @@
|
|||||||
|
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||||
|
import { createConfigSetupCommand } from '../../src/commands/config-setup.js';
|
||||||
|
import type { ConfigSetupDeps, ConfigSetupPrompt } from '../../src/commands/config-setup.js';
|
||||||
|
import type { SecretStore } from '@mcpctl/shared';
|
||||||
|
import { mkdtempSync, rmSync, readFileSync } from 'node:fs';
|
||||||
|
import { join } from 'node:path';
|
||||||
|
import { tmpdir } from 'node:os';
|
||||||
|
|
||||||
|
// Per-test temp config dir and captured wizard log lines.
let tempDir: string;
let logs: string[];

beforeEach(() => {
  // Fresh isolated config dir per test so saved configs don't leak across tests.
  tempDir = mkdtempSync(join(tmpdir(), 'mcpctl-config-setup-test-'));
  logs = [];
});

// NOTE(review): invoked manually at the end of each test rather than via
// afterEach — a failing assertion skips it and leaves the temp dir behind.
function cleanup() {
  rmSync(tempDir, { recursive: true, force: true });
}
|
||||||
|
|
||||||
|
function mockSecretStore(secrets: Record<string, string> = {}): SecretStore {
|
||||||
|
const store: Record<string, string> = { ...secrets };
|
||||||
|
return {
|
||||||
|
get: vi.fn(async (key: string) => store[key] ?? null),
|
||||||
|
set: vi.fn(async (key: string, value: string) => { store[key] = value; }),
|
||||||
|
delete: vi.fn(async () => true),
|
||||||
|
backend: () => 'mock',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
function mockPrompt(answers: unknown[]): ConfigSetupPrompt {
|
||||||
|
let callIndex = 0;
|
||||||
|
return {
|
||||||
|
select: vi.fn(async () => answers[callIndex++]),
|
||||||
|
input: vi.fn(async () => answers[callIndex++] as string),
|
||||||
|
password: vi.fn(async () => answers[callIndex++] as string),
|
||||||
|
confirm: vi.fn(async () => answers[callIndex++] as boolean),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
function buildDeps(overrides: {
|
||||||
|
secrets?: Record<string, string>;
|
||||||
|
answers?: unknown[];
|
||||||
|
fetchModels?: ConfigSetupDeps['fetchModels'];
|
||||||
|
} = {}): ConfigSetupDeps {
|
||||||
|
return {
|
||||||
|
configDeps: { configDir: tempDir },
|
||||||
|
secretStore: mockSecretStore(overrides.secrets),
|
||||||
|
log: (...args: string[]) => logs.push(args.join(' ')),
|
||||||
|
prompt: mockPrompt(overrides.answers ?? []),
|
||||||
|
fetchModels: overrides.fetchModels ?? vi.fn(async () => []),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
function readConfig(): Record<string, unknown> {
|
||||||
|
const raw = readFileSync(join(tempDir, 'config.json'), 'utf-8');
|
||||||
|
return JSON.parse(raw) as Record<string, unknown>;
|
||||||
|
}
|
||||||
|
|
||||||
|
async function runSetup(deps: ConfigSetupDeps): Promise<void> {
|
||||||
|
const cmd = createConfigSetupCommand(deps);
|
||||||
|
await cmd.parseAsync([], { from: 'user' });
|
||||||
|
}
|
||||||
|
|
||||||
|
describe('config setup wizard', () => {
|
||||||
|
describe('provider: none', () => {
|
||||||
|
it('disables LLM and saves config', async () => {
|
||||||
|
const deps = buildDeps({ answers: ['none'] });
|
||||||
|
await runSetup(deps);
|
||||||
|
|
||||||
|
const config = readConfig();
|
||||||
|
expect(config.llm).toEqual({ provider: 'none' });
|
||||||
|
expect(logs.some((l) => l.includes('LLM disabled'))).toBe(true);
|
||||||
|
cleanup();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('provider: gemini-cli', () => {
|
||||||
|
it('saves gemini-cli with selected model', async () => {
|
||||||
|
// Answers: select provider, select model, confirm custom binary=false
|
||||||
|
const deps = buildDeps({ answers: ['gemini-cli', 'gemini-2.5-flash', false] });
|
||||||
|
await runSetup(deps);
|
||||||
|
|
||||||
|
const config = readConfig();
|
||||||
|
expect((config.llm as Record<string, unknown>).provider).toBe('gemini-cli');
|
||||||
|
expect((config.llm as Record<string, unknown>).model).toBe('gemini-2.5-flash');
|
||||||
|
cleanup();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('saves gemini-cli with custom model and binary path', async () => {
|
||||||
|
// Answers: select provider, select custom, enter model name, confirm custom binary=true, enter path
|
||||||
|
const deps = buildDeps({ answers: ['gemini-cli', '__custom__', 'gemini-3.0-flash', true, '/opt/gemini'] });
|
||||||
|
await runSetup(deps);
|
||||||
|
|
||||||
|
const config = readConfig();
|
||||||
|
const llm = config.llm as Record<string, unknown>;
|
||||||
|
expect(llm.model).toBe('gemini-3.0-flash');
|
||||||
|
expect(llm.binaryPath).toBe('/opt/gemini');
|
||||||
|
cleanup();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('provider: ollama', () => {
|
||||||
|
it('fetches models and allows selection', async () => {
|
||||||
|
const fetchModels = vi.fn(async () => ['llama3.2', 'codellama', 'mistral']);
|
||||||
|
// Answers: select provider, enter URL, select model
|
||||||
|
const deps = buildDeps({
|
||||||
|
answers: ['ollama', 'http://localhost:11434', 'codellama'],
|
||||||
|
fetchModels,
|
||||||
|
});
|
||||||
|
await runSetup(deps);
|
||||||
|
|
||||||
|
expect(fetchModels).toHaveBeenCalledWith('http://localhost:11434', '/api/tags');
|
||||||
|
const config = readConfig();
|
||||||
|
const llm = config.llm as Record<string, unknown>;
|
||||||
|
expect(llm.provider).toBe('ollama');
|
||||||
|
expect(llm.model).toBe('codellama');
|
||||||
|
expect(llm.url).toBe('http://localhost:11434');
|
||||||
|
cleanup();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('falls back to manual input when fetch fails', async () => {
|
||||||
|
const fetchModels = vi.fn(async () => []);
|
||||||
|
// Answers: select provider, enter URL, enter model manually
|
||||||
|
const deps = buildDeps({
|
||||||
|
answers: ['ollama', 'http://localhost:11434', 'llama3.2'],
|
||||||
|
fetchModels,
|
||||||
|
});
|
||||||
|
await runSetup(deps);
|
||||||
|
|
||||||
|
const config = readConfig();
|
||||||
|
expect((config.llm as Record<string, unknown>).model).toBe('llama3.2');
|
||||||
|
cleanup();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('provider: anthropic', () => {
|
||||||
|
it('prompts for API key and saves to secret store', async () => {
|
||||||
|
// Answers: select provider, enter API key, select model
|
||||||
|
const deps = buildDeps({
|
||||||
|
answers: ['anthropic', 'sk-ant-new-key', 'claude-haiku-3-5-20241022'],
|
||||||
|
});
|
||||||
|
await runSetup(deps);
|
||||||
|
|
||||||
|
expect(deps.secretStore.set).toHaveBeenCalledWith('anthropic-api-key', 'sk-ant-new-key');
|
||||||
|
const config = readConfig();
|
||||||
|
const llm = config.llm as Record<string, unknown>;
|
||||||
|
expect(llm.provider).toBe('anthropic');
|
||||||
|
expect(llm.model).toBe('claude-haiku-3-5-20241022');
|
||||||
|
// API key should NOT be in config file
|
||||||
|
expect(llm).not.toHaveProperty('apiKey');
|
||||||
|
cleanup();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('shows existing key masked and allows keeping it', async () => {
|
||||||
|
// Answers: select provider, confirm change=false, select model
|
||||||
|
const deps = buildDeps({
|
||||||
|
secrets: { 'anthropic-api-key': 'sk-ant-existing-key-1234' },
|
||||||
|
answers: ['anthropic', false, 'claude-sonnet-4-20250514'],
|
||||||
|
});
|
||||||
|
await runSetup(deps);
|
||||||
|
|
||||||
|
// Should NOT have called set (kept existing key)
|
||||||
|
expect(deps.secretStore.set).not.toHaveBeenCalled();
|
||||||
|
const config = readConfig();
|
||||||
|
expect((config.llm as Record<string, unknown>).model).toBe('claude-sonnet-4-20250514');
|
||||||
|
cleanup();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('allows replacing existing key', async () => {
|
||||||
|
// Answers: select provider, confirm change=true, enter new key, select model
|
||||||
|
const deps = buildDeps({
|
||||||
|
secrets: { 'anthropic-api-key': 'sk-ant-old' },
|
||||||
|
answers: ['anthropic', true, 'sk-ant-new', 'claude-haiku-3-5-20241022'],
|
||||||
|
});
|
||||||
|
await runSetup(deps);
|
||||||
|
|
||||||
|
expect(deps.secretStore.set).toHaveBeenCalledWith('anthropic-api-key', 'sk-ant-new');
|
||||||
|
cleanup();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('provider: vllm', () => {
|
||||||
|
it('fetches models from vLLM and allows selection', async () => {
|
||||||
|
const fetchModels = vi.fn(async () => ['my-model', 'llama-70b']);
|
||||||
|
// Answers: select provider, enter URL, select model
|
||||||
|
const deps = buildDeps({
|
||||||
|
answers: ['vllm', 'http://gpu:8000', 'llama-70b'],
|
||||||
|
fetchModels,
|
||||||
|
});
|
||||||
|
await runSetup(deps);
|
||||||
|
|
||||||
|
expect(fetchModels).toHaveBeenCalledWith('http://gpu:8000', '/v1/models');
|
||||||
|
const config = readConfig();
|
||||||
|
const llm = config.llm as Record<string, unknown>;
|
||||||
|
expect(llm.provider).toBe('vllm');
|
||||||
|
expect(llm.url).toBe('http://gpu:8000');
|
||||||
|
expect(llm.model).toBe('llama-70b');
|
||||||
|
cleanup();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('provider: openai', () => {
|
||||||
|
it('prompts for key, model, and optional custom endpoint', async () => {
|
||||||
|
// Answers: select provider, enter key, enter model, confirm custom URL=true, enter URL
|
||||||
|
const deps = buildDeps({
|
||||||
|
answers: ['openai', 'sk-openai-key', 'gpt-4o', true, 'https://custom.api.com'],
|
||||||
|
});
|
||||||
|
await runSetup(deps);
|
||||||
|
|
||||||
|
expect(deps.secretStore.set).toHaveBeenCalledWith('openai-api-key', 'sk-openai-key');
|
||||||
|
const config = readConfig();
|
||||||
|
const llm = config.llm as Record<string, unknown>;
|
||||||
|
expect(llm.provider).toBe('openai');
|
||||||
|
expect(llm.model).toBe('gpt-4o');
|
||||||
|
expect(llm.url).toBe('https://custom.api.com');
|
||||||
|
cleanup();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('skips custom URL when not requested', async () => {
|
||||||
|
// Answers: select provider, enter key, enter model, confirm custom URL=false
|
||||||
|
const deps = buildDeps({
|
||||||
|
answers: ['openai', 'sk-openai-key', 'gpt-4o-mini', false],
|
||||||
|
});
|
||||||
|
await runSetup(deps);
|
||||||
|
|
||||||
|
const config = readConfig();
|
||||||
|
const llm = config.llm as Record<string, unknown>;
|
||||||
|
expect(llm.url).toBeUndefined();
|
||||||
|
cleanup();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('provider: deepseek', () => {
|
||||||
|
it('prompts for key and model', async () => {
|
||||||
|
// Answers: select provider, enter key, select model
|
||||||
|
const deps = buildDeps({
|
||||||
|
answers: ['deepseek', 'sk-ds-key', 'deepseek-chat'],
|
||||||
|
});
|
||||||
|
await runSetup(deps);
|
||||||
|
|
||||||
|
expect(deps.secretStore.set).toHaveBeenCalledWith('deepseek-api-key', 'sk-ds-key');
|
||||||
|
const config = readConfig();
|
||||||
|
const llm = config.llm as Record<string, unknown>;
|
||||||
|
expect(llm.provider).toBe('deepseek');
|
||||||
|
expect(llm.model).toBe('deepseek-chat');
|
||||||
|
cleanup();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('output messages', () => {
|
||||||
|
it('shows restart instruction', async () => {
|
||||||
|
const deps = buildDeps({ answers: ['gemini-cli', 'gemini-2.5-flash', false] });
|
||||||
|
await runSetup(deps);
|
||||||
|
|
||||||
|
expect(logs.some((l) => l.includes('systemctl --user restart mcplocal'))).toBe(true);
|
||||||
|
cleanup();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('shows configured provider and model', async () => {
|
||||||
|
const deps = buildDeps({ answers: ['gemini-cli', 'gemini-2.5-flash', false] });
|
||||||
|
await runSetup(deps);
|
||||||
|
|
||||||
|
expect(logs.some((l) => l.includes('gemini-cli') && l.includes('gemini-2.5-flash'))).toBe(true);
|
||||||
|
cleanup();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
@@ -126,4 +126,69 @@ describe('status command', () => {
|
|||||||
expect(output.join('\n')).toContain('official');
|
expect(output.join('\n')).toContain('official');
|
||||||
expect(output.join('\n')).not.toContain('glama');
|
expect(output.join('\n')).not.toContain('glama');
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('shows LLM not configured hint when no LLM is set', async () => {
|
||||||
|
const cmd = createStatusCommand({
|
||||||
|
configDeps: { configDir: tempDir },
|
||||||
|
credentialsDeps: { configDir: tempDir },
|
||||||
|
log,
|
||||||
|
checkHealth: async () => true,
|
||||||
|
});
|
||||||
|
await cmd.parseAsync([], { from: 'user' });
|
||||||
|
const out = output.join('\n');
|
||||||
|
expect(out).toContain('LLM:');
|
||||||
|
expect(out).toContain('not configured');
|
||||||
|
expect(out).toContain('mcpctl config setup');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('shows configured LLM provider and model', async () => {
|
||||||
|
saveConfig({ ...DEFAULT_CONFIG, llm: { provider: 'anthropic', model: 'claude-haiku-3-5-20241022' } }, { configDir: tempDir });
|
||||||
|
const cmd = createStatusCommand({
|
||||||
|
configDeps: { configDir: tempDir },
|
||||||
|
credentialsDeps: { configDir: tempDir },
|
||||||
|
log,
|
||||||
|
checkHealth: async () => true,
|
||||||
|
});
|
||||||
|
await cmd.parseAsync([], { from: 'user' });
|
||||||
|
const out = output.join('\n');
|
||||||
|
expect(out).toContain('LLM:');
|
||||||
|
expect(out).toContain('anthropic / claude-haiku-3-5-20241022');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('shows not configured when LLM provider is none', async () => {
|
||||||
|
saveConfig({ ...DEFAULT_CONFIG, llm: { provider: 'none' } }, { configDir: tempDir });
|
||||||
|
const cmd = createStatusCommand({
|
||||||
|
configDeps: { configDir: tempDir },
|
||||||
|
credentialsDeps: { configDir: tempDir },
|
||||||
|
log,
|
||||||
|
checkHealth: async () => true,
|
||||||
|
});
|
||||||
|
await cmd.parseAsync([], { from: 'user' });
|
||||||
|
expect(output.join('\n')).toContain('not configured');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('includes llm field in JSON output', async () => {
|
||||||
|
saveConfig({ ...DEFAULT_CONFIG, llm: { provider: 'gemini-cli', model: 'gemini-2.5-flash' } }, { configDir: tempDir });
|
||||||
|
const cmd = createStatusCommand({
|
||||||
|
configDeps: { configDir: tempDir },
|
||||||
|
credentialsDeps: { configDir: tempDir },
|
||||||
|
log,
|
||||||
|
checkHealth: async () => true,
|
||||||
|
});
|
||||||
|
await cmd.parseAsync(['-o', 'json'], { from: 'user' });
|
||||||
|
const parsed = JSON.parse(output[0]) as Record<string, unknown>;
|
||||||
|
expect(parsed['llm']).toBe('gemini-cli / gemini-2.5-flash');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('includes null llm in JSON output when not configured', async () => {
|
||||||
|
const cmd = createStatusCommand({
|
||||||
|
configDeps: { configDir: tempDir },
|
||||||
|
credentialsDeps: { configDir: tempDir },
|
||||||
|
log,
|
||||||
|
checkHealth: async () => true,
|
||||||
|
});
|
||||||
|
await cmd.parseAsync(['-o', 'json'], { from: 'user' });
|
||||||
|
const parsed = JSON.parse(output[0]) as Record<string, unknown>;
|
||||||
|
expect(parsed['llm']).toBeNull();
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -61,9 +61,10 @@ proc.stdout.on('data', d => {
|
|||||||
const msg = JSON.parse(line);
|
const msg = JSON.parse(line);
|
||||||
if (msg.id === 2) {
|
if (msg.id === 2) {
|
||||||
responded = true;
|
responded = true;
|
||||||
process.stdout.write(JSON.stringify(msg));
|
process.stdout.write(JSON.stringify(msg), () => {
|
||||||
proc.kill();
|
proc.kill();
|
||||||
process.exit(0);
|
process.exit(0);
|
||||||
|
});
|
||||||
}
|
}
|
||||||
} catch {}
|
} catch {}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,3 +1,7 @@
|
|||||||
|
import { existsSync, readFileSync } from 'node:fs';
|
||||||
|
import { join } from 'node:path';
|
||||||
|
import { homedir } from 'node:os';
|
||||||
|
|
||||||
/** Configuration for the mcplocal HTTP server. */
|
/** Configuration for the mcplocal HTTP server. */
|
||||||
export interface HttpConfig {
|
export interface HttpConfig {
|
||||||
/** Port for the HTTP server (default: 3200) */
|
/** Port for the HTTP server (default: 3200) */
|
||||||
@@ -15,9 +19,48 @@ export interface HttpConfig {
|
|||||||
const DEFAULT_HTTP_PORT = 3200;
|
const DEFAULT_HTTP_PORT = 3200;
|
||||||
const DEFAULT_HTTP_HOST = '127.0.0.1';
|
const DEFAULT_HTTP_HOST = '127.0.0.1';
|
||||||
const DEFAULT_MCPD_URL = 'http://localhost:3100';
|
const DEFAULT_MCPD_URL = 'http://localhost:3100';
|
||||||
const DEFAULT_MCPD_TOKEN = '';
|
|
||||||
const DEFAULT_LOG_LEVEL = 'info';
|
const DEFAULT_LOG_LEVEL = 'info';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Read the user's mcpctl credentials from ~/.mcpctl/credentials.
|
||||||
|
* Returns the token if found, empty string otherwise.
|
||||||
|
*/
|
||||||
|
function loadUserToken(): string {
|
||||||
|
try {
|
||||||
|
const credPath = join(homedir(), '.mcpctl', 'credentials');
|
||||||
|
if (!existsSync(credPath)) return '';
|
||||||
|
const raw = readFileSync(credPath, 'utf-8');
|
||||||
|
const parsed = JSON.parse(raw) as { token?: string };
|
||||||
|
return parsed.token ?? '';
|
||||||
|
} catch {
|
||||||
|
return '';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface LlmFileConfig {
|
||||||
|
provider: string;
|
||||||
|
model?: string;
|
||||||
|
url?: string;
|
||||||
|
binaryPath?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Load LLM configuration from ~/.mcpctl/config.json.
|
||||||
|
* Returns undefined if no LLM section is configured.
|
||||||
|
*/
|
||||||
|
export function loadLlmConfig(): LlmFileConfig | undefined {
|
||||||
|
try {
|
||||||
|
const configPath = join(homedir(), '.mcpctl', 'config.json');
|
||||||
|
if (!existsSync(configPath)) return undefined;
|
||||||
|
const raw = readFileSync(configPath, 'utf-8');
|
||||||
|
const parsed = JSON.parse(raw) as { llm?: LlmFileConfig };
|
||||||
|
if (!parsed.llm?.provider || parsed.llm.provider === 'none') return undefined;
|
||||||
|
return parsed.llm;
|
||||||
|
} catch {
|
||||||
|
return undefined;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
export function loadHttpConfig(env: Record<string, string | undefined> = process.env): HttpConfig {
|
export function loadHttpConfig(env: Record<string, string | undefined> = process.env): HttpConfig {
|
||||||
const portStr = env['MCPLOCAL_HTTP_PORT'];
|
const portStr = env['MCPLOCAL_HTTP_PORT'];
|
||||||
const port = portStr !== undefined ? parseInt(portStr, 10) : DEFAULT_HTTP_PORT;
|
const port = portStr !== undefined ? parseInt(portStr, 10) : DEFAULT_HTTP_PORT;
|
||||||
@@ -26,7 +69,7 @@ export function loadHttpConfig(env: Record<string, string | undefined> = process
|
|||||||
httpPort: Number.isFinite(port) ? port : DEFAULT_HTTP_PORT,
|
httpPort: Number.isFinite(port) ? port : DEFAULT_HTTP_PORT,
|
||||||
httpHost: env['MCPLOCAL_HTTP_HOST'] ?? DEFAULT_HTTP_HOST,
|
httpHost: env['MCPLOCAL_HTTP_HOST'] ?? DEFAULT_HTTP_HOST,
|
||||||
mcpdUrl: env['MCPLOCAL_MCPD_URL'] ?? DEFAULT_MCPD_URL,
|
mcpdUrl: env['MCPLOCAL_MCPD_URL'] ?? DEFAULT_MCPD_URL,
|
||||||
mcpdToken: env['MCPLOCAL_MCPD_TOKEN'] ?? DEFAULT_MCPD_TOKEN,
|
mcpdToken: env['MCPLOCAL_MCPD_TOKEN'] ?? loadUserToken(),
|
||||||
logLevel: (env['MCPLOCAL_LOG_LEVEL'] as HttpConfig['logLevel'] | undefined) ?? DEFAULT_LOG_LEVEL,
|
logLevel: (env['MCPLOCAL_LOG_LEVEL'] as HttpConfig['logLevel'] | undefined) ?? DEFAULT_LOG_LEVEL,
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -12,8 +12,10 @@ import type { FastifyInstance } from 'fastify';
|
|||||||
import { StreamableHTTPServerTransport } from '@modelcontextprotocol/sdk/server/streamableHttp.js';
|
import { StreamableHTTPServerTransport } from '@modelcontextprotocol/sdk/server/streamableHttp.js';
|
||||||
import type { JSONRPCMessage } from '@modelcontextprotocol/sdk/types.js';
|
import type { JSONRPCMessage } from '@modelcontextprotocol/sdk/types.js';
|
||||||
import { McpRouter } from '../router.js';
|
import { McpRouter } from '../router.js';
|
||||||
|
import { ResponsePaginator } from '../llm/pagination.js';
|
||||||
import { refreshProjectUpstreams } from '../discovery.js';
|
import { refreshProjectUpstreams } from '../discovery.js';
|
||||||
import type { McpdClient } from './mcpd-client.js';
|
import type { McpdClient } from './mcpd-client.js';
|
||||||
|
import type { ProviderRegistry } from '../providers/registry.js';
|
||||||
import type { JsonRpcRequest } from '../types.js';
|
import type { JsonRpcRequest } from '../types.js';
|
||||||
|
|
||||||
interface ProjectCacheEntry {
|
interface ProjectCacheEntry {
|
||||||
@@ -28,7 +30,7 @@ interface SessionEntry {
|
|||||||
|
|
||||||
const CACHE_TTL_MS = 60_000; // 60 seconds
|
const CACHE_TTL_MS = 60_000; // 60 seconds
|
||||||
|
|
||||||
export function registerProjectMcpEndpoint(app: FastifyInstance, mcpdClient: McpdClient): void {
|
export function registerProjectMcpEndpoint(app: FastifyInstance, mcpdClient: McpdClient, providerRegistry?: ProviderRegistry | null): void {
|
||||||
const projectCache = new Map<string, ProjectCacheEntry>();
|
const projectCache = new Map<string, ProjectCacheEntry>();
|
||||||
const sessions = new Map<string, SessionEntry>();
|
const sessions = new Map<string, SessionEntry>();
|
||||||
|
|
||||||
@@ -44,6 +46,9 @@ export function registerProjectMcpEndpoint(app: FastifyInstance, mcpdClient: Mcp
|
|||||||
const router = existing?.router ?? new McpRouter();
|
const router = existing?.router ?? new McpRouter();
|
||||||
await refreshProjectUpstreams(router, mcpdClient, projectName, authToken);
|
await refreshProjectUpstreams(router, mcpdClient, projectName, authToken);
|
||||||
|
|
||||||
|
// Wire pagination support with LLM provider if configured
|
||||||
|
router.setPaginator(new ResponsePaginator(providerRegistry?.getActive() ?? null));
|
||||||
|
|
||||||
// Configure prompt resources with SA-scoped client for RBAC
|
// Configure prompt resources with SA-scoped client for RBAC
|
||||||
const saClient = mcpdClient.withHeaders({ 'X-Service-Account': `project:${projectName}` });
|
const saClient = mcpdClient.withHeaders({ 'X-Service-Account': `project:${projectName}` });
|
||||||
router.setPromptConfig(saClient, projectName);
|
router.setPromptConfig(saClient, projectName);
|
||||||
|
|||||||
@@ -10,11 +10,13 @@ import { registerProjectMcpEndpoint } from './project-mcp-endpoint.js';
|
|||||||
import type { McpRouter } from '../router.js';
|
import type { McpRouter } from '../router.js';
|
||||||
import type { HealthMonitor } from '../health.js';
|
import type { HealthMonitor } from '../health.js';
|
||||||
import type { TieredHealthMonitor } from '../health/tiered.js';
|
import type { TieredHealthMonitor } from '../health/tiered.js';
|
||||||
|
import type { ProviderRegistry } from '../providers/registry.js';
|
||||||
|
|
||||||
export interface HttpServerDeps {
|
export interface HttpServerDeps {
|
||||||
router: McpRouter;
|
router: McpRouter;
|
||||||
healthMonitor?: HealthMonitor | undefined;
|
healthMonitor?: HealthMonitor | undefined;
|
||||||
tieredHealthMonitor?: TieredHealthMonitor | undefined;
|
tieredHealthMonitor?: TieredHealthMonitor | undefined;
|
||||||
|
providerRegistry?: ProviderRegistry | null | undefined;
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function createHttpServer(
|
export async function createHttpServer(
|
||||||
@@ -87,7 +89,7 @@ export async function createHttpServer(
|
|||||||
registerMcpEndpoint(app, deps.router);
|
registerMcpEndpoint(app, deps.router);
|
||||||
|
|
||||||
// Project-scoped MCP endpoint at /projects/:projectName/mcp
|
// Project-scoped MCP endpoint at /projects/:projectName/mcp
|
||||||
registerProjectMcpEndpoint(app, mcpdClient);
|
registerProjectMcpEndpoint(app, mcpdClient, deps.providerRegistry);
|
||||||
|
|
||||||
return app;
|
return app;
|
||||||
}
|
}
|
||||||
|
|||||||
93
src/mcplocal/src/llm-config.ts
Normal file
93
src/mcplocal/src/llm-config.ts
Normal file
@@ -0,0 +1,93 @@
|
|||||||
|
import type { SecretStore } from '@mcpctl/shared';
|
||||||
|
import type { LlmFileConfig } from './http/config.js';
|
||||||
|
import { ProviderRegistry } from './providers/registry.js';
|
||||||
|
import { GeminiCliProvider } from './providers/gemini-cli.js';
|
||||||
|
import { OllamaProvider } from './providers/ollama.js';
|
||||||
|
import { AnthropicProvider } from './providers/anthropic.js';
|
||||||
|
import { OpenAiProvider } from './providers/openai.js';
|
||||||
|
import { DeepSeekProvider } from './providers/deepseek.js';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a ProviderRegistry from user config + secret store.
|
||||||
|
* Returns an empty registry if config is undefined or provider is 'none'.
|
||||||
|
*/
|
||||||
|
export async function createProviderFromConfig(
|
||||||
|
config: LlmFileConfig | undefined,
|
||||||
|
secretStore: SecretStore,
|
||||||
|
): Promise<ProviderRegistry> {
|
||||||
|
const registry = new ProviderRegistry();
|
||||||
|
if (!config?.provider || config.provider === 'none') return registry;
|
||||||
|
|
||||||
|
switch (config.provider) {
|
||||||
|
case 'gemini-cli':
|
||||||
|
registry.register(new GeminiCliProvider({
|
||||||
|
binaryPath: config.binaryPath,
|
||||||
|
defaultModel: config.model,
|
||||||
|
}));
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'ollama':
|
||||||
|
registry.register(new OllamaProvider({
|
||||||
|
baseUrl: config.url,
|
||||||
|
defaultModel: config.model,
|
||||||
|
}));
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'anthropic': {
|
||||||
|
const apiKey = await secretStore.get('anthropic-api-key');
|
||||||
|
if (!apiKey) {
|
||||||
|
process.stderr.write('Warning: Anthropic API key not found in secret store. Run "mcpctl config setup" to configure.\n');
|
||||||
|
return registry;
|
||||||
|
}
|
||||||
|
registry.register(new AnthropicProvider({
|
||||||
|
apiKey,
|
||||||
|
defaultModel: config.model,
|
||||||
|
}));
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
case 'openai': {
|
||||||
|
const apiKey = await secretStore.get('openai-api-key');
|
||||||
|
if (!apiKey) {
|
||||||
|
process.stderr.write('Warning: OpenAI API key not found in secret store. Run "mcpctl config setup" to configure.\n');
|
||||||
|
return registry;
|
||||||
|
}
|
||||||
|
registry.register(new OpenAiProvider({
|
||||||
|
apiKey,
|
||||||
|
baseUrl: config.url,
|
||||||
|
defaultModel: config.model,
|
||||||
|
}));
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
case 'deepseek': {
|
||||||
|
const apiKey = await secretStore.get('deepseek-api-key');
|
||||||
|
if (!apiKey) {
|
||||||
|
process.stderr.write('Warning: DeepSeek API key not found in secret store. Run "mcpctl config setup" to configure.\n');
|
||||||
|
return registry;
|
||||||
|
}
|
||||||
|
registry.register(new DeepSeekProvider({
|
||||||
|
apiKey,
|
||||||
|
baseUrl: config.url,
|
||||||
|
defaultModel: config.model,
|
||||||
|
}));
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
case 'vllm': {
|
||||||
|
// vLLM uses OpenAI-compatible API
|
||||||
|
if (!config.url) {
|
||||||
|
process.stderr.write('Warning: vLLM URL not configured. Run "mcpctl config setup" to configure.\n');
|
||||||
|
return registry;
|
||||||
|
}
|
||||||
|
registry.register(new OpenAiProvider({
|
||||||
|
apiKey: 'unused',
|
||||||
|
baseUrl: config.url,
|
||||||
|
defaultModel: config.model ?? 'default',
|
||||||
|
}));
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return registry;
|
||||||
|
}
|
||||||
@@ -6,3 +6,5 @@ export { FilterCache, DEFAULT_FILTER_CACHE_CONFIG } from './filter-cache.js';
|
|||||||
export type { FilterCacheConfig } from './filter-cache.js';
|
export type { FilterCacheConfig } from './filter-cache.js';
|
||||||
export { FilterMetrics } from './metrics.js';
|
export { FilterMetrics } from './metrics.js';
|
||||||
export type { FilterMetricsSnapshot } from './metrics.js';
|
export type { FilterMetricsSnapshot } from './metrics.js';
|
||||||
|
export { ResponsePaginator, DEFAULT_PAGINATION_CONFIG, PAGINATION_INDEX_SYSTEM_PROMPT } from './pagination.js';
|
||||||
|
export type { PaginationConfig, PaginationIndex, PageSummary, PaginatedToolResponse } from './pagination.js';
|
||||||
|
|||||||
354
src/mcplocal/src/llm/pagination.ts
Normal file
354
src/mcplocal/src/llm/pagination.ts
Normal file
@@ -0,0 +1,354 @@
|
|||||||
|
import { randomUUID } from 'node:crypto';
|
||||||
|
import type { ProviderRegistry } from '../providers/registry.js';
|
||||||
|
import { estimateTokens } from './token-counter.js';
|
||||||
|
|
||||||
|
// --- Configuration ---

/** Tunables for ResponsePaginator; see DEFAULT_PAGINATION_CONFIG for defaults. */
export interface PaginationConfig {
  /** Character threshold above which responses get paginated (default 80_000) */
  sizeThreshold: number;
  /** Characters per page (default 40_000) */
  pageSize: number;
  /** Max cached results (LRU eviction) (default 64) */
  maxCachedResults: number;
  /** TTL for cached results in ms (default 300_000 = 5 min) */
  ttlMs: number;
  /** Max tokens for the LLM index generation call (default 2048) */
  indexMaxTokens: number;
}

export const DEFAULT_PAGINATION_CONFIG: PaginationConfig = {
  sizeThreshold: 80_000,
  pageSize: 40_000,
  maxCachedResults: 64,
  ttlMs: 300_000,
  indexMaxTokens: 2048,
};

// --- Cache Entry ---

/** Boundaries of one page inside the cached raw response string. */
interface PageInfo {
  /** 0-based page index */
  index: number;
  /** Start character offset in the raw string */
  startChar: number;
  /** End character offset (exclusive) */
  endChar: number;
  /** Approximate token count */
  estimatedTokens: number;
}

/** A fully buffered tool response held in the paginator cache. */
interface CachedResult {
  /** UUID handed back to the caller for follow-up page requests. */
  resultId: string;
  /** Name of the tool that produced this response. */
  toolName: string;
  /** The complete, unpaginated response text. */
  raw: string;
  /** Page boundaries into `raw`. */
  pages: PageInfo[];
  /** The index returned to the caller in place of the raw response. */
  index: PaginationIndex;
  /** Epoch millis when cached; used for TTL eviction. */
  createdAt: number;
}

// --- Index Types ---

/** One page's entry in the index returned to the caller. */
export interface PageSummary {
  /** 1-based page number as exposed in the API. */
  page: number;
  /** Start character offset in the raw response. */
  startChar: number;
  /** End character offset (exclusive) in the raw response. */
  endChar: number;
  /** Approximate token count for this page. */
  estimatedTokens: number;
  /** Human-readable description of the page's content. */
  summary: string;
}

/** The index substituted for an oversized tool response. */
export interface PaginationIndex {
  /** UUID identifying the cached result for page retrieval. */
  resultId: string;
  /** Name of the originating tool. */
  toolName: string;
  /** Total raw response size in characters. */
  totalSize: number;
  /** Approximate token count of the full response. */
  totalTokens: number;
  /** Number of pages the response was split into. */
  totalPages: number;
  /** Per-page descriptions (AI-generated or byte-range). */
  pageSummaries: PageSummary[];
  /** 'smart' when summaries came from an LLM; 'simple' for byte-range fallback. */
  indexType: 'smart' | 'simple';
}

// --- The MCP response format ---

/** Minimal MCP tool-response shape produced by the paginator. */
export interface PaginatedToolResponse {
  content: Array<{
    type: 'text';
    text: string;
  }>;
}

// --- LLM Prompt ---

export const PAGINATION_INDEX_SYSTEM_PROMPT = `You are a document indexing assistant. Given a large tool response split into pages, generate a concise summary for each page describing what data it contains.

Rules:
- For each page, write 1-2 sentences describing the key content
- Be specific: mention entity names, IDs, counts, or key fields visible on that page
- If it's JSON, describe the structure and notable entries
- If it's text, describe the topics covered
- Output valid JSON only: an array of objects with "page" (1-based number) and "summary" (string)
- Example output: [{"page": 1, "summary": "Configuration nodes and global settings (inject, debug, function nodes 1-15)"}, {"page": 2, "summary": "HTTP request nodes and API integrations (nodes 16-40)"}]`;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handles transparent pagination of large MCP tool responses.
|
||||||
|
*
|
||||||
|
* When a tool response exceeds the size threshold, it is cached and an
|
||||||
|
* index is returned instead. The LLM can then request specific pages
|
||||||
|
* via _page/_resultId parameters on subsequent tool calls.
|
||||||
|
*
|
||||||
|
* If an LLM provider is available, the index includes AI-generated
|
||||||
|
* per-page summaries. Otherwise, simple byte-range descriptions are used.
|
||||||
|
*/
|
||||||
|
export class ResponsePaginator {
|
||||||
|
private cache = new Map<string, CachedResult>();
|
||||||
|
private readonly config: PaginationConfig;
|
||||||
|
|
||||||
|
constructor(
|
||||||
|
private providers: ProviderRegistry | null,
|
||||||
|
config: Partial<PaginationConfig> = {},
|
||||||
|
) {
|
||||||
|
this.config = { ...DEFAULT_PAGINATION_CONFIG, ...config };
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if a raw response string should be paginated.
|
||||||
|
*/
|
||||||
|
shouldPaginate(raw: string): boolean {
|
||||||
|
return raw.length >= this.config.sizeThreshold;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Paginate a large response: cache it and return the index.
|
||||||
|
* Returns null if the response is below threshold.
|
||||||
|
*/
|
||||||
|
async paginate(toolName: string, raw: string): Promise<PaginatedToolResponse | null> {
|
||||||
|
if (!this.shouldPaginate(raw)) return null;
|
||||||
|
|
||||||
|
const resultId = randomUUID();
|
||||||
|
const pages = this.splitPages(raw);
|
||||||
|
let index: PaginationIndex;
|
||||||
|
|
||||||
|
try {
|
||||||
|
index = await this.generateSmartIndex(resultId, toolName, raw, pages);
|
||||||
|
} catch {
|
||||||
|
index = this.generateSimpleIndex(resultId, toolName, raw, pages);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Store in cache
|
||||||
|
this.evictExpired();
|
||||||
|
this.evictLRU();
|
||||||
|
this.cache.set(resultId, {
|
||||||
|
resultId,
|
||||||
|
toolName,
|
||||||
|
raw,
|
||||||
|
pages,
|
||||||
|
index,
|
||||||
|
createdAt: Date.now(),
|
||||||
|
});
|
||||||
|
|
||||||
|
return this.formatIndexResponse(index);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Serve a specific page from cache.
|
||||||
|
* Returns null if the resultId is not found (cache miss / expired).
|
||||||
|
*/
|
||||||
|
getPage(resultId: string, page: number | 'all'): PaginatedToolResponse | null {
|
||||||
|
this.evictExpired();
|
||||||
|
const entry = this.cache.get(resultId);
|
||||||
|
if (!entry) return null;
|
||||||
|
|
||||||
|
if (page === 'all') {
|
||||||
|
return {
|
||||||
|
content: [{ type: 'text', text: entry.raw }],
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Pages are 1-based in the API
|
||||||
|
const pageInfo = entry.pages[page - 1];
|
||||||
|
if (!pageInfo) {
|
||||||
|
return {
|
||||||
|
content: [{
|
||||||
|
type: 'text',
|
||||||
|
text: `Error: page ${String(page)} is out of range. This result has ${String(entry.pages.length)} pages (1-${String(entry.pages.length)}).`,
|
||||||
|
}],
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
const pageContent = entry.raw.slice(pageInfo.startChar, pageInfo.endChar);
|
||||||
|
return {
|
||||||
|
content: [{
|
||||||
|
type: 'text',
|
||||||
|
text: `[Page ${String(page)}/${String(entry.pages.length)} of result ${resultId}]\n\n${pageContent}`,
|
||||||
|
}],
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if a tool call has pagination parameters (_page / _resultId).
|
||||||
|
* Returns the parsed pagination request, or null if not a pagination request.
|
||||||
|
*/
|
||||||
|
static extractPaginationParams(
|
||||||
|
args: Record<string, unknown>,
|
||||||
|
): { resultId: string; page: number | 'all' } | null {
|
||||||
|
const resultId = args['_resultId'];
|
||||||
|
const pageParam = args['_page'];
|
||||||
|
if (typeof resultId !== 'string' || pageParam === undefined) return null;
|
||||||
|
|
||||||
|
if (pageParam === 'all') return { resultId, page: 'all' };
|
||||||
|
|
||||||
|
const page = Number(pageParam);
|
||||||
|
if (!Number.isInteger(page) || page < 1) return null;
|
||||||
|
|
||||||
|
return { resultId, page };
|
||||||
|
}
|
||||||
|
|
||||||
|
// --- Private methods ---
|
||||||
|
|
||||||
|
private splitPages(raw: string): PageInfo[] {
|
||||||
|
const pages: PageInfo[] = [];
|
||||||
|
let offset = 0;
|
||||||
|
let pageIndex = 0;
|
||||||
|
|
||||||
|
while (offset < raw.length) {
|
||||||
|
const end = Math.min(offset + this.config.pageSize, raw.length);
|
||||||
|
// Try to break at a newline boundary if we're not at the end
|
||||||
|
let breakAt = end;
|
||||||
|
if (end < raw.length) {
|
||||||
|
const lastNewline = raw.lastIndexOf('\n', end);
|
||||||
|
if (lastNewline > offset) {
|
||||||
|
breakAt = lastNewline + 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pages.push({
|
||||||
|
index: pageIndex,
|
||||||
|
startChar: offset,
|
||||||
|
endChar: breakAt,
|
||||||
|
estimatedTokens: estimateTokens(raw.slice(offset, breakAt)),
|
||||||
|
});
|
||||||
|
|
||||||
|
offset = breakAt;
|
||||||
|
pageIndex++;
|
||||||
|
}
|
||||||
|
|
||||||
|
return pages;
|
||||||
|
}
|
||||||
|
|
||||||
|
private async generateSmartIndex(
|
||||||
|
resultId: string,
|
||||||
|
toolName: string,
|
||||||
|
raw: string,
|
||||||
|
pages: PageInfo[],
|
||||||
|
): Promise<PaginationIndex> {
|
||||||
|
const provider = this.providers?.getActive();
|
||||||
|
if (!provider) {
|
||||||
|
return this.generateSimpleIndex(resultId, toolName, raw, pages);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build a prompt with page previews (first ~500 chars of each page)
|
||||||
|
const previews = pages.map((p, i) => {
|
||||||
|
const preview = raw.slice(p.startChar, Math.min(p.startChar + 500, p.endChar));
|
||||||
|
const truncated = p.endChar - p.startChar > 500 ? '\n[...]' : '';
|
||||||
|
return `--- Page ${String(i + 1)} (chars ${String(p.startChar)}-${String(p.endChar)}, ~${String(p.estimatedTokens)} tokens) ---\n${preview}${truncated}`;
|
||||||
|
}).join('\n\n');
|
||||||
|
|
||||||
|
const result = await provider.complete({
|
||||||
|
messages: [
|
||||||
|
{ role: 'system', content: PAGINATION_INDEX_SYSTEM_PROMPT },
|
||||||
|
{ role: 'user', content: `Tool: ${toolName}\nTotal size: ${String(raw.length)} chars, ${String(pages.length)} pages\n\n${previews}` },
|
||||||
|
],
|
||||||
|
maxTokens: this.config.indexMaxTokens,
|
||||||
|
temperature: 0,
|
||||||
|
});
|
||||||
|
|
||||||
|
const summaries = JSON.parse(result.content) as Array<{ page: number; summary: string }>;
|
||||||
|
|
||||||
|
return {
|
||||||
|
resultId,
|
||||||
|
toolName,
|
||||||
|
totalSize: raw.length,
|
||||||
|
totalTokens: estimateTokens(raw),
|
||||||
|
totalPages: pages.length,
|
||||||
|
indexType: 'smart',
|
||||||
|
pageSummaries: pages.map((p, i) => ({
|
||||||
|
page: i + 1,
|
||||||
|
startChar: p.startChar,
|
||||||
|
endChar: p.endChar,
|
||||||
|
estimatedTokens: p.estimatedTokens,
|
||||||
|
summary: summaries.find((s) => s.page === i + 1)?.summary ?? `Page ${String(i + 1)}`,
|
||||||
|
})),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
private generateSimpleIndex(
|
||||||
|
resultId: string,
|
||||||
|
toolName: string,
|
||||||
|
raw: string,
|
||||||
|
pages: PageInfo[],
|
||||||
|
): PaginationIndex {
|
||||||
|
return {
|
||||||
|
resultId,
|
||||||
|
toolName,
|
||||||
|
totalSize: raw.length,
|
||||||
|
totalTokens: estimateTokens(raw),
|
||||||
|
totalPages: pages.length,
|
||||||
|
indexType: 'simple',
|
||||||
|
pageSummaries: pages.map((p, i) => ({
|
||||||
|
page: i + 1,
|
||||||
|
startChar: p.startChar,
|
||||||
|
endChar: p.endChar,
|
||||||
|
estimatedTokens: p.estimatedTokens,
|
||||||
|
summary: `Page ${String(i + 1)}: characters ${String(p.startChar)}-${String(p.endChar)} (~${String(p.estimatedTokens)} tokens)`,
|
||||||
|
})),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
private formatIndexResponse(index: PaginationIndex): PaginatedToolResponse {
|
||||||
|
const lines = [
|
||||||
|
`This response is too large to return directly (${String(index.totalSize)} chars, ~${String(index.totalTokens)} tokens).`,
|
||||||
|
`It has been split into ${String(index.totalPages)} pages.`,
|
||||||
|
'',
|
||||||
|
'To retrieve a specific page, call this same tool again with additional arguments:',
|
||||||
|
` "_resultId": "${index.resultId}"`,
|
||||||
|
` "_page": <page_number> (1-${String(index.totalPages)})`,
|
||||||
|
' "_page": "all" (returns the full response)',
|
||||||
|
'',
|
||||||
|
`--- Page Index${index.indexType === 'smart' ? ' (AI-generated summaries)' : ''} ---`,
|
||||||
|
];
|
||||||
|
|
||||||
|
for (const page of index.pageSummaries) {
|
||||||
|
lines.push(` Page ${String(page.page)}: ${page.summary}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
content: [{ type: 'text', text: lines.join('\n') }],
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
private evictExpired(): void {
|
||||||
|
const now = Date.now();
|
||||||
|
for (const [id, entry] of this.cache) {
|
||||||
|
if (now - entry.createdAt > this.config.ttlMs) {
|
||||||
|
this.cache.delete(id);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private evictLRU(): void {
|
||||||
|
while (this.cache.size >= this.config.maxCachedResults) {
|
||||||
|
const oldest = this.cache.keys().next();
|
||||||
|
if (oldest.done) break;
|
||||||
|
this.cache.delete(oldest.value);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Exposed for testing. */
|
||||||
|
get cacheSize(): number {
|
||||||
|
return this.cache.size;
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Clear all cached results. */
|
||||||
|
clearCache(): void {
|
||||||
|
this.cache.clear();
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -7,8 +7,11 @@ import { StdioProxyServer } from './server.js';
|
|||||||
import { StdioUpstream } from './upstream/stdio.js';
|
import { StdioUpstream } from './upstream/stdio.js';
|
||||||
import { HttpUpstream } from './upstream/http.js';
|
import { HttpUpstream } from './upstream/http.js';
|
||||||
import { createHttpServer } from './http/server.js';
|
import { createHttpServer } from './http/server.js';
|
||||||
import { loadHttpConfig } from './http/config.js';
|
import { loadHttpConfig, loadLlmConfig } from './http/config.js';
|
||||||
import type { HttpConfig } from './http/config.js';
|
import type { HttpConfig } from './http/config.js';
|
||||||
|
import { createProviderFromConfig } from './llm-config.js';
|
||||||
|
import { createSecretStore } from '@mcpctl/shared';
|
||||||
|
import type { ProviderRegistry } from './providers/registry.js';
|
||||||
|
|
||||||
interface ParsedArgs {
|
interface ParsedArgs {
|
||||||
configPath: string | undefined;
|
configPath: string | undefined;
|
||||||
@@ -55,12 +58,22 @@ export interface MainResult {
|
|||||||
server: StdioProxyServer;
|
server: StdioProxyServer;
|
||||||
httpServer: FastifyInstance | undefined;
|
httpServer: FastifyInstance | undefined;
|
||||||
httpConfig: HttpConfig;
|
httpConfig: HttpConfig;
|
||||||
|
providerRegistry: ProviderRegistry;
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function main(argv: string[] = process.argv): Promise<MainResult> {
|
export async function main(argv: string[] = process.argv): Promise<MainResult> {
|
||||||
const args = parseArgs(argv);
|
const args = parseArgs(argv);
|
||||||
const httpConfig = loadHttpConfig();
|
const httpConfig = loadHttpConfig();
|
||||||
|
|
||||||
|
// Load LLM provider from user config + secret store
|
||||||
|
const llmConfig = loadLlmConfig();
|
||||||
|
const secretStore = await createSecretStore();
|
||||||
|
const providerRegistry = await createProviderFromConfig(llmConfig, secretStore);
|
||||||
|
const activeLlm = providerRegistry.getActive();
|
||||||
|
if (activeLlm) {
|
||||||
|
process.stderr.write(`LLM provider: ${activeLlm.name}\n`);
|
||||||
|
}
|
||||||
|
|
||||||
let upstreamConfigs: UpstreamConfig[] = [];
|
let upstreamConfigs: UpstreamConfig[] = [];
|
||||||
|
|
||||||
if (args.configPath) {
|
if (args.configPath) {
|
||||||
@@ -115,7 +128,7 @@ export async function main(argv: string[] = process.argv): Promise<MainResult> {
|
|||||||
// Start HTTP server unless disabled
|
// Start HTTP server unless disabled
|
||||||
let httpServer: FastifyInstance | undefined;
|
let httpServer: FastifyInstance | undefined;
|
||||||
if (!args.noHttp) {
|
if (!args.noHttp) {
|
||||||
httpServer = await createHttpServer(httpConfig, { router });
|
httpServer = await createHttpServer(httpConfig, { router, providerRegistry });
|
||||||
await httpServer.listen({ port: httpConfig.httpPort, host: httpConfig.httpHost });
|
await httpServer.listen({ port: httpConfig.httpPort, host: httpConfig.httpHost });
|
||||||
process.stderr.write(`mcpctl-proxy HTTP server listening on ${httpConfig.httpHost}:${httpConfig.httpPort}\n`);
|
process.stderr.write(`mcpctl-proxy HTTP server listening on ${httpConfig.httpHost}:${httpConfig.httpPort}\n`);
|
||||||
}
|
}
|
||||||
@@ -137,7 +150,7 @@ export async function main(argv: string[] = process.argv): Promise<MainResult> {
|
|||||||
process.on('SIGTERM', () => void shutdown());
|
process.on('SIGTERM', () => void shutdown());
|
||||||
process.on('SIGINT', () => void shutdown());
|
process.on('SIGINT', () => void shutdown());
|
||||||
|
|
||||||
return { router, server, httpServer, httpConfig };
|
return { router, server, httpServer, httpConfig, providerRegistry };
|
||||||
}
|
}
|
||||||
|
|
||||||
// Run when executed directly
|
// Run when executed directly
|
||||||
|
|||||||
@@ -1,5 +1,6 @@
|
|||||||
import type { UpstreamConnection, JsonRpcRequest, JsonRpcResponse, JsonRpcNotification } from './types.js';
|
import type { UpstreamConnection, JsonRpcRequest, JsonRpcResponse, JsonRpcNotification } from './types.js';
|
||||||
import type { LlmProcessor } from './llm/processor.js';
|
import type { LlmProcessor } from './llm/processor.js';
|
||||||
|
import { ResponsePaginator } from './llm/pagination.js';
|
||||||
import type { McpdClient } from './http/mcpd-client.js';
|
import type { McpdClient } from './http/mcpd-client.js';
|
||||||
|
|
||||||
export interface RouteContext {
|
export interface RouteContext {
|
||||||
@@ -26,6 +27,11 @@ export class McpRouter {
|
|||||||
private mcpdClient: McpdClient | null = null;
|
private mcpdClient: McpdClient | null = null;
|
||||||
private projectName: string | null = null;
|
private projectName: string | null = null;
|
||||||
private mcpctlResourceContents = new Map<string, string>();
|
private mcpctlResourceContents = new Map<string, string>();
|
||||||
|
private paginator: ResponsePaginator | null = null;
|
||||||
|
|
||||||
|
setPaginator(paginator: ResponsePaginator): void {
|
||||||
|
this.paginator = paginator;
|
||||||
|
}
|
||||||
|
|
||||||
setLlmProcessor(processor: LlmProcessor): void {
|
setLlmProcessor(processor: LlmProcessor): void {
|
||||||
this.llmProcessor = processor;
|
this.llmProcessor = processor;
|
||||||
@@ -367,6 +373,17 @@ export class McpRouter {
|
|||||||
case 'prompts/get':
|
case 'prompts/get':
|
||||||
return this.routeNamespacedCall(request, 'name', this.promptToServer);
|
return this.routeNamespacedCall(request, 'name', this.promptToServer);
|
||||||
|
|
||||||
|
// Handle MCP notifications (no response expected, but return empty result if called as request)
|
||||||
|
case 'notifications/initialized':
|
||||||
|
case 'notifications/cancelled':
|
||||||
|
case 'notifications/progress':
|
||||||
|
case 'notifications/roots/list_changed':
|
||||||
|
return {
|
||||||
|
jsonrpc: '2.0',
|
||||||
|
id: request.id,
|
||||||
|
result: {},
|
||||||
|
};
|
||||||
|
|
||||||
default:
|
default:
|
||||||
return {
|
return {
|
||||||
jsonrpc: '2.0',
|
jsonrpc: '2.0',
|
||||||
@@ -388,14 +405,36 @@ export class McpRouter {
|
|||||||
return this.handleProposePrompt(request, context);
|
return this.handleProposePrompt(request, context);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Intercept pagination page requests before routing to upstream
|
||||||
|
const toolArgs = (params?.['arguments'] ?? {}) as Record<string, unknown>;
|
||||||
|
if (this.paginator) {
|
||||||
|
const paginationReq = ResponsePaginator.extractPaginationParams(toolArgs);
|
||||||
|
if (paginationReq) {
|
||||||
|
const pageResult = this.paginator.getPage(paginationReq.resultId, paginationReq.page);
|
||||||
|
if (pageResult) {
|
||||||
|
return { jsonrpc: '2.0', id: request.id, result: pageResult };
|
||||||
|
}
|
||||||
|
return {
|
||||||
|
jsonrpc: '2.0',
|
||||||
|
id: request.id,
|
||||||
|
result: {
|
||||||
|
content: [{
|
||||||
|
type: 'text',
|
||||||
|
text: 'Cached result not found (expired or invalid _resultId). Please re-call the tool without _resultId/_page to get a fresh result.',
|
||||||
|
}],
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// If no processor or tool shouldn't be processed, route directly
|
// If no processor or tool shouldn't be processed, route directly
|
||||||
if (!this.llmProcessor || !toolName || !this.llmProcessor.shouldProcess('tools/call', toolName)) {
|
if (!this.llmProcessor || !toolName || !this.llmProcessor.shouldProcess('tools/call', toolName)) {
|
||||||
return this.routeNamespacedCall(request, 'name', this.toolToServer);
|
const response = await this.routeNamespacedCall(request, 'name', this.toolToServer);
|
||||||
|
return this.maybePaginate(toolName, response);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Preprocess request params
|
// Preprocess request params
|
||||||
const toolParams = (params?.['arguments'] ?? {}) as Record<string, unknown>;
|
const processed = await this.llmProcessor.preprocessRequest(toolName, toolArgs);
|
||||||
const processed = await this.llmProcessor.preprocessRequest(toolName, toolParams);
|
|
||||||
const processedRequest: JsonRpcRequest = processed.optimized
|
const processedRequest: JsonRpcRequest = processed.optimized
|
||||||
? { ...request, params: { ...params, arguments: processed.params } }
|
? { ...request, params: { ...params, arguments: processed.params } }
|
||||||
: request;
|
: request;
|
||||||
@@ -403,6 +442,10 @@ export class McpRouter {
|
|||||||
// Route to upstream
|
// Route to upstream
|
||||||
const response = await this.routeNamespacedCall(processedRequest, 'name', this.toolToServer);
|
const response = await this.routeNamespacedCall(processedRequest, 'name', this.toolToServer);
|
||||||
|
|
||||||
|
// Paginate if response is large (skip LLM filtering for paginated responses)
|
||||||
|
const paginated = await this.maybePaginate(toolName, response);
|
||||||
|
if (paginated !== response) return paginated;
|
||||||
|
|
||||||
// Filter response
|
// Filter response
|
||||||
if (response.error) return response;
|
if (response.error) return response;
|
||||||
const filtered = await this.llmProcessor.filterResponse(toolName, response);
|
const filtered = await this.llmProcessor.filterResponse(toolName, response);
|
||||||
@@ -412,6 +455,21 @@ export class McpRouter {
|
|||||||
return response;
|
return response;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* If the response is large enough, paginate it and return the index instead.
|
||||||
|
*/
|
||||||
|
private async maybePaginate(toolName: string | undefined, response: JsonRpcResponse): Promise<JsonRpcResponse> {
|
||||||
|
if (!this.paginator || !toolName || response.error) return response;
|
||||||
|
|
||||||
|
const raw = JSON.stringify(response.result);
|
||||||
|
if (!this.paginator.shouldPaginate(raw)) return response;
|
||||||
|
|
||||||
|
const paginated = await this.paginator.paginate(toolName, raw);
|
||||||
|
if (!paginated) return response;
|
||||||
|
|
||||||
|
return { jsonrpc: '2.0', id: response.id, result: paginated };
|
||||||
|
}
|
||||||
|
|
||||||
private async handleProposePrompt(request: JsonRpcRequest, context?: RouteContext): Promise<JsonRpcResponse> {
|
private async handleProposePrompt(request: JsonRpcRequest, context?: RouteContext): Promise<JsonRpcResponse> {
|
||||||
if (!this.mcpdClient || !this.projectName) {
|
if (!this.mcpdClient || !this.projectName) {
|
||||||
return {
|
return {
|
||||||
|
|||||||
65
src/mcplocal/tests/http/config.test.ts
Normal file
65
src/mcplocal/tests/http/config.test.ts
Normal file
@@ -0,0 +1,65 @@
|
|||||||
|
import { describe, it, expect, vi, afterEach } from 'vitest';
|
||||||
|
import { loadLlmConfig } from '../../src/http/config.js';
|
||||||
|
import { existsSync, readFileSync } from 'node:fs';
|
||||||
|
|
||||||
|
vi.mock('node:fs', async () => {
|
||||||
|
const actual = await vi.importActual<typeof import('node:fs')>('node:fs');
|
||||||
|
return {
|
||||||
|
...actual,
|
||||||
|
existsSync: vi.fn(),
|
||||||
|
readFileSync: vi.fn(),
|
||||||
|
};
|
||||||
|
});
|
||||||
|
|
||||||
|
afterEach(() => {
|
||||||
|
vi.restoreAllMocks();
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('loadLlmConfig', () => {
|
||||||
|
it('returns undefined when config file does not exist', () => {
|
||||||
|
vi.mocked(existsSync).mockReturnValue(false);
|
||||||
|
expect(loadLlmConfig()).toBeUndefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns undefined when config has no llm section', () => {
|
||||||
|
vi.mocked(existsSync).mockReturnValue(true);
|
||||||
|
vi.mocked(readFileSync).mockReturnValue(JSON.stringify({ mcplocalUrl: 'http://localhost:3200' }));
|
||||||
|
expect(loadLlmConfig()).toBeUndefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns undefined when provider is none', () => {
|
||||||
|
vi.mocked(existsSync).mockReturnValue(true);
|
||||||
|
vi.mocked(readFileSync).mockReturnValue(JSON.stringify({ llm: { provider: 'none' } }));
|
||||||
|
expect(loadLlmConfig()).toBeUndefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns LLM config when provider is configured', () => {
|
||||||
|
vi.mocked(existsSync).mockReturnValue(true);
|
||||||
|
vi.mocked(readFileSync).mockReturnValue(JSON.stringify({
|
||||||
|
llm: { provider: 'anthropic', model: 'claude-haiku-3-5-20241022' },
|
||||||
|
}));
|
||||||
|
const result = loadLlmConfig();
|
||||||
|
expect(result).toEqual({ provider: 'anthropic', model: 'claude-haiku-3-5-20241022' });
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns full LLM config with all fields', () => {
|
||||||
|
vi.mocked(existsSync).mockReturnValue(true);
|
||||||
|
vi.mocked(readFileSync).mockReturnValue(JSON.stringify({
|
||||||
|
llm: { provider: 'vllm', model: 'my-model', url: 'http://gpu:8000' },
|
||||||
|
}));
|
||||||
|
const result = loadLlmConfig();
|
||||||
|
expect(result).toEqual({ provider: 'vllm', model: 'my-model', url: 'http://gpu:8000' });
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns undefined on malformed JSON', () => {
|
||||||
|
vi.mocked(existsSync).mockReturnValue(true);
|
||||||
|
vi.mocked(readFileSync).mockReturnValue('NOT JSON!!!');
|
||||||
|
expect(loadLlmConfig()).toBeUndefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns undefined on read error', () => {
|
||||||
|
vi.mocked(existsSync).mockReturnValue(true);
|
||||||
|
vi.mocked(readFileSync).mockImplementation(() => { throw new Error('EACCES'); });
|
||||||
|
expect(loadLlmConfig()).toBeUndefined();
|
||||||
|
});
|
||||||
|
});
|
||||||
133
src/mcplocal/tests/llm-config.test.ts
Normal file
133
src/mcplocal/tests/llm-config.test.ts
Normal file
@@ -0,0 +1,133 @@
|
|||||||
|
import { describe, it, expect, vi } from 'vitest';
|
||||||
|
import { createProviderFromConfig } from '../src/llm-config.js';
|
||||||
|
import type { SecretStore } from '@mcpctl/shared';
|
||||||
|
|
||||||
|
function mockSecretStore(secrets: Record<string, string> = {}): SecretStore {
|
||||||
|
return {
|
||||||
|
get: vi.fn(async (key: string) => secrets[key] ?? null),
|
||||||
|
set: vi.fn(async () => {}),
|
||||||
|
delete: vi.fn(async () => true),
|
||||||
|
backend: () => 'mock',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
describe('createProviderFromConfig', () => {
|
||||||
|
it('returns empty registry for undefined config', async () => {
|
||||||
|
const store = mockSecretStore();
|
||||||
|
const registry = await createProviderFromConfig(undefined, store);
|
||||||
|
expect(registry.getActive()).toBeNull();
|
||||||
|
expect(registry.list()).toEqual([]);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns empty registry for provider=none', async () => {
|
||||||
|
const store = mockSecretStore();
|
||||||
|
const registry = await createProviderFromConfig({ provider: 'none' }, store);
|
||||||
|
expect(registry.getActive()).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('creates gemini-cli provider', async () => {
|
||||||
|
const store = mockSecretStore();
|
||||||
|
const registry = await createProviderFromConfig(
|
||||||
|
{ provider: 'gemini-cli', model: 'gemini-2.5-flash', binaryPath: '/usr/bin/gemini' },
|
||||||
|
store,
|
||||||
|
);
|
||||||
|
expect(registry.getActive()).not.toBeNull();
|
||||||
|
expect(registry.getActive()!.name).toBe('gemini-cli');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('creates ollama provider', async () => {
|
||||||
|
const store = mockSecretStore();
|
||||||
|
const registry = await createProviderFromConfig(
|
||||||
|
{ provider: 'ollama', model: 'llama3.2', url: 'http://localhost:11434' },
|
||||||
|
store,
|
||||||
|
);
|
||||||
|
expect(registry.getActive()!.name).toBe('ollama');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('creates anthropic provider with API key from secret store', async () => {
|
||||||
|
const store = mockSecretStore({ 'anthropic-api-key': 'sk-ant-test' });
|
||||||
|
const registry = await createProviderFromConfig(
|
||||||
|
{ provider: 'anthropic', model: 'claude-haiku-3-5-20241022' },
|
||||||
|
store,
|
||||||
|
);
|
||||||
|
expect(registry.getActive()!.name).toBe('anthropic');
|
||||||
|
expect(store.get).toHaveBeenCalledWith('anthropic-api-key');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns empty registry when anthropic API key is missing', async () => {
|
||||||
|
const store = mockSecretStore();
|
||||||
|
const stderrSpy = vi.spyOn(process.stderr, 'write').mockImplementation(() => true);
|
||||||
|
const registry = await createProviderFromConfig(
|
||||||
|
{ provider: 'anthropic', model: 'claude-haiku-3-5-20241022' },
|
||||||
|
store,
|
||||||
|
);
|
||||||
|
expect(registry.getActive()).toBeNull();
|
||||||
|
expect(stderrSpy).toHaveBeenCalledWith(expect.stringContaining('Anthropic API key not found'));
|
||||||
|
stderrSpy.mockRestore();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('creates openai provider with API key from secret store', async () => {
|
||||||
|
const store = mockSecretStore({ 'openai-api-key': 'sk-test' });
|
||||||
|
const registry = await createProviderFromConfig(
|
||||||
|
{ provider: 'openai', model: 'gpt-4o', url: 'https://api.openai.com' },
|
||||||
|
store,
|
||||||
|
);
|
||||||
|
expect(registry.getActive()!.name).toBe('openai');
|
||||||
|
expect(store.get).toHaveBeenCalledWith('openai-api-key');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns empty registry when openai API key is missing', async () => {
|
||||||
|
const store = mockSecretStore();
|
||||||
|
const stderrSpy = vi.spyOn(process.stderr, 'write').mockImplementation(() => true);
|
||||||
|
const registry = await createProviderFromConfig(
|
||||||
|
{ provider: 'openai' },
|
||||||
|
store,
|
||||||
|
);
|
||||||
|
expect(registry.getActive()).toBeNull();
|
||||||
|
stderrSpy.mockRestore();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('creates deepseek provider with API key from secret store', async () => {
|
||||||
|
const store = mockSecretStore({ 'deepseek-api-key': 'sk-ds-test' });
|
||||||
|
const registry = await createProviderFromConfig(
|
||||||
|
{ provider: 'deepseek', model: 'deepseek-chat' },
|
||||||
|
store,
|
||||||
|
);
|
||||||
|
expect(registry.getActive()!.name).toBe('deepseek');
|
||||||
|
expect(store.get).toHaveBeenCalledWith('deepseek-api-key');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns empty registry when deepseek API key is missing', async () => {
|
||||||
|
const store = mockSecretStore();
|
||||||
|
const stderrSpy = vi.spyOn(process.stderr, 'write').mockImplementation(() => true);
|
||||||
|
const registry = await createProviderFromConfig(
|
||||||
|
{ provider: 'deepseek' },
|
||||||
|
store,
|
||||||
|
);
|
||||||
|
expect(registry.getActive()).toBeNull();
|
||||||
|
stderrSpy.mockRestore();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('creates vllm provider using OpenAI provider', async () => {
|
||||||
|
const store = mockSecretStore();
|
||||||
|
const registry = await createProviderFromConfig(
|
||||||
|
{ provider: 'vllm', model: 'my-model', url: 'http://gpu-server:8000' },
|
||||||
|
store,
|
||||||
|
);
|
||||||
|
// vLLM reuses OpenAI provider under the hood
|
||||||
|
expect(registry.getActive()).not.toBeNull();
|
||||||
|
expect(registry.getActive()!.name).toBe('openai');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns empty registry when vllm URL is missing', async () => {
|
||||||
|
const store = mockSecretStore();
|
||||||
|
const stderrSpy = vi.spyOn(process.stderr, 'write').mockImplementation(() => true);
|
||||||
|
const registry = await createProviderFromConfig(
|
||||||
|
{ provider: 'vllm' },
|
||||||
|
store,
|
||||||
|
);
|
||||||
|
expect(registry.getActive()).toBeNull();
|
||||||
|
expect(stderrSpy).toHaveBeenCalledWith(expect.stringContaining('vLLM URL not configured'));
|
||||||
|
stderrSpy.mockRestore();
|
||||||
|
});
|
||||||
|
});
|
||||||
433
src/mcplocal/tests/pagination.test.ts
Normal file
433
src/mcplocal/tests/pagination.test.ts
Normal file
@@ -0,0 +1,433 @@
|
|||||||
|
import { describe, it, expect, vi, afterEach } from 'vitest';
|
||||||
|
import { ResponsePaginator, DEFAULT_PAGINATION_CONFIG } from '../src/llm/pagination.js';
|
||||||
|
import type { ProviderRegistry } from '../src/providers/registry.js';
|
||||||
|
import type { LlmProvider } from '../src/providers/types.js';
|
||||||
|
|
||||||
|
function makeProvider(response: string): ProviderRegistry {
|
||||||
|
const provider: LlmProvider = {
|
||||||
|
name: 'test',
|
||||||
|
isAvailable: () => true,
|
||||||
|
complete: vi.fn().mockResolvedValue({ content: response }),
|
||||||
|
};
|
||||||
|
return {
|
||||||
|
getActive: () => provider,
|
||||||
|
register: vi.fn(),
|
||||||
|
setActive: vi.fn(),
|
||||||
|
listProviders: () => [{ name: 'test', available: true, active: true }],
|
||||||
|
} as unknown as ProviderRegistry;
|
||||||
|
}
|
||||||
|
|
||||||
|
function makeLargeString(size: number, pattern = 'x'): string {
|
||||||
|
return pattern.repeat(size);
|
||||||
|
}
|
||||||
|
|
||||||
|
function makeLargeStringWithNewlines(size: number, lineLen = 100): string {
|
||||||
|
const lines: string[] = [];
|
||||||
|
let total = 0;
|
||||||
|
let lineNum = 0;
|
||||||
|
while (total < size) {
|
||||||
|
const line = `line-${String(lineNum).padStart(5, '0')} ${'x'.repeat(lineLen - 15)}`;
|
||||||
|
lines.push(line);
|
||||||
|
total += line.length + 1; // +1 for newline
|
||||||
|
lineNum++;
|
||||||
|
}
|
||||||
|
return lines.join('\n');
|
||||||
|
}
|
||||||
|
|
||||||
|
describe('ResponsePaginator', () => {
|
||||||
|
afterEach(() => {
|
||||||
|
vi.restoreAllMocks();
|
||||||
|
});
|
||||||
|
|
||||||
|
// --- shouldPaginate ---
|
||||||
|
|
||||||
|
describe('shouldPaginate', () => {
|
||||||
|
it('returns false for strings below threshold', () => {
|
||||||
|
const paginator = new ResponsePaginator(null);
|
||||||
|
expect(paginator.shouldPaginate('short string')).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns false for strings just below threshold', () => {
|
||||||
|
const paginator = new ResponsePaginator(null);
|
||||||
|
const str = makeLargeString(DEFAULT_PAGINATION_CONFIG.sizeThreshold - 1);
|
||||||
|
expect(paginator.shouldPaginate(str)).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns true for strings at threshold', () => {
|
||||||
|
const paginator = new ResponsePaginator(null);
|
||||||
|
const str = makeLargeString(DEFAULT_PAGINATION_CONFIG.sizeThreshold);
|
||||||
|
expect(paginator.shouldPaginate(str)).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns true for strings above threshold', () => {
|
||||||
|
const paginator = new ResponsePaginator(null);
|
||||||
|
const str = makeLargeString(DEFAULT_PAGINATION_CONFIG.sizeThreshold + 1000);
|
||||||
|
expect(paginator.shouldPaginate(str)).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('respects custom threshold', () => {
|
||||||
|
const paginator = new ResponsePaginator(null, { sizeThreshold: 100 });
|
||||||
|
expect(paginator.shouldPaginate('x'.repeat(99))).toBe(false);
|
||||||
|
expect(paginator.shouldPaginate('x'.repeat(100))).toBe(true);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// --- paginate (no LLM) ---
|
||||||
|
|
||||||
|
describe('paginate without LLM', () => {
|
||||||
|
it('returns null for small responses', async () => {
|
||||||
|
const paginator = new ResponsePaginator(null);
|
||||||
|
const result = await paginator.paginate('test/tool', 'small response');
|
||||||
|
expect(result).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('paginates large responses with simple index', async () => {
|
||||||
|
const paginator = new ResponsePaginator(null, { sizeThreshold: 100, pageSize: 50 });
|
||||||
|
const raw = makeLargeStringWithNewlines(200);
|
||||||
|
const result = await paginator.paginate('test/tool', raw);
|
||||||
|
|
||||||
|
expect(result).not.toBeNull();
|
||||||
|
expect(result!.content).toHaveLength(1);
|
||||||
|
expect(result!.content[0]!.type).toBe('text');
|
||||||
|
|
||||||
|
const text = result!.content[0]!.text;
|
||||||
|
expect(text).toContain('too large to return directly');
|
||||||
|
expect(text).toContain('_resultId');
|
||||||
|
expect(text).toContain('_page');
|
||||||
|
expect(text).not.toContain('AI-generated summaries');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('includes correct page count in index', async () => {
|
||||||
|
const paginator = new ResponsePaginator(null, { sizeThreshold: 100, pageSize: 50 });
|
||||||
|
const raw = 'x'.repeat(200);
|
||||||
|
const result = await paginator.paginate('test/tool', raw);
|
||||||
|
|
||||||
|
expect(result).not.toBeNull();
|
||||||
|
const text = result!.content[0]!.text;
|
||||||
|
// 200 chars / 50 per page = 4 pages
|
||||||
|
expect(text).toContain('4 pages');
|
||||||
|
expect(text).toContain('Page 1:');
|
||||||
|
expect(text).toContain('Page 4:');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('caches the result for later page retrieval', async () => {
|
||||||
|
const paginator = new ResponsePaginator(null, { sizeThreshold: 100, pageSize: 50 });
|
||||||
|
const raw = 'x'.repeat(200);
|
||||||
|
await paginator.paginate('test/tool', raw);
|
||||||
|
|
||||||
|
expect(paginator.cacheSize).toBe(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('includes page instructions with _resultId and _page', async () => {
|
||||||
|
const paginator = new ResponsePaginator(null, { sizeThreshold: 100, pageSize: 50 });
|
||||||
|
const raw = 'x'.repeat(200);
|
||||||
|
const result = await paginator.paginate('test/tool', raw);
|
||||||
|
|
||||||
|
const text = result!.content[0]!.text;
|
||||||
|
expect(text).toContain('"_resultId"');
|
||||||
|
expect(text).toContain('"_page"');
|
||||||
|
expect(text).toContain('"all"');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// --- paginate (with LLM) ---
|
||||||
|
|
||||||
|
describe('paginate with LLM', () => {
|
||||||
|
it('generates smart index when provider available', async () => {
|
||||||
|
const summaries = JSON.stringify([
|
||||||
|
{ page: 1, summary: 'Configuration nodes and global settings' },
|
||||||
|
{ page: 2, summary: 'HTTP request nodes and API integrations' },
|
||||||
|
]);
|
||||||
|
const registry = makeProvider(summaries);
|
||||||
|
const paginator = new ResponsePaginator(registry, { sizeThreshold: 100, pageSize: 60 });
|
||||||
|
const raw = makeLargeStringWithNewlines(150);
|
||||||
|
const result = await paginator.paginate('node-red/get_flows', raw);
|
||||||
|
|
||||||
|
expect(result).not.toBeNull();
|
||||||
|
const text = result!.content[0]!.text;
|
||||||
|
expect(text).toContain('AI-generated summaries');
|
||||||
|
expect(text).toContain('Configuration nodes and global settings');
|
||||||
|
expect(text).toContain('HTTP request nodes and API integrations');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('falls back to simple index on LLM failure', async () => {
|
||||||
|
const provider: LlmProvider = {
|
||||||
|
name: 'test',
|
||||||
|
isAvailable: () => true,
|
||||||
|
complete: vi.fn().mockRejectedValue(new Error('LLM unavailable')),
|
||||||
|
};
|
||||||
|
const registry = {
|
||||||
|
getActive: () => provider,
|
||||||
|
register: vi.fn(),
|
||||||
|
setActive: vi.fn(),
|
||||||
|
listProviders: () => [{ name: 'test', available: true, active: true }],
|
||||||
|
} as unknown as ProviderRegistry;
|
||||||
|
|
||||||
|
const paginator = new ResponsePaginator(registry, { sizeThreshold: 100, pageSize: 50 });
|
||||||
|
const raw = 'x'.repeat(200);
|
||||||
|
const result = await paginator.paginate('test/tool', raw);
|
||||||
|
|
||||||
|
expect(result).not.toBeNull();
|
||||||
|
const text = result!.content[0]!.text;
|
||||||
|
// Should NOT contain AI-generated label
|
||||||
|
expect(text).not.toContain('AI-generated summaries');
|
||||||
|
expect(text).toContain('Page 1:');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('sends page previews to LLM, not full content', async () => {
|
||||||
|
const completeFn = vi.fn().mockResolvedValue({
|
||||||
|
content: JSON.stringify([
|
||||||
|
{ page: 1, summary: 'test' },
|
||||||
|
{ page: 2, summary: 'test2' },
|
||||||
|
{ page: 3, summary: 'test3' },
|
||||||
|
]),
|
||||||
|
});
|
||||||
|
const provider: LlmProvider = {
|
||||||
|
name: 'test',
|
||||||
|
isAvailable: () => true,
|
||||||
|
complete: completeFn,
|
||||||
|
};
|
||||||
|
const registry = {
|
||||||
|
getActive: () => provider,
|
||||||
|
register: vi.fn(),
|
||||||
|
setActive: vi.fn(),
|
||||||
|
listProviders: () => [{ name: 'test', available: true, active: true }],
|
||||||
|
} as unknown as ProviderRegistry;
|
||||||
|
|
||||||
|
// Use a large enough string (3000 chars, pages of 1000) so previews (500 per page) are smaller than raw
|
||||||
|
const paginator = new ResponsePaginator(registry, { sizeThreshold: 2000, pageSize: 1000 });
|
||||||
|
const raw = makeLargeStringWithNewlines(3000);
|
||||||
|
await paginator.paginate('test/tool', raw);
|
||||||
|
|
||||||
|
expect(completeFn).toHaveBeenCalledOnce();
|
||||||
|
const call = completeFn.mock.calls[0]![0]!;
|
||||||
|
const userMsg = call.messages.find((m: { role: string }) => m.role === 'user');
|
||||||
|
// Should contain page preview markers
|
||||||
|
expect(userMsg.content).toContain('Page 1');
|
||||||
|
// The LLM prompt should be significantly smaller than the full content
|
||||||
|
// (each page sends ~500 chars preview, not full 1000 chars)
|
||||||
|
expect(userMsg.content.length).toBeLessThan(raw.length);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('falls back to simple when no active provider', async () => {
|
||||||
|
const registry = {
|
||||||
|
getActive: () => null,
|
||||||
|
register: vi.fn(),
|
||||||
|
setActive: vi.fn(),
|
||||||
|
listProviders: () => [],
|
||||||
|
} as unknown as ProviderRegistry;
|
||||||
|
|
||||||
|
const paginator = new ResponsePaginator(registry, { sizeThreshold: 100, pageSize: 50 });
|
||||||
|
const raw = 'x'.repeat(200);
|
||||||
|
const result = await paginator.paginate('test/tool', raw);
|
||||||
|
|
||||||
|
expect(result).not.toBeNull();
|
||||||
|
const text = result!.content[0]!.text;
|
||||||
|
expect(text).not.toContain('AI-generated summaries');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// --- getPage ---
|
||||||
|
|
||||||
|
// getPage(): retrieving individual pages of a previously paginated result.
// NOTE(review): these tests recover the resultId by regexing the human-readable
// index response for `"_resultId": "..."`; if that JSON formatting changes,
// every extraction below breaks — consider exposing the id directly for tests.
describe('getPage', () => {
  it('returns specific page content', async () => {
    const paginator = new ResponsePaginator(null, { sizeThreshold: 100, pageSize: 50 });
    const raw = 'AAAA'.repeat(25) + 'BBBB'.repeat(25); // 200 chars total
    await paginator.paginate('test/tool', raw);

    // Extract resultId from cache (there should be exactly 1 entry)
    expect(paginator.cacheSize).toBe(1);

    // We need the resultId — get it from the index response
    // (a second paginate call; its id is the one used below, not test/tool's)
    const indexResult = await paginator.paginate('test/tool2', 'C'.repeat(200));
    const text = indexResult!.content[0]!.text;
    const match = /"_resultId": "([^"]+)"/.exec(text);
    expect(match).not.toBeNull();
    const resultId = match![1]!;

    const page1 = paginator.getPage(resultId, 1);
    expect(page1).not.toBeNull();
    // Page header format ("Page 1/N") plus the page body ('C' content).
    expect(page1!.content[0]!.text).toContain('Page 1/');
    expect(page1!.content[0]!.text).toContain('C');
  });

  it('returns full content with _page=all', async () => {
    const paginator = new ResponsePaginator(null, { sizeThreshold: 100, pageSize: 50 });
    const raw = 'D'.repeat(200);
    const indexResult = await paginator.paginate('test/tool', raw);
    const match = /"_resultId": "([^"]+)"/.exec(indexResult!.content[0]!.text);
    const resultId = match![1]!;

    // 'all' bypasses paging and must return the raw content verbatim.
    const allPages = paginator.getPage(resultId, 'all');
    expect(allPages).not.toBeNull();
    expect(allPages!.content[0]!.text).toBe(raw);
  });

  it('returns null for unknown resultId (cache miss)', () => {
    const paginator = new ResponsePaginator(null);
    const result = paginator.getPage('nonexistent-id', 1);
    expect(result).toBeNull();
  });

  it('returns error for out-of-range page', async () => {
    const paginator = new ResponsePaginator(null, { sizeThreshold: 100, pageSize: 50 });
    const raw = 'x'.repeat(200);
    const indexResult = await paginator.paginate('test/tool', raw);
    const match = /"_resultId": "([^"]+)"/.exec(indexResult!.content[0]!.text);
    const resultId = match![1]!;

    // Out-of-range is a non-null error message response, NOT a cache miss.
    const page999 = paginator.getPage(resultId, 999);
    expect(page999).not.toBeNull();
    expect(page999!.content[0]!.text).toContain('out of range');
  });

  it('returns null after TTL expiry', async () => {
    // Freeze the clock so TTL comparison is deterministic.
    // NOTE(review): no vi.restoreAllMocks() in this block — presumably a
    // file-level afterEach restores Date.now; verify, or later tests see a
    // frozen clock.
    const now = Date.now();
    vi.spyOn(Date, 'now').mockReturnValue(now);

    const paginator = new ResponsePaginator(null, { sizeThreshold: 100, pageSize: 50, ttlMs: 1000 });
    const raw = 'x'.repeat(200);
    const indexResult = await paginator.paginate('test/tool', raw);
    const match = /"_resultId": "([^"]+)"/.exec(indexResult!.content[0]!.text);
    const resultId = match![1]!;

    // Within TTL — should work
    expect(paginator.getPage(resultId, 1)).not.toBeNull();

    // Past TTL — should be null
    vi.spyOn(Date, 'now').mockReturnValue(now + 1001);
    expect(paginator.getPage(resultId, 1)).toBeNull();
  });
});
|
||||||
|
|
||||||
|
// --- extractPaginationParams ---
|
||||||
|
|
||||||
|
describe('extractPaginationParams', () => {
|
||||||
|
it('returns null when no pagination params', () => {
|
||||||
|
expect(ResponsePaginator.extractPaginationParams({ query: 'test' })).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns null when only _resultId (no _page)', () => {
|
||||||
|
expect(ResponsePaginator.extractPaginationParams({ _resultId: 'abc' })).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns null when only _page (no _resultId)', () => {
|
||||||
|
expect(ResponsePaginator.extractPaginationParams({ _page: 1 })).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('extracts numeric page', () => {
|
||||||
|
const result = ResponsePaginator.extractPaginationParams({ _resultId: 'abc-123', _page: 2 });
|
||||||
|
expect(result).toEqual({ resultId: 'abc-123', page: 2 });
|
||||||
|
});
|
||||||
|
|
||||||
|
it('extracts _page=all', () => {
|
||||||
|
const result = ResponsePaginator.extractPaginationParams({ _resultId: 'abc-123', _page: 'all' });
|
||||||
|
expect(result).toEqual({ resultId: 'abc-123', page: 'all' });
|
||||||
|
});
|
||||||
|
|
||||||
|
it('rejects negative page numbers', () => {
|
||||||
|
expect(ResponsePaginator.extractPaginationParams({ _resultId: 'abc', _page: -1 })).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('rejects zero page number', () => {
|
||||||
|
expect(ResponsePaginator.extractPaginationParams({ _resultId: 'abc', _page: 0 })).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('rejects non-integer page numbers', () => {
|
||||||
|
expect(ResponsePaginator.extractPaginationParams({ _resultId: 'abc', _page: 1.5 })).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('requires string resultId', () => {
|
||||||
|
expect(ResponsePaginator.extractPaginationParams({ _resultId: 123, _page: 1 })).toBeNull();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// --- Cache management ---
|
||||||
|
|
||||||
|
describe('cache management', () => {
|
||||||
|
it('evicts expired entries on paginate', async () => {
|
||||||
|
const now = Date.now();
|
||||||
|
vi.spyOn(Date, 'now').mockReturnValue(now);
|
||||||
|
|
||||||
|
const paginator = new ResponsePaginator(null, { sizeThreshold: 100, pageSize: 50, ttlMs: 1000 });
|
||||||
|
await paginator.paginate('test/tool1', 'x'.repeat(200));
|
||||||
|
expect(paginator.cacheSize).toBe(1);
|
||||||
|
|
||||||
|
// Advance past TTL and paginate again
|
||||||
|
vi.spyOn(Date, 'now').mockReturnValue(now + 1001);
|
||||||
|
await paginator.paginate('test/tool2', 'y'.repeat(200));
|
||||||
|
// Old entry evicted, new one added
|
||||||
|
expect(paginator.cacheSize).toBe(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('evicts LRU at capacity', async () => {
|
||||||
|
const paginator = new ResponsePaginator(null, { sizeThreshold: 100, pageSize: 50, maxCachedResults: 2 });
|
||||||
|
await paginator.paginate('test/tool1', 'A'.repeat(200));
|
||||||
|
await paginator.paginate('test/tool2', 'B'.repeat(200));
|
||||||
|
expect(paginator.cacheSize).toBe(2);
|
||||||
|
|
||||||
|
// Third entry should evict the first
|
||||||
|
await paginator.paginate('test/tool3', 'C'.repeat(200));
|
||||||
|
expect(paginator.cacheSize).toBe(2);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('clearCache removes all entries', async () => {
|
||||||
|
const paginator = new ResponsePaginator(null, { sizeThreshold: 100, pageSize: 50 });
|
||||||
|
await paginator.paginate('test/tool1', 'x'.repeat(200));
|
||||||
|
await paginator.paginate('test/tool2', 'y'.repeat(200));
|
||||||
|
expect(paginator.cacheSize).toBe(2);
|
||||||
|
|
||||||
|
paginator.clearCache();
|
||||||
|
expect(paginator.cacheSize).toBe(0);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// --- Page splitting ---
|
||||||
|
|
||||||
|
describe('page splitting', () => {
|
||||||
|
it('breaks at newline boundaries', async () => {
|
||||||
|
// Create content where a newline falls within the page boundary
|
||||||
|
const paginator = new ResponsePaginator(null, { sizeThreshold: 100, pageSize: 60 });
|
||||||
|
const lines = Array.from({ length: 10 }, (_, i) => `line${String(i).padStart(3, '0')} ${'x'.repeat(20)}`);
|
||||||
|
const raw = lines.join('\n');
|
||||||
|
// raw is ~269 chars
|
||||||
|
const result = await paginator.paginate('test/tool', raw);
|
||||||
|
|
||||||
|
expect(result).not.toBeNull();
|
||||||
|
// Pages should break at newline boundaries, not mid-line
|
||||||
|
const text = result!.content[0]!.text;
|
||||||
|
const match = /"_resultId": "([^"]+)"/.exec(text);
|
||||||
|
const resultId = match![1]!;
|
||||||
|
|
||||||
|
const page1 = paginator.getPage(resultId, 1);
|
||||||
|
expect(page1).not.toBeNull();
|
||||||
|
// Page content should end at a newline boundary (no partial lines)
|
||||||
|
const pageText = page1!.content[0]!.text;
|
||||||
|
// Remove the header line
|
||||||
|
const contentStart = pageText.indexOf('\n\n') + 2;
|
||||||
|
const pageContent = pageText.slice(contentStart);
|
||||||
|
// Content should contain complete lines
|
||||||
|
expect(pageContent).toMatch(/line\d{3}/);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('handles content without newlines', async () => {
|
||||||
|
const paginator = new ResponsePaginator(null, { sizeThreshold: 100, pageSize: 50 });
|
||||||
|
const raw = 'x'.repeat(200); // No newlines at all
|
||||||
|
const result = await paginator.paginate('test/tool', raw);
|
||||||
|
|
||||||
|
expect(result).not.toBeNull();
|
||||||
|
const text = result!.content[0]!.text;
|
||||||
|
expect(text).toContain('4 pages'); // 200/50 = 4
|
||||||
|
});
|
||||||
|
|
||||||
|
it('handles content that fits exactly in one page at threshold', async () => {
|
||||||
|
const paginator = new ResponsePaginator(null, { sizeThreshold: 100, pageSize: 100 });
|
||||||
|
const raw = 'x'.repeat(100); // Exactly at threshold and page size
|
||||||
|
const result = await paginator.paginate('test/tool', raw);
|
||||||
|
|
||||||
|
expect(result).not.toBeNull();
|
||||||
|
const text = result!.content[0]!.text;
|
||||||
|
expect(text).toContain('1 pages');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
@@ -2,3 +2,4 @@ export * from './types/index.js';
|
|||||||
export * from './validation/index.js';
|
export * from './validation/index.js';
|
||||||
export * from './constants/index.js';
|
export * from './constants/index.js';
|
||||||
export * from './utils/index.js';
|
export * from './utils/index.js';
|
||||||
|
export * from './secrets/index.js';
|
||||||
|
|||||||
63
src/shared/src/secrets/file-store.ts
Normal file
63
src/shared/src/secrets/file-store.ts
Normal file
@@ -0,0 +1,63 @@
|
|||||||
|
import { existsSync, mkdirSync, readFileSync, writeFileSync, chmodSync } from 'node:fs';
|
||||||
|
import { join } from 'node:path';
|
||||||
|
import { homedir } from 'node:os';
|
||||||
|
import type { SecretStore, SecretStoreDeps } from './types.js';
|
||||||
|
|
||||||
|
function defaultConfigDir(): string {
|
||||||
|
return join(homedir(), '.mcpctl');
|
||||||
|
}
|
||||||
|
|
||||||
|
function secretsPath(configDir: string): string {
|
||||||
|
return join(configDir, 'secrets');
|
||||||
|
}
|
||||||
|
|
||||||
|
export class FileSecretStore implements SecretStore {
|
||||||
|
private readonly configDir: string;
|
||||||
|
|
||||||
|
constructor(deps?: SecretStoreDeps) {
|
||||||
|
this.configDir = deps?.configDir ?? defaultConfigDir();
|
||||||
|
}
|
||||||
|
|
||||||
|
backend(): string {
|
||||||
|
return 'file';
|
||||||
|
}
|
||||||
|
|
||||||
|
async get(key: string): Promise<string | null> {
|
||||||
|
const data = this.readAll();
|
||||||
|
return data[key] ?? null;
|
||||||
|
}
|
||||||
|
|
||||||
|
async set(key: string, value: string): Promise<void> {
|
||||||
|
const data = this.readAll();
|
||||||
|
data[key] = value;
|
||||||
|
this.writeAll(data);
|
||||||
|
}
|
||||||
|
|
||||||
|
async delete(key: string): Promise<boolean> {
|
||||||
|
const data = this.readAll();
|
||||||
|
if (!(key in data)) return false;
|
||||||
|
delete data[key];
|
||||||
|
this.writeAll(data);
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
private readAll(): Record<string, string> {
|
||||||
|
const path = secretsPath(this.configDir);
|
||||||
|
if (!existsSync(path)) return {};
|
||||||
|
try {
|
||||||
|
const raw = readFileSync(path, 'utf-8');
|
||||||
|
return JSON.parse(raw) as Record<string, string>;
|
||||||
|
} catch {
|
||||||
|
return {};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private writeAll(data: Record<string, string>): void {
|
||||||
|
if (!existsSync(this.configDir)) {
|
||||||
|
mkdirSync(this.configDir, { recursive: true });
|
||||||
|
}
|
||||||
|
const path = secretsPath(this.configDir);
|
||||||
|
writeFileSync(path, JSON.stringify(data, null, 2) + '\n', 'utf-8');
|
||||||
|
chmodSync(path, 0o600);
|
||||||
|
}
|
||||||
|
}
|
||||||
97
src/shared/src/secrets/gnome-keyring.ts
Normal file
97
src/shared/src/secrets/gnome-keyring.ts
Normal file
@@ -0,0 +1,97 @@
|
|||||||
|
import { spawn } from 'node:child_process';
|
||||||
|
import { execFile } from 'node:child_process';
|
||||||
|
import { promisify } from 'node:util';
|
||||||
|
import type { SecretStore } from './types.js';
|
||||||
|
|
||||||
|
const execFileAsync = promisify(execFile);
|
||||||
|
const SERVICE = 'mcpctl';
|
||||||
|
|
||||||
|
export type RunCommand = (cmd: string, args: string[], stdin?: string) => Promise<{ stdout: string; code: number }>;
|
||||||
|
|
||||||
|
function defaultRunCommand(cmd: string, args: string[], stdin?: string): Promise<{ stdout: string; code: number }> {
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
const child = spawn(cmd, args, {
|
||||||
|
stdio: ['pipe', 'pipe', 'pipe'],
|
||||||
|
timeout: 5000,
|
||||||
|
});
|
||||||
|
|
||||||
|
const stdoutChunks: Buffer[] = [];
|
||||||
|
child.stdout.on('data', (chunk: Buffer) => stdoutChunks.push(chunk));
|
||||||
|
|
||||||
|
child.on('error', reject);
|
||||||
|
child.on('close', (code) => {
|
||||||
|
const stdout = Buffer.concat(stdoutChunks).toString('utf-8');
|
||||||
|
resolve({ stdout, code: code ?? 1 });
|
||||||
|
});
|
||||||
|
|
||||||
|
if (stdin !== undefined) {
|
||||||
|
child.stdin.write(stdin);
|
||||||
|
child.stdin.end();
|
||||||
|
} else {
|
||||||
|
child.stdin.end();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface GnomeKeyringDeps {
|
||||||
|
run?: RunCommand;
|
||||||
|
}
|
||||||
|
|
||||||
|
export class GnomeKeyringStore implements SecretStore {
|
||||||
|
private readonly run: RunCommand;
|
||||||
|
|
||||||
|
constructor(deps?: GnomeKeyringDeps) {
|
||||||
|
this.run = deps?.run ?? defaultRunCommand;
|
||||||
|
}
|
||||||
|
|
||||||
|
backend(): string {
|
||||||
|
return 'gnome-keyring';
|
||||||
|
}
|
||||||
|
|
||||||
|
async get(key: string): Promise<string | null> {
|
||||||
|
try {
|
||||||
|
const { stdout, code } = await this.run(
|
||||||
|
'secret-tool', ['lookup', 'service', SERVICE, 'key', key],
|
||||||
|
);
|
||||||
|
if (code !== 0 || !stdout) return null;
|
||||||
|
return stdout;
|
||||||
|
} catch {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async set(key: string, value: string): Promise<void> {
|
||||||
|
const { code } = await this.run(
|
||||||
|
'secret-tool',
|
||||||
|
['store', '--label', `mcpctl: ${key}`, 'service', SERVICE, 'key', key],
|
||||||
|
value,
|
||||||
|
);
|
||||||
|
if (code !== 0) {
|
||||||
|
throw new Error(`secret-tool store exited with code ${code}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async delete(key: string): Promise<boolean> {
|
||||||
|
try {
|
||||||
|
const { code } = await this.run(
|
||||||
|
'secret-tool', ['clear', 'service', SERVICE, 'key', key],
|
||||||
|
);
|
||||||
|
return code === 0;
|
||||||
|
} catch {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
static async isAvailable(deps?: { run?: RunCommand }): Promise<boolean> {
|
||||||
|
try {
|
||||||
|
if (deps?.run) {
|
||||||
|
const { code } = await deps.run('secret-tool', ['--version']);
|
||||||
|
return code === 0;
|
||||||
|
}
|
||||||
|
await execFileAsync('secret-tool', ['--version'], { timeout: 3000 });
|
||||||
|
return true;
|
||||||
|
} catch {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
15
src/shared/src/secrets/index.ts
Normal file
15
src/shared/src/secrets/index.ts
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
export type { SecretStore, SecretStoreDeps } from './types.js';
|
||||||
|
export { FileSecretStore } from './file-store.js';
|
||||||
|
export { GnomeKeyringStore } from './gnome-keyring.js';
|
||||||
|
export type { GnomeKeyringDeps, RunCommand } from './gnome-keyring.js';
|
||||||
|
|
||||||
|
import { GnomeKeyringStore } from './gnome-keyring.js';
|
||||||
|
import { FileSecretStore } from './file-store.js';
|
||||||
|
import type { SecretStore, SecretStoreDeps } from './types.js';
|
||||||
|
|
||||||
|
export async function createSecretStore(deps?: SecretStoreDeps): Promise<SecretStore> {
|
||||||
|
if (await GnomeKeyringStore.isAvailable()) {
|
||||||
|
return new GnomeKeyringStore();
|
||||||
|
}
|
||||||
|
return new FileSecretStore(deps);
|
||||||
|
}
|
||||||
10
src/shared/src/secrets/types.ts
Normal file
10
src/shared/src/secrets/types.ts
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
/**
 * Minimal async key/value contract for storing secrets (API keys, tokens).
 * Implemented by GnomeKeyringStore (OS keyring) and FileSecretStore (JSON file).
 */
export interface SecretStore {
  /** Returns the value for `key`, or null if not stored. */
  get(key: string): Promise<string | null>;
  /** Stores `value` under `key`, overwriting any existing value. */
  set(key: string, value: string): Promise<void>;
  /** Removes `key`; resolves true when the key was removed. */
  delete(key: string): Promise<boolean>;
  /** Short identifier of the storage backend (e.g. 'file', 'gnome-keyring'). */
  backend(): string;
}

/** Construction options shared by secret-store implementations. */
export interface SecretStoreDeps {
  /** Directory for file-based storage; defaults to ~/.mcpctl (file backend only). */
  configDir?: string;
}
|
||||||
24
src/shared/tests/secrets/factory.test.ts
Normal file
24
src/shared/tests/secrets/factory.test.ts
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
import { describe, it, expect, vi, afterEach } from 'vitest';
|
||||||
|
import { createSecretStore } from '../../src/secrets/index.js';
|
||||||
|
import { GnomeKeyringStore } from '../../src/secrets/gnome-keyring.js';
|
||||||
|
import { FileSecretStore } from '../../src/secrets/file-store.js';
|
||||||
|
|
||||||
|
afterEach(() => {
|
||||||
|
vi.restoreAllMocks();
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('createSecretStore', () => {
|
||||||
|
it('returns GnomeKeyringStore when secret-tool is available', async () => {
|
||||||
|
vi.spyOn(GnomeKeyringStore, 'isAvailable').mockResolvedValue(true);
|
||||||
|
const store = await createSecretStore();
|
||||||
|
expect(store.backend()).toBe('gnome-keyring');
|
||||||
|
expect(store).toBeInstanceOf(GnomeKeyringStore);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns FileSecretStore when secret-tool is not available', async () => {
|
||||||
|
vi.spyOn(GnomeKeyringStore, 'isAvailable').mockResolvedValue(false);
|
||||||
|
const store = await createSecretStore();
|
||||||
|
expect(store.backend()).toBe('file');
|
||||||
|
expect(store).toBeInstanceOf(FileSecretStore);
|
||||||
|
});
|
||||||
|
});
|
||||||
93
src/shared/tests/secrets/file-store.test.ts
Normal file
93
src/shared/tests/secrets/file-store.test.ts
Normal file
@@ -0,0 +1,93 @@
|
|||||||
|
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import { mkdtempSync, rmSync, statSync, existsSync, writeFileSync } from 'node:fs';
import { join } from 'node:path';
import { tmpdir } from 'node:os';
import { FileSecretStore } from '../../src/secrets/file-store.js';

// Fresh temporary config dir per test so cases never share a secrets file;
// removed again after each test.
let tempDir: string;

beforeEach(() => {
  tempDir = mkdtempSync(join(tmpdir(), 'mcpctl-secrets-test-'));
});

afterEach(() => {
  rmSync(tempDir, { recursive: true, force: true });
});

describe('FileSecretStore', () => {
  // --- get/set round-trips ---

  it('returns null for missing key', async () => {
    const store = new FileSecretStore({ configDir: tempDir });
    expect(await store.get('nonexistent')).toBeNull();
  });

  it('stores and retrieves a secret', async () => {
    const store = new FileSecretStore({ configDir: tempDir });
    await store.set('api-key', 'sk-12345');
    expect(await store.get('api-key')).toBe('sk-12345');
  });

  it('overwrites existing values', async () => {
    const store = new FileSecretStore({ configDir: tempDir });
    await store.set('api-key', 'old-value');
    await store.set('api-key', 'new-value');
    expect(await store.get('api-key')).toBe('new-value');
  });

  it('stores multiple keys', async () => {
    const store = new FileSecretStore({ configDir: tempDir });
    await store.set('key-a', 'value-a');
    await store.set('key-b', 'value-b');
    expect(await store.get('key-a')).toBe('value-a');
    expect(await store.get('key-b')).toBe('value-b');
  });

  // --- deletion semantics ---

  it('deletes a key', async () => {
    const store = new FileSecretStore({ configDir: tempDir });
    await store.set('api-key', 'sk-12345');
    expect(await store.delete('api-key')).toBe(true);
    expect(await store.get('api-key')).toBeNull();
  });

  it('returns false when deleting nonexistent key', async () => {
    const store = new FileSecretStore({ configDir: tempDir });
    expect(await store.delete('nonexistent')).toBe(false);
  });

  // --- filesystem behavior ---

  // POSIX-only expectation: the secrets file must be owner read/write only.
  it('sets 0600 permissions on secrets file', async () => {
    const store = new FileSecretStore({ configDir: tempDir });
    await store.set('api-key', 'sk-12345');
    const stat = statSync(join(tempDir, 'secrets'));
    expect(stat.mode & 0o777).toBe(0o600);
  });

  it('creates config dir if missing', async () => {
    const nested = join(tempDir, 'sub', 'dir');
    const store = new FileSecretStore({ configDir: nested });
    await store.set('api-key', 'sk-12345');
    expect(existsSync(join(nested, 'secrets'))).toBe(true);
  });

  // A corrupted store must degrade to "empty", never throw.
  it('recovers from corrupted JSON', async () => {
    writeFileSync(join(tempDir, 'secrets'), 'NOT JSON!!!', 'utf-8');
    const store = new FileSecretStore({ configDir: tempDir });
    // Should not throw, returns null for any key
    expect(await store.get('api-key')).toBeNull();
    // Should be able to write over corrupted file
    await store.set('api-key', 'fresh-value');
    expect(await store.get('api-key')).toBe('fresh-value');
  });

  it('reports file backend', () => {
    const store = new FileSecretStore({ configDir: tempDir });
    expect(store.backend()).toBe('file');
  });

  it('preserves other keys on delete', async () => {
    const store = new FileSecretStore({ configDir: tempDir });
    await store.set('key-a', 'value-a');
    await store.set('key-b', 'value-b');
    await store.delete('key-a');
    expect(await store.get('key-a')).toBeNull();
    expect(await store.get('key-b')).toBe('value-b');
  });
});
|
||||||
125
src/shared/tests/secrets/gnome-keyring.test.ts
Normal file
125
src/shared/tests/secrets/gnome-keyring.test.ts
Normal file
@@ -0,0 +1,125 @@
|
|||||||
|
import { describe, it, expect, vi } from 'vitest';
import { GnomeKeyringStore } from '../../src/secrets/gnome-keyring.js';
import type { RunCommand } from '../../src/secrets/gnome-keyring.js';

// Builds a RunCommand stub: each key of `responses` is matched as a substring
// of "<cmd> <joined args>"; the first match wins. Anything unmatched fails
// with exit code 1.
function mockRun(
  responses: Record<string, { stdout: string; code: number }>,
): RunCommand {
  return vi.fn(async (cmd: string, args: string[], _stdin?: string) => {
    const key = `${cmd} ${args.join(' ')}`;
    for (const [pattern, response] of Object.entries(responses)) {
      if (key.includes(pattern)) return response;
    }
    return { stdout: '', code: 1 };
  });
}

describe('GnomeKeyringStore', () => {
  describe('get', () => {
    it('returns value on success', async () => {
      const run = mockRun({ 'lookup': { stdout: 'my-secret', code: 0 } });
      const store = new GnomeKeyringStore({ run });
      expect(await store.get('api-key')).toBe('my-secret');
    });

    it('returns null on exit code 1', async () => {
      const run = mockRun({ 'lookup': { stdout: '', code: 1 } });
      const store = new GnomeKeyringStore({ run });
      expect(await store.get('api-key')).toBeNull();
    });

    it('returns null on empty stdout', async () => {
      const run = mockRun({ 'lookup': { stdout: '', code: 0 } });
      const store = new GnomeKeyringStore({ run });
      expect(await store.get('api-key')).toBeNull();
    });

    // A rejected run (e.g. spawn timeout) is swallowed and reported as "missing".
    it('returns null on error', async () => {
      const run = vi.fn().mockRejectedValue(new Error('timeout'));
      const store = new GnomeKeyringStore({ run });
      expect(await store.get('api-key')).toBeNull();
    });

    it('calls secret-tool with correct args', async () => {
      const run = vi.fn().mockResolvedValue({ stdout: 'val', code: 0 });
      const store = new GnomeKeyringStore({ run });
      await store.get('my-key');
      expect(run).toHaveBeenCalledWith(
        'secret-tool',
        ['lookup', 'service', 'mcpctl', 'key', 'my-key'],
      );
    });
  });

  describe('set', () => {
    // The secret value must travel via stdin (third arg), never via argv.
    it('calls secret-tool store with value as stdin', async () => {
      const run = vi.fn().mockResolvedValue({ stdout: '', code: 0 });
      const store = new GnomeKeyringStore({ run });
      await store.set('api-key', 'secret-value');
      expect(run).toHaveBeenCalledWith(
        'secret-tool',
        ['store', '--label', 'mcpctl: api-key', 'service', 'mcpctl', 'key', 'api-key'],
        'secret-value',
      );
    });

    it('throws on non-zero exit code', async () => {
      const run = vi.fn().mockResolvedValue({ stdout: '', code: 1 });
      const store = new GnomeKeyringStore({ run });
      await expect(store.set('api-key', 'val')).rejects.toThrow('exited with code 1');
    });
  });

  describe('delete', () => {
    it('returns true on success', async () => {
      const run = mockRun({ 'clear': { stdout: '', code: 0 } });
      const store = new GnomeKeyringStore({ run });
      expect(await store.delete('api-key')).toBe(true);
    });

    it('returns false on failure', async () => {
      const run = mockRun({ 'clear': { stdout: '', code: 1 } });
      const store = new GnomeKeyringStore({ run });
      expect(await store.delete('api-key')).toBe(false);
    });

    it('returns false on error', async () => {
      const run = vi.fn().mockRejectedValue(new Error('fail'));
      const store = new GnomeKeyringStore({ run });
      expect(await store.delete('api-key')).toBe(false);
    });

    it('calls secret-tool clear with correct args', async () => {
      const run = vi.fn().mockResolvedValue({ stdout: '', code: 0 });
      const store = new GnomeKeyringStore({ run });
      await store.delete('my-key');
      expect(run).toHaveBeenCalledWith(
        'secret-tool',
        ['clear', 'service', 'mcpctl', 'key', 'my-key'],
      );
    });
  });

  describe('isAvailable', () => {
    it('returns true when secret-tool exists', async () => {
      const run = vi.fn().mockResolvedValue({ stdout: '0.20', code: 0 });
      expect(await GnomeKeyringStore.isAvailable({ run })).toBe(true);
    });

    it('returns false when secret-tool not found', async () => {
      const run = vi.fn().mockRejectedValue(new Error('ENOENT'));
      expect(await GnomeKeyringStore.isAvailable({ run })).toBe(false);
    });

    it('returns false on non-zero exit', async () => {
      const run = vi.fn().mockResolvedValue({ stdout: '', code: 127 });
      expect(await GnomeKeyringStore.isAvailable({ run })).toBe(false);
    });
  });

  it('reports gnome-keyring backend', () => {
    const run = vi.fn().mockResolvedValue({ stdout: '', code: 0 });
    const store = new GnomeKeyringStore({ run });
    expect(store.backend()).toBe('gnome-keyring');
  });
});
|
||||||
Reference in New Issue
Block a user