Compare commits
15 Commits
feat/respo
...
feat/tiere
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
9ce705608b | ||
|
|
0824f8e635 | ||
|
|
9bd3127519 | ||
| e8ac500ae9 | |||
|
|
bed725b387 | ||
| 17a456d835 | |||
|
|
9481d394a1 | ||
|
|
bc769c4eeb | ||
| 6f534c8ba9 | |||
|
|
11da8b1fbf | ||
|
|
848868d45f | ||
|
|
869217a07a | ||
| 04d115933b | |||
|
|
7c23da10c6 | ||
| 32b4de4343 |
@@ -2,10 +2,10 @@ _mcpctl() {
|
|||||||
local cur prev words cword
|
local cur prev words cword
|
||||||
_init_completion || return
|
_init_completion || return
|
||||||
|
|
||||||
local commands="status login logout config get describe delete logs create edit apply backup restore mcp help"
|
local commands="status login logout config get describe delete logs create edit apply backup restore mcp approve help"
|
||||||
local project_commands="attach-server detach-server get describe delete logs create edit help"
|
local project_commands="attach-server detach-server get describe delete logs create edit help"
|
||||||
local global_opts="-v --version --daemon-url --direct --project -h --help"
|
local global_opts="-v --version --daemon-url --direct --project -h --help"
|
||||||
local resources="servers instances secrets templates projects users groups rbac"
|
local resources="servers instances secrets templates projects users groups rbac prompts promptrequests"
|
||||||
|
|
||||||
# Check if --project was given
|
# Check if --project was given
|
||||||
local has_project=false
|
local has_project=false
|
||||||
@@ -78,7 +78,7 @@ _mcpctl() {
|
|||||||
case "$subcmd" in
|
case "$subcmd" in
|
||||||
config)
|
config)
|
||||||
if [[ $((cword - subcmd_pos)) -eq 1 ]]; then
|
if [[ $((cword - subcmd_pos)) -eq 1 ]]; then
|
||||||
COMPREPLY=($(compgen -W "view set path reset claude impersonate help" -- "$cur"))
|
COMPREPLY=($(compgen -W "view set path reset claude claude-generate setup impersonate help" -- "$cur"))
|
||||||
fi
|
fi
|
||||||
return ;;
|
return ;;
|
||||||
status)
|
status)
|
||||||
@@ -114,7 +114,7 @@ _mcpctl() {
|
|||||||
return ;;
|
return ;;
|
||||||
create)
|
create)
|
||||||
if [[ $((cword - subcmd_pos)) -eq 1 ]]; then
|
if [[ $((cword - subcmd_pos)) -eq 1 ]]; then
|
||||||
COMPREPLY=($(compgen -W "server secret project user group rbac help" -- "$cur"))
|
COMPREPLY=($(compgen -W "server secret project user group rbac prompt promptrequest help" -- "$cur"))
|
||||||
fi
|
fi
|
||||||
return ;;
|
return ;;
|
||||||
apply)
|
apply)
|
||||||
@@ -150,6 +150,15 @@ _mcpctl() {
|
|||||||
fi
|
fi
|
||||||
COMPREPLY=($(compgen -W "$names" -- "$cur"))
|
COMPREPLY=($(compgen -W "$names" -- "$cur"))
|
||||||
return ;;
|
return ;;
|
||||||
|
approve)
|
||||||
|
if [[ -z "$resource_type" ]]; then
|
||||||
|
COMPREPLY=($(compgen -W "promptrequest" -- "$cur"))
|
||||||
|
else
|
||||||
|
local names
|
||||||
|
names=$(_mcpctl_resource_names "$resource_type")
|
||||||
|
COMPREPLY=($(compgen -W "$names" -- "$cur"))
|
||||||
|
fi
|
||||||
|
return ;;
|
||||||
help)
|
help)
|
||||||
COMPREPLY=($(compgen -W "$commands" -- "$cur"))
|
COMPREPLY=($(compgen -W "$commands" -- "$cur"))
|
||||||
return ;;
|
return ;;
|
||||||
|
|||||||
@@ -3,7 +3,7 @@
|
|||||||
# Erase any stale completions from previous versions
|
# Erase any stale completions from previous versions
|
||||||
complete -c mcpctl -e
|
complete -c mcpctl -e
|
||||||
|
|
||||||
set -l commands status login logout config get describe delete logs create edit apply backup restore mcp help
|
set -l commands status login logout config get describe delete logs create edit apply backup restore mcp approve help
|
||||||
set -l project_commands attach-server detach-server get describe delete logs create edit help
|
set -l project_commands attach-server detach-server get describe delete logs create edit help
|
||||||
|
|
||||||
# Disable file completions by default
|
# Disable file completions by default
|
||||||
@@ -28,7 +28,7 @@ function __mcpctl_has_project
|
|||||||
end
|
end
|
||||||
|
|
||||||
# Helper: check if a resource type has been selected after get/describe/delete/edit
|
# Helper: check if a resource type has been selected after get/describe/delete/edit
|
||||||
set -l resources servers instances secrets templates projects users groups rbac
|
set -l resources servers instances secrets templates projects users groups rbac prompts promptrequests
|
||||||
|
|
||||||
function __mcpctl_needs_resource_type
|
function __mcpctl_needs_resource_type
|
||||||
set -l tokens (commandline -opc)
|
set -l tokens (commandline -opc)
|
||||||
@@ -36,11 +36,11 @@ function __mcpctl_needs_resource_type
|
|||||||
for tok in $tokens
|
for tok in $tokens
|
||||||
if $found_cmd
|
if $found_cmd
|
||||||
# Check if next token after get/describe/delete/edit is a resource type
|
# Check if next token after get/describe/delete/edit is a resource type
|
||||||
if contains -- $tok servers instances secrets templates projects users groups rbac
|
if contains -- $tok servers instances secrets templates projects users groups rbac prompts promptrequests
|
||||||
return 1 # resource type already present
|
return 1 # resource type already present
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
if contains -- $tok get describe delete edit
|
if contains -- $tok get describe delete edit approve
|
||||||
set found_cmd true
|
set found_cmd true
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
@@ -55,12 +55,12 @@ function __mcpctl_get_resource_type
|
|||||||
set -l found_cmd false
|
set -l found_cmd false
|
||||||
for tok in $tokens
|
for tok in $tokens
|
||||||
if $found_cmd
|
if $found_cmd
|
||||||
if contains -- $tok servers instances secrets templates projects users groups rbac
|
if contains -- $tok servers instances secrets templates projects users groups rbac prompts promptrequests
|
||||||
echo $tok
|
echo $tok
|
||||||
return
|
return
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
if contains -- $tok get describe delete edit
|
if contains -- $tok get describe delete edit approve
|
||||||
set found_cmd true
|
set found_cmd true
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
@@ -139,6 +139,7 @@ complete -c mcpctl -n "not __mcpctl_has_project; and not __fish_seen_subcommand_
|
|||||||
complete -c mcpctl -n "not __mcpctl_has_project; and not __fish_seen_subcommand_from $commands" -a apply -d 'Apply configuration from file'
|
complete -c mcpctl -n "not __mcpctl_has_project; and not __fish_seen_subcommand_from $commands" -a apply -d 'Apply configuration from file'
|
||||||
complete -c mcpctl -n "not __mcpctl_has_project; and not __fish_seen_subcommand_from $commands" -a backup -d 'Backup configuration'
|
complete -c mcpctl -n "not __mcpctl_has_project; and not __fish_seen_subcommand_from $commands" -a backup -d 'Backup configuration'
|
||||||
complete -c mcpctl -n "not __mcpctl_has_project; and not __fish_seen_subcommand_from $commands" -a restore -d 'Restore from backup'
|
complete -c mcpctl -n "not __mcpctl_has_project; and not __fish_seen_subcommand_from $commands" -a restore -d 'Restore from backup'
|
||||||
|
complete -c mcpctl -n "not __mcpctl_has_project; and not __fish_seen_subcommand_from $commands" -a approve -d 'Approve a prompt request'
|
||||||
complete -c mcpctl -n "not __mcpctl_has_project; and not __fish_seen_subcommand_from $commands" -a help -d 'Show help'
|
complete -c mcpctl -n "not __mcpctl_has_project; and not __fish_seen_subcommand_from $commands" -a help -d 'Show help'
|
||||||
|
|
||||||
# Project-scoped commands (with --project)
|
# Project-scoped commands (with --project)
|
||||||
@@ -157,7 +158,7 @@ complete -c mcpctl -n "__fish_seen_subcommand_from get describe delete; and __mc
|
|||||||
complete -c mcpctl -n "__fish_seen_subcommand_from edit; and __mcpctl_needs_resource_type" -a 'servers projects' -d 'Resource type'
|
complete -c mcpctl -n "__fish_seen_subcommand_from edit; and __mcpctl_needs_resource_type" -a 'servers projects' -d 'Resource type'
|
||||||
|
|
||||||
# Resource names — after resource type is selected
|
# Resource names — after resource type is selected
|
||||||
complete -c mcpctl -n "__fish_seen_subcommand_from get describe delete edit; and not __mcpctl_needs_resource_type" -a '(__mcpctl_resource_names)' -d 'Resource name'
|
complete -c mcpctl -n "__fish_seen_subcommand_from get describe delete edit approve; and not __mcpctl_needs_resource_type" -a '(__mcpctl_resource_names)' -d 'Resource name'
|
||||||
|
|
||||||
# Helper: check if attach-server/detach-server already has a server argument
|
# Helper: check if attach-server/detach-server already has a server argument
|
||||||
function __mcpctl_needs_server_arg
|
function __mcpctl_needs_server_arg
|
||||||
@@ -196,22 +197,25 @@ complete -c mcpctl -n "__fish_seen_subcommand_from login" -l email -d 'Email add
|
|||||||
complete -c mcpctl -n "__fish_seen_subcommand_from login" -l password -d 'Password' -x
|
complete -c mcpctl -n "__fish_seen_subcommand_from login" -l password -d 'Password' -x
|
||||||
|
|
||||||
# config subcommands
|
# config subcommands
|
||||||
set -l config_cmds view set path reset claude claude-generate impersonate
|
set -l config_cmds view set path reset claude claude-generate setup impersonate
|
||||||
complete -c mcpctl -n "__fish_seen_subcommand_from config; and not __fish_seen_subcommand_from $config_cmds" -a view -d 'Show configuration'
|
complete -c mcpctl -n "__fish_seen_subcommand_from config; and not __fish_seen_subcommand_from $config_cmds" -a view -d 'Show configuration'
|
||||||
complete -c mcpctl -n "__fish_seen_subcommand_from config; and not __fish_seen_subcommand_from $config_cmds" -a set -d 'Set a config value'
|
complete -c mcpctl -n "__fish_seen_subcommand_from config; and not __fish_seen_subcommand_from $config_cmds" -a set -d 'Set a config value'
|
||||||
complete -c mcpctl -n "__fish_seen_subcommand_from config; and not __fish_seen_subcommand_from $config_cmds" -a path -d 'Show config file path'
|
complete -c mcpctl -n "__fish_seen_subcommand_from config; and not __fish_seen_subcommand_from $config_cmds" -a path -d 'Show config file path'
|
||||||
complete -c mcpctl -n "__fish_seen_subcommand_from config; and not __fish_seen_subcommand_from $config_cmds" -a reset -d 'Reset to defaults'
|
complete -c mcpctl -n "__fish_seen_subcommand_from config; and not __fish_seen_subcommand_from $config_cmds" -a reset -d 'Reset to defaults'
|
||||||
complete -c mcpctl -n "__fish_seen_subcommand_from config; and not __fish_seen_subcommand_from $config_cmds" -a claude -d 'Generate .mcp.json for project'
|
complete -c mcpctl -n "__fish_seen_subcommand_from config; and not __fish_seen_subcommand_from $config_cmds" -a claude -d 'Generate .mcp.json for project'
|
||||||
|
complete -c mcpctl -n "__fish_seen_subcommand_from config; and not __fish_seen_subcommand_from $config_cmds" -a setup -d 'Configure LLM provider'
|
||||||
complete -c mcpctl -n "__fish_seen_subcommand_from config; and not __fish_seen_subcommand_from $config_cmds" -a impersonate -d 'Impersonate a user'
|
complete -c mcpctl -n "__fish_seen_subcommand_from config; and not __fish_seen_subcommand_from $config_cmds" -a impersonate -d 'Impersonate a user'
|
||||||
|
|
||||||
# create subcommands
|
# create subcommands
|
||||||
set -l create_cmds server secret project user group rbac
|
set -l create_cmds server secret project user group rbac prompt promptrequest
|
||||||
complete -c mcpctl -n "__fish_seen_subcommand_from create; and not __fish_seen_subcommand_from $create_cmds" -a server -d 'Create a server'
|
complete -c mcpctl -n "__fish_seen_subcommand_from create; and not __fish_seen_subcommand_from $create_cmds" -a server -d 'Create a server'
|
||||||
complete -c mcpctl -n "__fish_seen_subcommand_from create; and not __fish_seen_subcommand_from $create_cmds" -a secret -d 'Create a secret'
|
complete -c mcpctl -n "__fish_seen_subcommand_from create; and not __fish_seen_subcommand_from $create_cmds" -a secret -d 'Create a secret'
|
||||||
complete -c mcpctl -n "__fish_seen_subcommand_from create; and not __fish_seen_subcommand_from $create_cmds" -a project -d 'Create a project'
|
complete -c mcpctl -n "__fish_seen_subcommand_from create; and not __fish_seen_subcommand_from $create_cmds" -a project -d 'Create a project'
|
||||||
complete -c mcpctl -n "__fish_seen_subcommand_from create; and not __fish_seen_subcommand_from $create_cmds" -a user -d 'Create a user'
|
complete -c mcpctl -n "__fish_seen_subcommand_from create; and not __fish_seen_subcommand_from $create_cmds" -a user -d 'Create a user'
|
||||||
complete -c mcpctl -n "__fish_seen_subcommand_from create; and not __fish_seen_subcommand_from $create_cmds" -a group -d 'Create a group'
|
complete -c mcpctl -n "__fish_seen_subcommand_from create; and not __fish_seen_subcommand_from $create_cmds" -a group -d 'Create a group'
|
||||||
complete -c mcpctl -n "__fish_seen_subcommand_from create; and not __fish_seen_subcommand_from $create_cmds" -a rbac -d 'Create an RBAC binding'
|
complete -c mcpctl -n "__fish_seen_subcommand_from create; and not __fish_seen_subcommand_from $create_cmds" -a rbac -d 'Create an RBAC binding'
|
||||||
|
complete -c mcpctl -n "__fish_seen_subcommand_from create; and not __fish_seen_subcommand_from $create_cmds" -a prompt -d 'Create an approved prompt'
|
||||||
|
complete -c mcpctl -n "__fish_seen_subcommand_from create; and not __fish_seen_subcommand_from $create_cmds" -a promptrequest -d 'Create a prompt request'
|
||||||
|
|
||||||
# logs options
|
# logs options
|
||||||
complete -c mcpctl -n "__fish_seen_subcommand_from logs" -l tail -d 'Number of lines' -x
|
complete -c mcpctl -n "__fish_seen_subcommand_from logs" -l tail -d 'Number of lines' -x
|
||||||
@@ -227,6 +231,9 @@ complete -c mcpctl -n "__fish_seen_subcommand_from restore" -s i -l input -d 'In
|
|||||||
complete -c mcpctl -n "__fish_seen_subcommand_from restore" -s p -l password -d 'Decryption password' -x
|
complete -c mcpctl -n "__fish_seen_subcommand_from restore" -s p -l password -d 'Decryption password' -x
|
||||||
complete -c mcpctl -n "__fish_seen_subcommand_from restore" -s c -l conflict -d 'Conflict strategy' -xa 'skip overwrite fail'
|
complete -c mcpctl -n "__fish_seen_subcommand_from restore" -s c -l conflict -d 'Conflict strategy' -xa 'skip overwrite fail'
|
||||||
|
|
||||||
|
# approve: first arg is resource type (promptrequest only), second is name
|
||||||
|
complete -c mcpctl -n "__fish_seen_subcommand_from approve; and __mcpctl_needs_resource_type" -a 'promptrequest' -d 'Resource type'
|
||||||
|
|
||||||
# apply takes a file
|
# apply takes a file
|
||||||
complete -c mcpctl -n "__fish_seen_subcommand_from apply" -s f -l file -d 'Configuration file' -rF
|
complete -c mcpctl -n "__fish_seen_subcommand_from apply" -s f -l file -d 'Configuration file' -rF
|
||||||
complete -c mcpctl -n "__fish_seen_subcommand_from apply" -F
|
complete -c mcpctl -n "__fish_seen_subcommand_from apply" -F
|
||||||
|
|||||||
464
src/cli/src/commands/config-setup.ts
Normal file
464
src/cli/src/commands/config-setup.ts
Normal file
@@ -0,0 +1,464 @@
|
|||||||
|
import { Command } from 'commander';
|
||||||
|
import http from 'node:http';
|
||||||
|
import https from 'node:https';
|
||||||
|
import { execFile } from 'node:child_process';
|
||||||
|
import { promisify } from 'node:util';
|
||||||
|
import { loadConfig, saveConfig } from '../config/index.js';
|
||||||
|
import type { ConfigLoaderDeps, McpctlConfig, LlmConfig, LlmProviderName, LlmProviderEntry, LlmTier } from '../config/index.js';
|
||||||
|
import type { SecretStore } from '@mcpctl/shared';
|
||||||
|
import { createSecretStore } from '@mcpctl/shared';
|
||||||
|
|
||||||
|
const execFileAsync = promisify(execFile);
|
||||||
|
|
||||||
|
export interface ConfigSetupPrompt {
|
||||||
|
select<T>(message: string, choices: Array<{ name: string; value: T; description?: string }>): Promise<T>;
|
||||||
|
input(message: string, defaultValue?: string): Promise<string>;
|
||||||
|
password(message: string): Promise<string>;
|
||||||
|
confirm(message: string, defaultValue?: boolean): Promise<boolean>;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ConfigSetupDeps {
|
||||||
|
configDeps: Partial<ConfigLoaderDeps>;
|
||||||
|
secretStore: SecretStore;
|
||||||
|
log: (...args: string[]) => void;
|
||||||
|
prompt: ConfigSetupPrompt;
|
||||||
|
fetchModels: (url: string, path: string) => Promise<string[]>;
|
||||||
|
whichBinary: (name: string) => Promise<string | null>;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface ProviderChoice {
|
||||||
|
name: string;
|
||||||
|
value: LlmProviderName;
|
||||||
|
description: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Provider config fields returned by per-provider setup functions. */
|
||||||
|
interface ProviderFields {
|
||||||
|
model?: string;
|
||||||
|
url?: string;
|
||||||
|
binaryPath?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
const FAST_PROVIDER_CHOICES: ProviderChoice[] = [
|
||||||
|
{ name: 'vLLM', value: 'vllm', description: 'Self-hosted vLLM (OpenAI-compatible)' },
|
||||||
|
{ name: 'Ollama', value: 'ollama', description: 'Local models via Ollama' },
|
||||||
|
];
|
||||||
|
|
||||||
|
const HEAVY_PROVIDER_CHOICES: ProviderChoice[] = [
|
||||||
|
{ name: 'Gemini CLI', value: 'gemini-cli', description: 'Google Gemini via local CLI (free, no API key)' },
|
||||||
|
{ name: 'Anthropic (Claude)', value: 'anthropic', description: 'Claude API (requires API key)' },
|
||||||
|
{ name: 'OpenAI', value: 'openai', description: 'OpenAI API (requires API key)' },
|
||||||
|
{ name: 'DeepSeek', value: 'deepseek', description: 'DeepSeek API (requires API key)' },
|
||||||
|
];
|
||||||
|
|
||||||
|
const ALL_PROVIDER_CHOICES: ProviderChoice[] = [
|
||||||
|
...FAST_PROVIDER_CHOICES,
|
||||||
|
...HEAVY_PROVIDER_CHOICES,
|
||||||
|
{ name: 'None (disable)', value: 'none', description: 'Disable LLM features' },
|
||||||
|
];
|
||||||
|
|
||||||
|
const GEMINI_MODELS = ['gemini-2.5-flash', 'gemini-2.5-pro', 'gemini-2.0-flash'];
|
||||||
|
const ANTHROPIC_MODELS = ['claude-haiku-3-5-20241022', 'claude-sonnet-4-20250514', 'claude-opus-4-20250514'];
|
||||||
|
const DEEPSEEK_MODELS = ['deepseek-chat', 'deepseek-reasoner'];
|
||||||
|
|
||||||
|
function defaultFetchModels(baseUrl: string, path: string): Promise<string[]> {
|
||||||
|
return new Promise((resolve) => {
|
||||||
|
const url = new URL(path, baseUrl);
|
||||||
|
const isHttps = url.protocol === 'https:';
|
||||||
|
const transport = isHttps ? https : http;
|
||||||
|
|
||||||
|
const req = transport.get({
|
||||||
|
hostname: url.hostname,
|
||||||
|
port: url.port || (isHttps ? 443 : 80),
|
||||||
|
path: url.pathname,
|
||||||
|
timeout: 5000,
|
||||||
|
}, (res) => {
|
||||||
|
const chunks: Buffer[] = [];
|
||||||
|
res.on('data', (chunk: Buffer) => chunks.push(chunk));
|
||||||
|
res.on('end', () => {
|
||||||
|
try {
|
||||||
|
const raw = Buffer.concat(chunks).toString('utf-8');
|
||||||
|
const data = JSON.parse(raw) as { models?: Array<{ name: string }>; data?: Array<{ id: string }> };
|
||||||
|
// Ollama format: { models: [{ name }] }
|
||||||
|
if (data.models) {
|
||||||
|
resolve(data.models.map((m) => m.name));
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
// OpenAI/vLLM format: { data: [{ id }] }
|
||||||
|
if (data.data) {
|
||||||
|
resolve(data.data.map((m) => m.id));
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
resolve([]);
|
||||||
|
} catch {
|
||||||
|
resolve([]);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
req.on('error', () => resolve([]));
|
||||||
|
req.on('timeout', () => { req.destroy(); resolve([]); });
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
async function defaultSelect<T>(message: string, choices: Array<{ name: string; value: T; description?: string }>): Promise<T> {
|
||||||
|
const { default: inquirer } = await import('inquirer');
|
||||||
|
const { answer } = await inquirer.prompt([{
|
||||||
|
type: 'list',
|
||||||
|
name: 'answer',
|
||||||
|
message,
|
||||||
|
choices: choices.map((c) => ({
|
||||||
|
name: c.description ? `${c.name} — ${c.description}` : c.name,
|
||||||
|
value: c.value,
|
||||||
|
short: c.name,
|
||||||
|
})),
|
||||||
|
}]);
|
||||||
|
return answer as T;
|
||||||
|
}
|
||||||
|
|
||||||
|
async function defaultInput(message: string, defaultValue?: string): Promise<string> {
|
||||||
|
const { default: inquirer } = await import('inquirer');
|
||||||
|
const { answer } = await inquirer.prompt([{
|
||||||
|
type: 'input',
|
||||||
|
name: 'answer',
|
||||||
|
message,
|
||||||
|
default: defaultValue,
|
||||||
|
}]);
|
||||||
|
return answer as string;
|
||||||
|
}
|
||||||
|
|
||||||
|
async function defaultPassword(message: string): Promise<string> {
|
||||||
|
const { default: inquirer } = await import('inquirer');
|
||||||
|
const { answer } = await inquirer.prompt([{ type: 'password', name: 'answer', message }]);
|
||||||
|
return answer as string;
|
||||||
|
}
|
||||||
|
|
||||||
|
async function defaultConfirm(message: string, defaultValue?: boolean): Promise<boolean> {
|
||||||
|
const { default: inquirer } = await import('inquirer');
|
||||||
|
const { answer } = await inquirer.prompt([{
|
||||||
|
type: 'confirm',
|
||||||
|
name: 'answer',
|
||||||
|
message,
|
||||||
|
default: defaultValue ?? true,
|
||||||
|
}]);
|
||||||
|
return answer as boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
const defaultPrompt: ConfigSetupPrompt = {
|
||||||
|
select: defaultSelect,
|
||||||
|
input: defaultInput,
|
||||||
|
password: defaultPassword,
|
||||||
|
confirm: defaultConfirm,
|
||||||
|
};
|
||||||
|
|
||||||
|
async function defaultWhichBinary(name: string): Promise<string | null> {
|
||||||
|
try {
|
||||||
|
const { stdout } = await execFileAsync('which', [name], { timeout: 3000 });
|
||||||
|
const path = stdout.trim();
|
||||||
|
return path || null;
|
||||||
|
} catch {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// --- Per-provider setup functions (return ProviderFields for reuse in both modes) ---
|
||||||
|
|
||||||
|
async function setupGeminiCliFields(
|
||||||
|
prompt: ConfigSetupPrompt,
|
||||||
|
log: (...args: string[]) => void,
|
||||||
|
whichBinary: (name: string) => Promise<string | null>,
|
||||||
|
currentModel?: string,
|
||||||
|
): Promise<ProviderFields> {
|
||||||
|
const model = await prompt.select<string>('Select model:', [
|
||||||
|
...GEMINI_MODELS.map((m) => ({
|
||||||
|
name: m === currentModel ? `${m} (current)` : m,
|
||||||
|
value: m,
|
||||||
|
})),
|
||||||
|
{ name: 'Custom...', value: '__custom__' },
|
||||||
|
]);
|
||||||
|
|
||||||
|
const finalModel = model === '__custom__'
|
||||||
|
? await prompt.input('Model name:', currentModel)
|
||||||
|
: model;
|
||||||
|
|
||||||
|
let binaryPath: string | undefined;
|
||||||
|
const detected = await whichBinary('gemini');
|
||||||
|
if (detected) {
|
||||||
|
log(`Found gemini at: ${detected}`);
|
||||||
|
binaryPath = detected;
|
||||||
|
} else {
|
||||||
|
log('Warning: gemini binary not found in PATH');
|
||||||
|
const manualPath = await prompt.input('Binary path (or install with: npm i -g @google/gemini-cli):');
|
||||||
|
if (manualPath) binaryPath = manualPath;
|
||||||
|
}
|
||||||
|
|
||||||
|
const result: ProviderFields = { model: finalModel };
|
||||||
|
if (binaryPath) result.binaryPath = binaryPath;
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
async function setupOllamaFields(
|
||||||
|
prompt: ConfigSetupPrompt,
|
||||||
|
fetchModels: ConfigSetupDeps['fetchModels'],
|
||||||
|
currentUrl?: string,
|
||||||
|
currentModel?: string,
|
||||||
|
): Promise<ProviderFields> {
|
||||||
|
const url = await prompt.input('Ollama URL:', currentUrl ?? 'http://localhost:11434');
|
||||||
|
const models = await fetchModels(url, '/api/tags');
|
||||||
|
let model: string;
|
||||||
|
|
||||||
|
if (models.length > 0) {
|
||||||
|
const choices = models.map((m) => ({
|
||||||
|
name: m === currentModel ? `${m} (current)` : m,
|
||||||
|
value: m,
|
||||||
|
}));
|
||||||
|
choices.push({ name: 'Custom...', value: '__custom__' });
|
||||||
|
model = await prompt.select<string>('Select model:', choices);
|
||||||
|
if (model === '__custom__') {
|
||||||
|
model = await prompt.input('Model name:', currentModel);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
model = await prompt.input('Model name (could not fetch models):', currentModel ?? 'llama3.2');
|
||||||
|
}
|
||||||
|
|
||||||
|
const result: ProviderFields = { model };
|
||||||
|
if (url) result.url = url;
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
async function setupVllmFields(
|
||||||
|
prompt: ConfigSetupPrompt,
|
||||||
|
fetchModels: ConfigSetupDeps['fetchModels'],
|
||||||
|
currentUrl?: string,
|
||||||
|
currentModel?: string,
|
||||||
|
): Promise<ProviderFields> {
|
||||||
|
const url = await prompt.input('vLLM URL:', currentUrl ?? 'http://localhost:8000');
|
||||||
|
const models = await fetchModels(url, '/v1/models');
|
||||||
|
let model: string;
|
||||||
|
|
||||||
|
if (models.length > 0) {
|
||||||
|
const choices = models.map((m) => ({
|
||||||
|
name: m === currentModel ? `${m} (current)` : m,
|
||||||
|
value: m,
|
||||||
|
}));
|
||||||
|
choices.push({ name: 'Custom...', value: '__custom__' });
|
||||||
|
model = await prompt.select<string>('Select model:', choices);
|
||||||
|
if (model === '__custom__') {
|
||||||
|
model = await prompt.input('Model name:', currentModel);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
model = await prompt.input('Model name (could not fetch models):', currentModel ?? 'default');
|
||||||
|
}
|
||||||
|
|
||||||
|
const result: ProviderFields = { model };
|
||||||
|
if (url) result.url = url;
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
async function setupApiKeyFields(
|
||||||
|
prompt: ConfigSetupPrompt,
|
||||||
|
secretStore: SecretStore,
|
||||||
|
provider: LlmProviderName,
|
||||||
|
secretKey: string,
|
||||||
|
hardcodedModels: string[],
|
||||||
|
currentModel?: string,
|
||||||
|
currentUrl?: string,
|
||||||
|
): Promise<ProviderFields> {
|
||||||
|
const existingKey = await secretStore.get(secretKey);
|
||||||
|
let apiKey: string;
|
||||||
|
|
||||||
|
if (existingKey) {
|
||||||
|
const masked = `****${existingKey.slice(-4)}`;
|
||||||
|
const changeKey = await prompt.confirm(`API key stored (${masked}). Change it?`, false);
|
||||||
|
apiKey = changeKey ? await prompt.password('API key:') : existingKey;
|
||||||
|
} else {
|
||||||
|
apiKey = await prompt.password('API key:');
|
||||||
|
}
|
||||||
|
|
||||||
|
if (apiKey !== existingKey) {
|
||||||
|
await secretStore.set(secretKey, apiKey);
|
||||||
|
}
|
||||||
|
|
||||||
|
let model: string;
|
||||||
|
if (hardcodedModels.length > 0) {
|
||||||
|
const choices = hardcodedModels.map((m) => ({
|
||||||
|
name: m === currentModel ? `${m} (current)` : m,
|
||||||
|
value: m,
|
||||||
|
}));
|
||||||
|
choices.push({ name: 'Custom...', value: '__custom__' });
|
||||||
|
model = await prompt.select<string>('Select model:', choices);
|
||||||
|
if (model === '__custom__') {
|
||||||
|
model = await prompt.input('Model name:', currentModel);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
model = await prompt.input('Model name:', currentModel ?? 'gpt-4o');
|
||||||
|
}
|
||||||
|
|
||||||
|
let url: string | undefined;
|
||||||
|
if (provider === 'openai') {
|
||||||
|
const customUrl = await prompt.confirm('Use custom API endpoint?', false);
|
||||||
|
if (customUrl) {
|
||||||
|
url = await prompt.input('API URL:', currentUrl ?? 'https://api.openai.com');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const result: ProviderFields = { model };
|
||||||
|
if (url) result.url = url;
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Configure a single provider type and return its fields. */
|
||||||
|
async function setupProviderFields(
|
||||||
|
providerType: LlmProviderName,
|
||||||
|
prompt: ConfigSetupPrompt,
|
||||||
|
log: (...args: string[]) => void,
|
||||||
|
fetchModels: ConfigSetupDeps['fetchModels'],
|
||||||
|
whichBinary: (name: string) => Promise<string | null>,
|
||||||
|
secretStore: SecretStore,
|
||||||
|
): Promise<ProviderFields> {
|
||||||
|
switch (providerType) {
|
||||||
|
case 'gemini-cli':
|
||||||
|
return setupGeminiCliFields(prompt, log, whichBinary);
|
||||||
|
case 'ollama':
|
||||||
|
return setupOllamaFields(prompt, fetchModels);
|
||||||
|
case 'vllm':
|
||||||
|
return setupVllmFields(prompt, fetchModels);
|
||||||
|
case 'anthropic':
|
||||||
|
return setupApiKeyFields(prompt, secretStore, 'anthropic', 'anthropic-api-key', ANTHROPIC_MODELS);
|
||||||
|
case 'openai':
|
||||||
|
return setupApiKeyFields(prompt, secretStore, 'openai', 'openai-api-key', []);
|
||||||
|
case 'deepseek':
|
||||||
|
return setupApiKeyFields(prompt, secretStore, 'deepseek', 'deepseek-api-key', DEEPSEEK_MODELS);
|
||||||
|
default:
|
||||||
|
return {};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Build a LlmProviderEntry from type, name, and fields. */
|
||||||
|
function buildEntry(providerType: LlmProviderName, name: string, fields: ProviderFields, tier?: LlmTier): LlmProviderEntry {
|
||||||
|
const entry: LlmProviderEntry = { name, type: providerType };
|
||||||
|
if (fields.model) entry.model = fields.model;
|
||||||
|
if (fields.url) entry.url = fields.url;
|
||||||
|
if (fields.binaryPath) entry.binaryPath = fields.binaryPath;
|
||||||
|
if (tier) entry.tier = tier;
|
||||||
|
return entry;
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Simple mode: single provider (legacy format). */
|
||||||
|
async function simpleSetup(
|
||||||
|
config: McpctlConfig,
|
||||||
|
configDeps: Partial<ConfigLoaderDeps>,
|
||||||
|
prompt: ConfigSetupPrompt,
|
||||||
|
log: (...args: string[]) => void,
|
||||||
|
fetchModels: ConfigSetupDeps['fetchModels'],
|
||||||
|
whichBinary: (name: string) => Promise<string | null>,
|
||||||
|
secretStore: SecretStore,
|
||||||
|
): Promise<void> {
|
||||||
|
const currentLlm = config.llm && 'provider' in config.llm ? config.llm : undefined;
|
||||||
|
|
||||||
|
const choices = ALL_PROVIDER_CHOICES.map((c) => {
|
||||||
|
if (currentLlm?.provider === c.value) {
|
||||||
|
return { ...c, name: `${c.name} (current)` };
|
||||||
|
}
|
||||||
|
return c;
|
||||||
|
});
|
||||||
|
|
||||||
|
const provider = await prompt.select<LlmProviderName>('Select LLM provider:', choices);
|
||||||
|
|
||||||
|
if (provider === 'none') {
|
||||||
|
const updated: McpctlConfig = { ...config, llm: { provider: 'none' } };
|
||||||
|
saveConfig(updated, configDeps);
|
||||||
|
log('LLM disabled. Restart mcplocal: systemctl --user restart mcplocal');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const fields = await setupProviderFields(provider, prompt, log, fetchModels, whichBinary, secretStore);
|
||||||
|
const llmConfig: LlmConfig = { provider, ...fields };
|
||||||
|
const updated: McpctlConfig = { ...config, llm: llmConfig };
|
||||||
|
saveConfig(updated, configDeps);
|
||||||
|
log(`\nLLM configured: ${llmConfig.provider}${llmConfig.model ? ` / ${llmConfig.model}` : ''}`);
|
||||||
|
log('Restart mcplocal: systemctl --user restart mcplocal');
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Advanced mode: multiple providers with tier assignments. */
|
||||||
|
async function advancedSetup(
|
||||||
|
config: McpctlConfig,
|
||||||
|
configDeps: Partial<ConfigLoaderDeps>,
|
||||||
|
prompt: ConfigSetupPrompt,
|
||||||
|
log: (...args: string[]) => void,
|
||||||
|
fetchModels: ConfigSetupDeps['fetchModels'],
|
||||||
|
whichBinary: (name: string) => Promise<string | null>,
|
||||||
|
secretStore: SecretStore,
|
||||||
|
): Promise<void> {
|
||||||
|
const entries: LlmProviderEntry[] = [];
|
||||||
|
|
||||||
|
// Fast providers
|
||||||
|
const addFast = await prompt.confirm('Add a FAST provider? (vLLM, Ollama — local, cheap, fast)', true);
|
||||||
|
if (addFast) {
|
||||||
|
let addMore = true;
|
||||||
|
while (addMore) {
|
||||||
|
const providerType = await prompt.select<LlmProviderName>('Fast provider type:', FAST_PROVIDER_CHOICES);
|
||||||
|
const defaultName = providerType === 'vllm' ? 'vllm-local' : providerType;
|
||||||
|
const name = await prompt.input('Provider name:', defaultName);
|
||||||
|
const fields = await setupProviderFields(providerType, prompt, log, fetchModels, whichBinary, secretStore);
|
||||||
|
entries.push(buildEntry(providerType, name, fields, 'fast'));
|
||||||
|
log(` Added: ${name} (${providerType}) → fast tier`);
|
||||||
|
addMore = await prompt.confirm('Add another fast provider?', false);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Heavy providers
|
||||||
|
const addHeavy = await prompt.confirm('Add a HEAVY provider? (Gemini, Anthropic, OpenAI — cloud, smart)', true);
|
||||||
|
if (addHeavy) {
|
||||||
|
let addMore = true;
|
||||||
|
while (addMore) {
|
||||||
|
const providerType = await prompt.select<LlmProviderName>('Heavy provider type:', HEAVY_PROVIDER_CHOICES);
|
||||||
|
const defaultName = providerType;
|
||||||
|
const name = await prompt.input('Provider name:', defaultName);
|
||||||
|
const fields = await setupProviderFields(providerType, prompt, log, fetchModels, whichBinary, secretStore);
|
||||||
|
entries.push(buildEntry(providerType, name, fields, 'heavy'));
|
||||||
|
log(` Added: ${name} (${providerType}) → heavy tier`);
|
||||||
|
addMore = await prompt.confirm('Add another heavy provider?', false);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (entries.length === 0) {
|
||||||
|
log('No providers configured.');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Summary
|
||||||
|
log('\nProvider configuration:');
|
||||||
|
for (const e of entries) {
|
||||||
|
log(` ${e.tier ?? 'unassigned'}: ${e.name} (${e.type})${e.model ? ` / ${e.model}` : ''}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const updated: McpctlConfig = { ...config, llm: { providers: entries } };
|
||||||
|
saveConfig(updated, configDeps);
|
||||||
|
log('\nRestart mcplocal: systemctl --user restart mcplocal');
|
||||||
|
}
|
||||||
|
|
||||||
|
export function createConfigSetupCommand(deps?: Partial<ConfigSetupDeps>): Command {
|
||||||
|
return new Command('setup')
|
||||||
|
.description('Interactive LLM provider setup wizard')
|
||||||
|
.action(async () => {
|
||||||
|
const configDeps = deps?.configDeps ?? {};
|
||||||
|
const log = deps?.log ?? ((...args: string[]) => console.log(...args));
|
||||||
|
const prompt = deps?.prompt ?? defaultPrompt;
|
||||||
|
const fetchModels = deps?.fetchModels ?? defaultFetchModels;
|
||||||
|
const whichBinary = deps?.whichBinary ?? defaultWhichBinary;
|
||||||
|
const secretStore = deps?.secretStore ?? await createSecretStore();
|
||||||
|
|
||||||
|
const config = loadConfig(configDeps);
|
||||||
|
|
||||||
|
const mode = await prompt.select<'simple' | 'advanced'>('Setup mode:', [
|
||||||
|
{ name: 'Simple', value: 'simple', description: 'One provider for everything' },
|
||||||
|
{ name: 'Advanced', value: 'advanced', description: 'Multiple providers with fast/heavy tiers' },
|
||||||
|
]);
|
||||||
|
|
||||||
|
if (mode === 'simple') {
|
||||||
|
await simpleSetup(config, configDeps, prompt, log, fetchModels, whichBinary, secretStore);
|
||||||
|
} else {
|
||||||
|
await advancedSetup(config, configDeps, prompt, log, fetchModels, whichBinary, secretStore);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
@@ -6,6 +6,7 @@ import { loadConfig, saveConfig, mergeConfig, getConfigPath, DEFAULT_CONFIG } fr
|
|||||||
import type { McpctlConfig, ConfigLoaderDeps } from '../config/index.js';
|
import type { McpctlConfig, ConfigLoaderDeps } from '../config/index.js';
|
||||||
import { formatJson, formatYaml } from '../formatters/index.js';
|
import { formatJson, formatYaml } from '../formatters/index.js';
|
||||||
import { saveCredentials, loadCredentials } from '../auth/index.js';
|
import { saveCredentials, loadCredentials } from '../auth/index.js';
|
||||||
|
import { createConfigSetupCommand } from './config-setup.js';
|
||||||
import type { CredentialsDeps, StoredCredentials } from '../auth/index.js';
|
import type { CredentialsDeps, StoredCredentials } from '../auth/index.js';
|
||||||
import type { ApiClient } from '../api-client.js';
|
import type { ApiClient } from '../api-client.js';
|
||||||
|
|
||||||
@@ -138,6 +139,8 @@ export function createConfigCommand(deps?: Partial<ConfigCommandDeps>, apiDeps?:
|
|||||||
registerClaudeCommand('claude', false);
|
registerClaudeCommand('claude', false);
|
||||||
registerClaudeCommand('claude-generate', true); // backward compat
|
registerClaudeCommand('claude-generate', true); // backward compat
|
||||||
|
|
||||||
|
config.addCommand(createConfigSetupCommand({ configDeps }));
|
||||||
|
|
||||||
if (apiDeps) {
|
if (apiDeps) {
|
||||||
const { client, credentialsDeps, log: apiLog } = apiDeps;
|
const { client, credentialsDeps, log: apiLog } = apiDeps;
|
||||||
|
|
||||||
|
|||||||
@@ -196,8 +196,6 @@ export function createCreateCommand(deps: CreateCommandDeps): Command {
|
|||||||
.argument('<name>', 'Project name')
|
.argument('<name>', 'Project name')
|
||||||
.option('-d, --description <text>', 'Project description', '')
|
.option('-d, --description <text>', 'Project description', '')
|
||||||
.option('--proxy-mode <mode>', 'Proxy mode (direct, filtered)')
|
.option('--proxy-mode <mode>', 'Proxy mode (direct, filtered)')
|
||||||
.option('--proxy-mode-llm-provider <name>', 'LLM provider name (for filtered proxy mode)')
|
|
||||||
.option('--proxy-mode-llm-model <name>', 'LLM model name (for filtered proxy mode)')
|
|
||||||
.option('--prompt <text>', 'Project-level prompt / instructions for the LLM')
|
.option('--prompt <text>', 'Project-level prompt / instructions for the LLM')
|
||||||
.option('--server <name>', 'Server name (repeat for multiple)', collect, [])
|
.option('--server <name>', 'Server name (repeat for multiple)', collect, [])
|
||||||
.option('--force', 'Update if already exists')
|
.option('--force', 'Update if already exists')
|
||||||
@@ -208,8 +206,6 @@ export function createCreateCommand(deps: CreateCommandDeps): Command {
|
|||||||
proxyMode: opts.proxyMode ?? 'direct',
|
proxyMode: opts.proxyMode ?? 'direct',
|
||||||
};
|
};
|
||||||
if (opts.prompt) body.prompt = opts.prompt;
|
if (opts.prompt) body.prompt = opts.prompt;
|
||||||
if (opts.proxyModeLlmProvider) body.llmProvider = opts.proxyModeLlmProvider;
|
|
||||||
if (opts.proxyModeLlmModel) body.llmModel = opts.proxyModeLlmModel;
|
|
||||||
if (opts.server.length > 0) body.servers = opts.server;
|
if (opts.server.length > 0) body.servers = opts.server;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
@@ -379,5 +375,31 @@ export function createCreateCommand(deps: CreateCommandDeps): Command {
|
|||||||
log(`prompt '${prompt.name}' created (id: ${prompt.id})`);
|
log(`prompt '${prompt.name}' created (id: ${prompt.id})`);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// --- create promptrequest ---
|
||||||
|
cmd.command('promptrequest')
|
||||||
|
.description('Create a prompt request (pending proposal that needs approval)')
|
||||||
|
.argument('<name>', 'Prompt request name (lowercase alphanumeric with hyphens)')
|
||||||
|
.requiredOption('--project <name>', 'Project name (required)')
|
||||||
|
.option('--content <text>', 'Prompt content text')
|
||||||
|
.option('--content-file <path>', 'Read prompt content from file')
|
||||||
|
.action(async (name: string, opts) => {
|
||||||
|
let content = opts.content as string | undefined;
|
||||||
|
if (opts.contentFile) {
|
||||||
|
const fs = await import('node:fs/promises');
|
||||||
|
content = await fs.readFile(opts.contentFile as string, 'utf-8');
|
||||||
|
}
|
||||||
|
if (!content) {
|
||||||
|
throw new Error('--content or --content-file is required');
|
||||||
|
}
|
||||||
|
|
||||||
|
const projectName = opts.project as string;
|
||||||
|
const pr = await client.post<{ id: string; name: string }>(
|
||||||
|
`/api/v1/projects/${encodeURIComponent(projectName)}/promptrequests`,
|
||||||
|
{ name, content },
|
||||||
|
);
|
||||||
|
log(`prompt request '${pr.name}' created (id: ${pr.id})`);
|
||||||
|
log(` approve with: mcpctl approve promptrequest ${pr.name}`);
|
||||||
|
});
|
||||||
|
|
||||||
return cmd;
|
return cmd;
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -7,11 +7,31 @@ import type { CredentialsDeps } from '../auth/index.js';
|
|||||||
import { formatJson, formatYaml } from '../formatters/index.js';
|
import { formatJson, formatYaml } from '../formatters/index.js';
|
||||||
import { APP_VERSION } from '@mcpctl/shared';
|
import { APP_VERSION } from '@mcpctl/shared';
|
||||||
|
|
||||||
|
// ANSI helpers
|
||||||
|
const GREEN = '\x1b[32m';
|
||||||
|
const RED = '\x1b[31m';
|
||||||
|
const DIM = '\x1b[2m';
|
||||||
|
const RESET = '\x1b[0m';
|
||||||
|
const CLEAR_LINE = '\x1b[2K\r';
|
||||||
|
|
||||||
|
interface ProvidersInfo {
|
||||||
|
providers: string[];
|
||||||
|
tiers: { fast: string[]; heavy: string[] };
|
||||||
|
}
|
||||||
|
|
||||||
export interface StatusCommandDeps {
|
export interface StatusCommandDeps {
|
||||||
configDeps: Partial<ConfigLoaderDeps>;
|
configDeps: Partial<ConfigLoaderDeps>;
|
||||||
credentialsDeps: Partial<CredentialsDeps>;
|
credentialsDeps: Partial<CredentialsDeps>;
|
||||||
log: (...args: string[]) => void;
|
log: (...args: string[]) => void;
|
||||||
|
write: (text: string) => void;
|
||||||
checkHealth: (url: string) => Promise<boolean>;
|
checkHealth: (url: string) => Promise<boolean>;
|
||||||
|
/** Check LLM health via mcplocal's /llm/health endpoint */
|
||||||
|
checkLlm: (mcplocalUrl: string) => Promise<string>;
|
||||||
|
/** Fetch available models from mcplocal's /llm/models endpoint */
|
||||||
|
fetchModels: (mcplocalUrl: string) => Promise<string[]>;
|
||||||
|
/** Fetch provider tier info from mcplocal's /llm/providers endpoint */
|
||||||
|
fetchProviders: (mcplocalUrl: string) => Promise<ProvidersInfo | null>;
|
||||||
|
isTTY: boolean;
|
||||||
}
|
}
|
||||||
|
|
||||||
function defaultCheckHealth(url: string): Promise<boolean> {
|
function defaultCheckHealth(url: string): Promise<boolean> {
|
||||||
@@ -28,15 +48,114 @@ function defaultCheckHealth(url: string): Promise<boolean> {
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check LLM health by querying mcplocal's /llm/health endpoint.
|
||||||
|
* This tests the actual provider running inside the daemon (uses persistent ACP for gemini, etc.)
|
||||||
|
*/
|
||||||
|
function defaultCheckLlm(mcplocalUrl: string): Promise<string> {
|
||||||
|
return new Promise((resolve) => {
|
||||||
|
const req = http.get(`${mcplocalUrl}/llm/health`, { timeout: 45000 }, (res) => {
|
||||||
|
const chunks: Buffer[] = [];
|
||||||
|
res.on('data', (chunk: Buffer) => chunks.push(chunk));
|
||||||
|
res.on('end', () => {
|
||||||
|
try {
|
||||||
|
const body = JSON.parse(Buffer.concat(chunks).toString('utf-8')) as { status: string; error?: string };
|
||||||
|
if (body.status === 'ok') {
|
||||||
|
resolve('ok');
|
||||||
|
} else if (body.status === 'not configured') {
|
||||||
|
resolve('not configured');
|
||||||
|
} else if (body.error) {
|
||||||
|
resolve(body.error.slice(0, 80));
|
||||||
|
} else {
|
||||||
|
resolve(body.status);
|
||||||
|
}
|
||||||
|
} catch {
|
||||||
|
resolve('invalid response');
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
req.on('error', () => resolve('mcplocal unreachable'));
|
||||||
|
req.on('timeout', () => { req.destroy(); resolve('timeout'); });
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
function defaultFetchModels(mcplocalUrl: string): Promise<string[]> {
|
||||||
|
return new Promise((resolve) => {
|
||||||
|
const req = http.get(`${mcplocalUrl}/llm/models`, { timeout: 5000 }, (res) => {
|
||||||
|
const chunks: Buffer[] = [];
|
||||||
|
res.on('data', (chunk: Buffer) => chunks.push(chunk));
|
||||||
|
res.on('end', () => {
|
||||||
|
try {
|
||||||
|
const body = JSON.parse(Buffer.concat(chunks).toString('utf-8')) as { models?: string[] };
|
||||||
|
resolve(body.models ?? []);
|
||||||
|
} catch {
|
||||||
|
resolve([]);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
req.on('error', () => resolve([]));
|
||||||
|
req.on('timeout', () => { req.destroy(); resolve([]); });
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
function defaultFetchProviders(mcplocalUrl: string): Promise<ProvidersInfo | null> {
|
||||||
|
return new Promise((resolve) => {
|
||||||
|
const req = http.get(`${mcplocalUrl}/llm/providers`, { timeout: 5000 }, (res) => {
|
||||||
|
const chunks: Buffer[] = [];
|
||||||
|
res.on('data', (chunk: Buffer) => chunks.push(chunk));
|
||||||
|
res.on('end', () => {
|
||||||
|
try {
|
||||||
|
const body = JSON.parse(Buffer.concat(chunks).toString('utf-8')) as ProvidersInfo;
|
||||||
|
resolve(body);
|
||||||
|
} catch {
|
||||||
|
resolve(null);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
req.on('error', () => resolve(null));
|
||||||
|
req.on('timeout', () => { req.destroy(); resolve(null); });
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
const SPINNER_FRAMES = ['⠋', '⠙', '⠹', '⠸', '⠼', '⠴', '⠦', '⠧', '⠇', '⠏'];
|
||||||
|
|
||||||
const defaultDeps: StatusCommandDeps = {
|
const defaultDeps: StatusCommandDeps = {
|
||||||
configDeps: {},
|
configDeps: {},
|
||||||
credentialsDeps: {},
|
credentialsDeps: {},
|
||||||
log: (...args) => console.log(...args),
|
log: (...args) => console.log(...args),
|
||||||
|
write: (text) => process.stdout.write(text),
|
||||||
checkHealth: defaultCheckHealth,
|
checkHealth: defaultCheckHealth,
|
||||||
|
checkLlm: defaultCheckLlm,
|
||||||
|
fetchModels: defaultFetchModels,
|
||||||
|
fetchProviders: defaultFetchProviders,
|
||||||
|
isTTY: process.stdout.isTTY ?? false,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
/** Determine LLM label from config (handles both legacy and multi-provider formats). */
|
||||||
|
function getLlmLabel(llm: unknown): string | null {
|
||||||
|
if (!llm || typeof llm !== 'object') return null;
|
||||||
|
// Legacy format: { provider, model }
|
||||||
|
if ('provider' in llm) {
|
||||||
|
const legacy = llm as { provider: string; model?: string };
|
||||||
|
if (legacy.provider === 'none') return null;
|
||||||
|
return `${legacy.provider}${legacy.model ? ` / ${legacy.model}` : ''}`;
|
||||||
|
}
|
||||||
|
// Multi-provider format: { providers: [...] }
|
||||||
|
if ('providers' in llm) {
|
||||||
|
const multi = llm as { providers: Array<{ name: string; type: string; tier?: string }> };
|
||||||
|
if (multi.providers.length === 0) return null;
|
||||||
|
return multi.providers.map((p) => `${p.name}${p.tier ? ` (${p.tier})` : ''}`).join(', ');
|
||||||
|
}
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Check if config uses multi-provider format. */
|
||||||
|
function isMultiProvider(llm: unknown): boolean {
|
||||||
|
return !!llm && typeof llm === 'object' && 'providers' in llm;
|
||||||
|
}
|
||||||
|
|
||||||
export function createStatusCommand(deps?: Partial<StatusCommandDeps>): Command {
|
export function createStatusCommand(deps?: Partial<StatusCommandDeps>): Command {
|
||||||
const { configDeps, credentialsDeps, log, checkHealth } = { ...defaultDeps, ...deps };
|
const { configDeps, credentialsDeps, log, write, checkHealth, checkLlm, fetchModels, fetchProviders, isTTY } = { ...defaultDeps, ...deps };
|
||||||
|
|
||||||
return new Command('status')
|
return new Command('status')
|
||||||
.description('Show mcpctl status and connectivity')
|
.description('Show mcpctl status and connectivity')
|
||||||
@@ -45,33 +164,116 @@ export function createStatusCommand(deps?: Partial<StatusCommandDeps>): Command
|
|||||||
const config = loadConfig(configDeps);
|
const config = loadConfig(configDeps);
|
||||||
const creds = loadCredentials(credentialsDeps);
|
const creds = loadCredentials(credentialsDeps);
|
||||||
|
|
||||||
|
const llmLabel = getLlmLabel(config.llm);
|
||||||
|
const multiProvider = isMultiProvider(config.llm);
|
||||||
|
|
||||||
|
if (opts.output !== 'table') {
|
||||||
|
// JSON/YAML: run everything in parallel, wait, output at once
|
||||||
|
const [mcplocalReachable, mcpdReachable, llmStatus, providersInfo] = await Promise.all([
|
||||||
|
checkHealth(config.mcplocalUrl),
|
||||||
|
checkHealth(config.mcpdUrl),
|
||||||
|
llmLabel ? checkLlm(config.mcplocalUrl) : Promise.resolve(null),
|
||||||
|
multiProvider ? fetchProviders(config.mcplocalUrl) : Promise.resolve(null),
|
||||||
|
]);
|
||||||
|
|
||||||
|
const llm = llmLabel
|
||||||
|
? llmStatus === 'ok' ? llmLabel : `${llmLabel} (${llmStatus})`
|
||||||
|
: null;
|
||||||
|
|
||||||
|
const status = {
|
||||||
|
version: APP_VERSION,
|
||||||
|
mcplocalUrl: config.mcplocalUrl,
|
||||||
|
mcplocalReachable,
|
||||||
|
mcpdUrl: config.mcpdUrl,
|
||||||
|
mcpdReachable,
|
||||||
|
auth: creds ? { user: creds.user } : null,
|
||||||
|
registries: config.registries,
|
||||||
|
outputFormat: config.outputFormat,
|
||||||
|
llm,
|
||||||
|
llmStatus,
|
||||||
|
...(providersInfo ? { providers: providersInfo } : {}),
|
||||||
|
};
|
||||||
|
|
||||||
|
log(opts.output === 'json' ? formatJson(status) : formatYaml(status));
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Table format: print lines progressively, LLM last with spinner
|
||||||
|
|
||||||
|
// Fast health checks first
|
||||||
const [mcplocalReachable, mcpdReachable] = await Promise.all([
|
const [mcplocalReachable, mcpdReachable] = await Promise.all([
|
||||||
checkHealth(config.mcplocalUrl),
|
checkHealth(config.mcplocalUrl),
|
||||||
checkHealth(config.mcpdUrl),
|
checkHealth(config.mcpdUrl),
|
||||||
]);
|
]);
|
||||||
|
|
||||||
const status = {
|
log(`mcpctl v${APP_VERSION}`);
|
||||||
version: APP_VERSION,
|
log(`mcplocal: ${config.mcplocalUrl} (${mcplocalReachable ? 'connected' : 'unreachable'})`);
|
||||||
mcplocalUrl: config.mcplocalUrl,
|
log(`mcpd: ${config.mcpdUrl} (${mcpdReachable ? 'connected' : 'unreachable'})`);
|
||||||
mcplocalReachable,
|
log(`Auth: ${creds ? `logged in as ${creds.user}` : 'not logged in'}`);
|
||||||
mcpdUrl: config.mcpdUrl,
|
log(`Registries: ${config.registries.join(', ')}`);
|
||||||
mcpdReachable,
|
log(`Output: ${config.outputFormat}`);
|
||||||
auth: creds ? { user: creds.user } : null,
|
|
||||||
registries: config.registries,
|
|
||||||
outputFormat: config.outputFormat,
|
|
||||||
};
|
|
||||||
|
|
||||||
if (opts.output === 'json') {
|
if (!llmLabel) {
|
||||||
log(formatJson(status));
|
log(`LLM: not configured (run 'mcpctl config setup')`);
|
||||||
} else if (opts.output === 'yaml') {
|
return;
|
||||||
log(formatYaml(status));
|
}
|
||||||
|
|
||||||
|
// LLM check + models + providers fetch in parallel
|
||||||
|
const llmPromise = checkLlm(config.mcplocalUrl);
|
||||||
|
const modelsPromise = fetchModels(config.mcplocalUrl);
|
||||||
|
const providersPromise = multiProvider ? fetchProviders(config.mcplocalUrl) : Promise.resolve(null);
|
||||||
|
|
||||||
|
if (isTTY) {
|
||||||
|
let frame = 0;
|
||||||
|
const interval = setInterval(() => {
|
||||||
|
write(`${CLEAR_LINE}LLM: ${DIM}${SPINNER_FRAMES[frame % SPINNER_FRAMES.length]} checking...${RESET}`);
|
||||||
|
frame++;
|
||||||
|
}, 80);
|
||||||
|
|
||||||
|
const [llmStatus, models, providersInfo] = await Promise.all([llmPromise, modelsPromise, providersPromise]);
|
||||||
|
clearInterval(interval);
|
||||||
|
|
||||||
|
if (providersInfo && (providersInfo.tiers.fast.length > 0 || providersInfo.tiers.heavy.length > 0)) {
|
||||||
|
// Tiered display
|
||||||
|
write(`${CLEAR_LINE}`);
|
||||||
|
if (providersInfo.tiers.fast.length > 0) {
|
||||||
|
log(`LLM (fast): ${providersInfo.tiers.fast.join(', ')} ${GREEN}✓${RESET}`);
|
||||||
|
}
|
||||||
|
if (providersInfo.tiers.heavy.length > 0) {
|
||||||
|
log(`LLM (heavy): ${providersInfo.tiers.heavy.join(', ')} ${GREEN}✓${RESET}`);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// Legacy single provider display
|
||||||
|
if (llmStatus === 'ok' || llmStatus === 'ok (key stored)') {
|
||||||
|
write(`${CLEAR_LINE}LLM: ${llmLabel} ${GREEN}✓ ${llmStatus}${RESET}\n`);
|
||||||
|
} else {
|
||||||
|
write(`${CLEAR_LINE}LLM: ${llmLabel} ${RED}✗ ${llmStatus}${RESET}\n`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (models.length > 0) {
|
||||||
|
log(`${DIM} Available: ${models.join(', ')}${RESET}`);
|
||||||
|
}
|
||||||
} else {
|
} else {
|
||||||
log(`mcpctl v${status.version}`);
|
// Non-TTY: no spinner, just wait and print
|
||||||
log(`mcplocal: ${status.mcplocalUrl} (${mcplocalReachable ? 'connected' : 'unreachable'})`);
|
const [llmStatus, models, providersInfo] = await Promise.all([llmPromise, modelsPromise, providersPromise]);
|
||||||
log(`mcpd: ${status.mcpdUrl} (${mcpdReachable ? 'connected' : 'unreachable'})`);
|
|
||||||
log(`Auth: ${creds ? `logged in as ${creds.user}` : 'not logged in'}`);
|
if (providersInfo && (providersInfo.tiers.fast.length > 0 || providersInfo.tiers.heavy.length > 0)) {
|
||||||
log(`Registries: ${status.registries.join(', ')}`);
|
if (providersInfo.tiers.fast.length > 0) {
|
||||||
log(`Output: ${status.outputFormat}`);
|
log(`LLM (fast): ${providersInfo.tiers.fast.join(', ')}`);
|
||||||
|
}
|
||||||
|
if (providersInfo.tiers.heavy.length > 0) {
|
||||||
|
log(`LLM (heavy): ${providersInfo.tiers.heavy.join(', ')}`);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
if (llmStatus === 'ok' || llmStatus === 'ok (key stored)') {
|
||||||
|
log(`LLM: ${llmLabel} ✓ ${llmStatus}`);
|
||||||
|
} else {
|
||||||
|
log(`LLM: ${llmLabel} ✗ ${llmStatus}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (models.length > 0) {
|
||||||
|
log(`${DIM} Available: ${models.join(', ')}${RESET}`);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
export { McpctlConfigSchema, DEFAULT_CONFIG } from './schema.js';
|
export { McpctlConfigSchema, LlmConfigSchema, LlmProviderEntrySchema, LlmMultiConfigSchema, LLM_PROVIDERS, LLM_TIERS, DEFAULT_CONFIG } from './schema.js';
|
||||||
export type { McpctlConfig } from './schema.js';
|
export type { McpctlConfig, LlmConfig, LlmProviderEntry, LlmMultiConfig, LlmProviderName, LlmTier } from './schema.js';
|
||||||
export { loadConfig, saveConfig, mergeConfig, getConfigPath } from './loader.js';
|
export { loadConfig, saveConfig, mergeConfig, getConfigPath } from './loader.js';
|
||||||
export type { ConfigLoaderDeps } from './loader.js';
|
export type { ConfigLoaderDeps } from './loader.js';
|
||||||
|
|||||||
@@ -1,5 +1,50 @@
|
|||||||
import { z } from 'zod';
|
import { z } from 'zod';
|
||||||
|
|
||||||
|
export const LLM_PROVIDERS = ['gemini-cli', 'ollama', 'anthropic', 'openai', 'deepseek', 'vllm', 'none'] as const;
|
||||||
|
export type LlmProviderName = typeof LLM_PROVIDERS[number];
|
||||||
|
|
||||||
|
export const LLM_TIERS = ['fast', 'heavy'] as const;
|
||||||
|
export type LlmTier = typeof LLM_TIERS[number];
|
||||||
|
|
||||||
|
/** Legacy single-provider format. */
|
||||||
|
export const LlmConfigSchema = z.object({
|
||||||
|
/** LLM provider name */
|
||||||
|
provider: z.enum(LLM_PROVIDERS),
|
||||||
|
/** Model name */
|
||||||
|
model: z.string().optional(),
|
||||||
|
/** Provider URL (for ollama, vllm, openai with custom endpoint) */
|
||||||
|
url: z.string().optional(),
|
||||||
|
/** Binary path override (for gemini-cli) */
|
||||||
|
binaryPath: z.string().optional(),
|
||||||
|
}).strict();
|
||||||
|
|
||||||
|
export type LlmConfig = z.infer<typeof LlmConfigSchema>;
|
||||||
|
|
||||||
|
/** Multi-provider entry (advanced mode). */
|
||||||
|
export const LlmProviderEntrySchema = z.object({
|
||||||
|
/** User-chosen name for this provider instance (e.g. "vllm-local") */
|
||||||
|
name: z.string(),
|
||||||
|
/** Provider type */
|
||||||
|
type: z.enum(LLM_PROVIDERS),
|
||||||
|
/** Model name */
|
||||||
|
model: z.string().optional(),
|
||||||
|
/** Provider URL (for ollama, vllm, openai with custom endpoint) */
|
||||||
|
url: z.string().optional(),
|
||||||
|
/** Binary path override (for gemini-cli) */
|
||||||
|
binaryPath: z.string().optional(),
|
||||||
|
/** Tier assignment */
|
||||||
|
tier: z.enum(LLM_TIERS).optional(),
|
||||||
|
}).strict();
|
||||||
|
|
||||||
|
export type LlmProviderEntry = z.infer<typeof LlmProviderEntrySchema>;
|
||||||
|
|
||||||
|
/** Multi-provider format with providers array. */
|
||||||
|
export const LlmMultiConfigSchema = z.object({
|
||||||
|
providers: z.array(LlmProviderEntrySchema).min(1),
|
||||||
|
}).strict();
|
||||||
|
|
||||||
|
export type LlmMultiConfig = z.infer<typeof LlmMultiConfigSchema>;
|
||||||
|
|
||||||
export const McpctlConfigSchema = z.object({
|
export const McpctlConfigSchema = z.object({
|
||||||
/** mcplocal daemon endpoint (local LLM pre-processing proxy) */
|
/** mcplocal daemon endpoint (local LLM pre-processing proxy) */
|
||||||
mcplocalUrl: z.string().default('http://localhost:3200'),
|
mcplocalUrl: z.string().default('http://localhost:3200'),
|
||||||
@@ -19,6 +64,8 @@ export const McpctlConfigSchema = z.object({
|
|||||||
outputFormat: z.enum(['table', 'json', 'yaml']).default('table'),
|
outputFormat: z.enum(['table', 'json', 'yaml']).default('table'),
|
||||||
/** Smithery API key */
|
/** Smithery API key */
|
||||||
smitheryApiKey: z.string().optional(),
|
smitheryApiKey: z.string().optional(),
|
||||||
|
/** LLM provider configuration — accepts legacy single-provider or multi-provider format */
|
||||||
|
llm: z.union([LlmConfigSchema, LlmMultiConfigSchema]).optional(),
|
||||||
}).transform((cfg) => {
|
}).transform((cfg) => {
|
||||||
// Backward compatibility: if old daemonUrl is set but mcplocalUrl wasn't explicitly changed,
|
// Backward compatibility: if old daemonUrl is set but mcplocalUrl wasn't explicitly changed,
|
||||||
// use daemonUrl as mcplocalUrl
|
// use daemonUrl as mcplocalUrl
|
||||||
|
|||||||
293
src/cli/tests/commands/config-setup.test.ts
Normal file
293
src/cli/tests/commands/config-setup.test.ts
Normal file
@@ -0,0 +1,293 @@
|
|||||||
|
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||||
|
import { createConfigSetupCommand } from '../../src/commands/config-setup.js';
|
||||||
|
import type { ConfigSetupDeps, ConfigSetupPrompt } from '../../src/commands/config-setup.js';
|
||||||
|
import type { SecretStore } from '@mcpctl/shared';
|
||||||
|
import { mkdtempSync, rmSync, readFileSync } from 'node:fs';
|
||||||
|
import { join } from 'node:path';
|
||||||
|
import { tmpdir } from 'node:os';
|
||||||
|
|
||||||
|
let tempDir: string;
|
||||||
|
let logs: string[];
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
tempDir = mkdtempSync(join(tmpdir(), 'mcpctl-config-setup-test-'));
|
||||||
|
logs = [];
|
||||||
|
});
|
||||||
|
|
||||||
|
function cleanup() {
|
||||||
|
rmSync(tempDir, { recursive: true, force: true });
|
||||||
|
}
|
||||||
|
|
||||||
|
function mockSecretStore(secrets: Record<string, string> = {}): SecretStore {
|
||||||
|
const store: Record<string, string> = { ...secrets };
|
||||||
|
return {
|
||||||
|
get: vi.fn(async (key: string) => store[key] ?? null),
|
||||||
|
set: vi.fn(async (key: string, value: string) => { store[key] = value; }),
|
||||||
|
delete: vi.fn(async () => true),
|
||||||
|
backend: () => 'mock',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
function mockPrompt(answers: unknown[]): ConfigSetupPrompt {
|
||||||
|
let callIndex = 0;
|
||||||
|
return {
|
||||||
|
select: vi.fn(async () => answers[callIndex++]),
|
||||||
|
input: vi.fn(async () => answers[callIndex++] as string),
|
||||||
|
password: vi.fn(async () => answers[callIndex++] as string),
|
||||||
|
confirm: vi.fn(async () => answers[callIndex++] as boolean),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
function buildDeps(overrides: {
|
||||||
|
secrets?: Record<string, string>;
|
||||||
|
answers?: unknown[];
|
||||||
|
fetchModels?: ConfigSetupDeps['fetchModels'];
|
||||||
|
whichBinary?: ConfigSetupDeps['whichBinary'];
|
||||||
|
} = {}): ConfigSetupDeps {
|
||||||
|
return {
|
||||||
|
configDeps: { configDir: tempDir },
|
||||||
|
secretStore: mockSecretStore(overrides.secrets),
|
||||||
|
log: (...args: string[]) => logs.push(args.join(' ')),
|
||||||
|
prompt: mockPrompt(overrides.answers ?? []),
|
||||||
|
fetchModels: overrides.fetchModels ?? vi.fn(async () => []),
|
||||||
|
whichBinary: overrides.whichBinary ?? vi.fn(async () => '/usr/bin/gemini'),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
function readConfig(): Record<string, unknown> {
|
||||||
|
const raw = readFileSync(join(tempDir, 'config.json'), 'utf-8');
|
||||||
|
return JSON.parse(raw) as Record<string, unknown>;
|
||||||
|
}
|
||||||
|
|
||||||
|
async function runSetup(deps: ConfigSetupDeps): Promise<void> {
|
||||||
|
const cmd = createConfigSetupCommand(deps);
|
||||||
|
await cmd.parseAsync([], { from: 'user' });
|
||||||
|
}
|
||||||
|
|
||||||
|
describe('config setup wizard', () => {
|
||||||
|
describe('provider: none', () => {
|
||||||
|
it('disables LLM and saves config', async () => {
|
||||||
|
const deps = buildDeps({ answers: ['simple', 'none'] });
|
||||||
|
await runSetup(deps);
|
||||||
|
|
||||||
|
const config = readConfig();
|
||||||
|
expect(config.llm).toEqual({ provider: 'none' });
|
||||||
|
expect(logs.some((l) => l.includes('LLM disabled'))).toBe(true);
|
||||||
|
cleanup();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('provider: gemini-cli', () => {
|
||||||
|
it('auto-detects binary path and saves config', async () => {
|
||||||
|
// Answers: select provider, select model (no binary prompt — auto-detected)
|
||||||
|
const deps = buildDeps({
|
||||||
|
answers: ['simple', 'gemini-cli', 'gemini-2.5-flash'],
|
||||||
|
whichBinary: vi.fn(async () => '/home/user/.npm-global/bin/gemini'),
|
||||||
|
});
|
||||||
|
await runSetup(deps);
|
||||||
|
|
||||||
|
const config = readConfig();
|
||||||
|
const llm = config.llm as Record<string, unknown>;
|
||||||
|
expect(llm.provider).toBe('gemini-cli');
|
||||||
|
expect(llm.model).toBe('gemini-2.5-flash');
|
||||||
|
expect(llm.binaryPath).toBe('/home/user/.npm-global/bin/gemini');
|
||||||
|
expect(logs.some((l) => l.includes('Found gemini at'))).toBe(true);
|
||||||
|
cleanup();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('prompts for manual path when binary not found', async () => {
|
||||||
|
// Answers: select provider, select model, enter manual path
|
||||||
|
const deps = buildDeps({
|
||||||
|
answers: ['simple', 'gemini-cli', 'gemini-2.5-flash', '/opt/gemini'],
|
||||||
|
whichBinary: vi.fn(async () => null),
|
||||||
|
});
|
||||||
|
await runSetup(deps);
|
||||||
|
|
||||||
|
const config = readConfig();
|
||||||
|
const llm = config.llm as Record<string, unknown>;
|
||||||
|
expect(llm.binaryPath).toBe('/opt/gemini');
|
||||||
|
expect(logs.some((l) => l.includes('not found'))).toBe(true);
|
||||||
|
cleanup();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('saves gemini-cli with custom model', async () => {
|
||||||
|
// Answers: select provider, select custom, enter model name
|
||||||
|
const deps = buildDeps({
|
||||||
|
answers: ['simple', 'gemini-cli', '__custom__', 'gemini-3.0-flash'],
|
||||||
|
whichBinary: vi.fn(async () => '/usr/bin/gemini'),
|
||||||
|
});
|
||||||
|
await runSetup(deps);
|
||||||
|
|
||||||
|
const config = readConfig();
|
||||||
|
const llm = config.llm as Record<string, unknown>;
|
||||||
|
expect(llm.model).toBe('gemini-3.0-flash');
|
||||||
|
cleanup();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('provider: ollama', () => {
|
||||||
|
it('fetches models and allows selection', async () => {
|
||||||
|
const fetchModels = vi.fn(async () => ['llama3.2', 'codellama', 'mistral']);
|
||||||
|
// Answers: select provider, enter URL, select model
|
||||||
|
const deps = buildDeps({
|
||||||
|
answers: ['simple', 'ollama', 'http://localhost:11434', 'codellama'],
|
||||||
|
fetchModels,
|
||||||
|
});
|
||||||
|
await runSetup(deps);
|
||||||
|
|
||||||
|
expect(fetchModels).toHaveBeenCalledWith('http://localhost:11434', '/api/tags');
|
||||||
|
const config = readConfig();
|
||||||
|
const llm = config.llm as Record<string, unknown>;
|
||||||
|
expect(llm.provider).toBe('ollama');
|
||||||
|
expect(llm.model).toBe('codellama');
|
||||||
|
expect(llm.url).toBe('http://localhost:11434');
|
||||||
|
cleanup();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('falls back to manual input when fetch fails', async () => {
|
||||||
|
const fetchModels = vi.fn(async () => []);
|
||||||
|
// Answers: select provider, enter URL, enter model manually
|
||||||
|
const deps = buildDeps({
|
||||||
|
answers: ['simple', 'ollama', 'http://localhost:11434', 'llama3.2'],
|
||||||
|
fetchModels,
|
||||||
|
});
|
||||||
|
await runSetup(deps);
|
||||||
|
|
||||||
|
const config = readConfig();
|
||||||
|
expect((config.llm as Record<string, unknown>).model).toBe('llama3.2');
|
||||||
|
cleanup();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('provider: anthropic', () => {
|
||||||
|
it('prompts for API key and saves to secret store', async () => {
|
||||||
|
// Answers: select provider, enter API key, select model
|
||||||
|
const deps = buildDeps({
|
||||||
|
answers: ['simple', 'anthropic', 'sk-ant-new-key', 'claude-haiku-3-5-20241022'],
|
||||||
|
});
|
||||||
|
await runSetup(deps);
|
||||||
|
|
||||||
|
expect(deps.secretStore.set).toHaveBeenCalledWith('anthropic-api-key', 'sk-ant-new-key');
|
||||||
|
const config = readConfig();
|
||||||
|
const llm = config.llm as Record<string, unknown>;
|
||||||
|
expect(llm.provider).toBe('anthropic');
|
||||||
|
expect(llm.model).toBe('claude-haiku-3-5-20241022');
|
||||||
|
// API key should NOT be in config file
|
||||||
|
expect(llm).not.toHaveProperty('apiKey');
|
||||||
|
cleanup();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('shows existing key masked and allows keeping it', async () => {
|
||||||
|
// Answers: select provider, confirm change=false, select model
|
||||||
|
const deps = buildDeps({
|
||||||
|
secrets: { 'anthropic-api-key': 'sk-ant-existing-key-1234' },
|
||||||
|
answers: ['simple', 'anthropic', false, 'claude-sonnet-4-20250514'],
|
||||||
|
});
|
||||||
|
await runSetup(deps);
|
||||||
|
|
||||||
|
// Should NOT have called set (kept existing key)
|
||||||
|
expect(deps.secretStore.set).not.toHaveBeenCalled();
|
||||||
|
const config = readConfig();
|
||||||
|
expect((config.llm as Record<string, unknown>).model).toBe('claude-sonnet-4-20250514');
|
||||||
|
cleanup();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('allows replacing existing key', async () => {
|
||||||
|
// Answers: select provider, confirm change=true, enter new key, select model
|
||||||
|
const deps = buildDeps({
|
||||||
|
secrets: { 'anthropic-api-key': 'sk-ant-old' },
|
||||||
|
answers: ['simple', 'anthropic', true, 'sk-ant-new', 'claude-haiku-3-5-20241022'],
|
||||||
|
});
|
||||||
|
await runSetup(deps);
|
||||||
|
|
||||||
|
expect(deps.secretStore.set).toHaveBeenCalledWith('anthropic-api-key', 'sk-ant-new');
|
||||||
|
cleanup();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('provider: vllm', () => {
|
||||||
|
it('fetches models from vLLM and allows selection', async () => {
|
||||||
|
const fetchModels = vi.fn(async () => ['my-model', 'llama-70b']);
|
||||||
|
// Answers: select provider, enter URL, select model
|
||||||
|
const deps = buildDeps({
|
||||||
|
answers: ['simple', 'vllm', 'http://gpu:8000', 'llama-70b'],
|
||||||
|
fetchModels,
|
||||||
|
});
|
||||||
|
await runSetup(deps);
|
||||||
|
|
||||||
|
expect(fetchModels).toHaveBeenCalledWith('http://gpu:8000', '/v1/models');
|
||||||
|
const config = readConfig();
|
||||||
|
const llm = config.llm as Record<string, unknown>;
|
||||||
|
expect(llm.provider).toBe('vllm');
|
||||||
|
expect(llm.url).toBe('http://gpu:8000');
|
||||||
|
expect(llm.model).toBe('llama-70b');
|
||||||
|
cleanup();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('provider: openai', () => {
|
||||||
|
it('prompts for key, model, and optional custom endpoint', async () => {
|
||||||
|
// Answers: select provider, enter key, enter model, confirm custom URL=true, enter URL
|
||||||
|
const deps = buildDeps({
|
||||||
|
answers: ['simple', 'openai', 'sk-openai-key', 'gpt-4o', true, 'https://custom.api.com'],
|
||||||
|
});
|
||||||
|
await runSetup(deps);
|
||||||
|
|
||||||
|
expect(deps.secretStore.set).toHaveBeenCalledWith('openai-api-key', 'sk-openai-key');
|
||||||
|
const config = readConfig();
|
||||||
|
const llm = config.llm as Record<string, unknown>;
|
||||||
|
expect(llm.provider).toBe('openai');
|
||||||
|
expect(llm.model).toBe('gpt-4o');
|
||||||
|
expect(llm.url).toBe('https://custom.api.com');
|
||||||
|
cleanup();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('skips custom URL when not requested', async () => {
|
||||||
|
// Answers: select provider, enter key, enter model, confirm custom URL=false
|
||||||
|
const deps = buildDeps({
|
||||||
|
answers: ['simple', 'openai', 'sk-openai-key', 'gpt-4o-mini', false],
|
||||||
|
});
|
||||||
|
await runSetup(deps);
|
||||||
|
|
||||||
|
const config = readConfig();
|
||||||
|
const llm = config.llm as Record<string, unknown>;
|
||||||
|
expect(llm.url).toBeUndefined();
|
||||||
|
cleanup();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('provider: deepseek', () => {
|
||||||
|
it('prompts for key and model', async () => {
|
||||||
|
// Answers: select provider, enter key, select model
|
||||||
|
const deps = buildDeps({
|
||||||
|
answers: ['simple', 'deepseek', 'sk-ds-key', 'deepseek-chat'],
|
||||||
|
});
|
||||||
|
await runSetup(deps);
|
||||||
|
|
||||||
|
expect(deps.secretStore.set).toHaveBeenCalledWith('deepseek-api-key', 'sk-ds-key');
|
||||||
|
const config = readConfig();
|
||||||
|
const llm = config.llm as Record<string, unknown>;
|
||||||
|
expect(llm.provider).toBe('deepseek');
|
||||||
|
expect(llm.model).toBe('deepseek-chat');
|
||||||
|
cleanup();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('output messages', () => {
|
||||||
|
it('shows restart instruction', async () => {
|
||||||
|
const deps = buildDeps({ answers: ['simple', 'gemini-cli', 'gemini-2.5-flash'] });
|
||||||
|
await runSetup(deps);
|
||||||
|
|
||||||
|
expect(logs.some((l) => l.includes('systemctl --user restart mcplocal'))).toBe(true);
|
||||||
|
cleanup();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('shows configured provider and model', async () => {
|
||||||
|
const deps = buildDeps({ answers: ['simple', 'gemini-cli', 'gemini-2.5-flash'] });
|
||||||
|
await runSetup(deps);
|
||||||
|
|
||||||
|
expect(logs.some((l) => l.includes('gemini-cli') && l.includes('gemini-2.5-flash'))).toBe(true);
|
||||||
|
cleanup();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
@@ -30,8 +30,6 @@ describe('project with new fields', () => {
|
|||||||
'project', 'smart-home',
|
'project', 'smart-home',
|
||||||
'-d', 'Smart home project',
|
'-d', 'Smart home project',
|
||||||
'--proxy-mode', 'filtered',
|
'--proxy-mode', 'filtered',
|
||||||
'--proxy-mode-llm-provider', 'gemini-cli',
|
|
||||||
'--proxy-mode-llm-model', 'gemini-2.0-flash',
|
|
||||||
'--server', 'my-grafana',
|
'--server', 'my-grafana',
|
||||||
'--server', 'my-ha',
|
'--server', 'my-ha',
|
||||||
], { from: 'user' });
|
], { from: 'user' });
|
||||||
@@ -40,8 +38,6 @@ describe('project with new fields', () => {
|
|||||||
name: 'smart-home',
|
name: 'smart-home',
|
||||||
description: 'Smart home project',
|
description: 'Smart home project',
|
||||||
proxyMode: 'filtered',
|
proxyMode: 'filtered',
|
||||||
llmProvider: 'gemini-cli',
|
|
||||||
llmModel: 'gemini-2.0-flash',
|
|
||||||
servers: ['my-grafana', 'my-ha'],
|
servers: ['my-grafana', 'my-ha'],
|
||||||
}));
|
}));
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -3,19 +3,39 @@ import { mkdtempSync, rmSync } from 'node:fs';
|
|||||||
import { join } from 'node:path';
|
import { join } from 'node:path';
|
||||||
import { tmpdir } from 'node:os';
|
import { tmpdir } from 'node:os';
|
||||||
import { createStatusCommand } from '../../src/commands/status.js';
|
import { createStatusCommand } from '../../src/commands/status.js';
|
||||||
|
import type { StatusCommandDeps } from '../../src/commands/status.js';
|
||||||
import { saveConfig, DEFAULT_CONFIG } from '../../src/config/index.js';
|
import { saveConfig, DEFAULT_CONFIG } from '../../src/config/index.js';
|
||||||
import { saveCredentials } from '../../src/auth/index.js';
|
import { saveCredentials } from '../../src/auth/index.js';
|
||||||
|
|
||||||
let tempDir: string;
|
let tempDir: string;
|
||||||
let output: string[];
|
let output: string[];
|
||||||
|
let written: string[];
|
||||||
|
|
||||||
function log(...args: string[]) {
|
function log(...args: string[]) {
|
||||||
output.push(args.join(' '));
|
output.push(args.join(' '));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function write(text: string) {
|
||||||
|
written.push(text);
|
||||||
|
}
|
||||||
|
|
||||||
|
function baseDeps(overrides?: Partial<StatusCommandDeps>): Partial<StatusCommandDeps> {
|
||||||
|
return {
|
||||||
|
configDeps: { configDir: tempDir },
|
||||||
|
credentialsDeps: { configDir: tempDir },
|
||||||
|
log,
|
||||||
|
write,
|
||||||
|
checkHealth: async () => true,
|
||||||
|
fetchProviders: async () => null,
|
||||||
|
isTTY: false,
|
||||||
|
...overrides,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
tempDir = mkdtempSync(join(tmpdir(), 'mcpctl-status-test-'));
|
tempDir = mkdtempSync(join(tmpdir(), 'mcpctl-status-test-'));
|
||||||
output = [];
|
output = [];
|
||||||
|
written = [];
|
||||||
});
|
});
|
||||||
|
|
||||||
afterEach(() => {
|
afterEach(() => {
|
||||||
@@ -24,12 +44,7 @@ afterEach(() => {
|
|||||||
|
|
||||||
describe('status command', () => {
|
describe('status command', () => {
|
||||||
it('shows status in table format', async () => {
|
it('shows status in table format', async () => {
|
||||||
const cmd = createStatusCommand({
|
const cmd = createStatusCommand(baseDeps());
|
||||||
configDeps: { configDir: tempDir },
|
|
||||||
credentialsDeps: { configDir: tempDir },
|
|
||||||
log,
|
|
||||||
checkHealth: async () => true,
|
|
||||||
});
|
|
||||||
await cmd.parseAsync([], { from: 'user' });
|
await cmd.parseAsync([], { from: 'user' });
|
||||||
const out = output.join('\n');
|
const out = output.join('\n');
|
||||||
expect(out).toContain('mcpctl v');
|
expect(out).toContain('mcpctl v');
|
||||||
@@ -39,46 +54,26 @@ describe('status command', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
it('shows unreachable when daemons are down', async () => {
|
it('shows unreachable when daemons are down', async () => {
|
||||||
const cmd = createStatusCommand({
|
const cmd = createStatusCommand(baseDeps({ checkHealth: async () => false }));
|
||||||
configDeps: { configDir: tempDir },
|
|
||||||
credentialsDeps: { configDir: tempDir },
|
|
||||||
log,
|
|
||||||
checkHealth: async () => false,
|
|
||||||
});
|
|
||||||
await cmd.parseAsync([], { from: 'user' });
|
await cmd.parseAsync([], { from: 'user' });
|
||||||
expect(output.join('\n')).toContain('unreachable');
|
expect(output.join('\n')).toContain('unreachable');
|
||||||
});
|
});
|
||||||
|
|
||||||
it('shows not logged in when no credentials', async () => {
|
it('shows not logged in when no credentials', async () => {
|
||||||
const cmd = createStatusCommand({
|
const cmd = createStatusCommand(baseDeps());
|
||||||
configDeps: { configDir: tempDir },
|
|
||||||
credentialsDeps: { configDir: tempDir },
|
|
||||||
log,
|
|
||||||
checkHealth: async () => true,
|
|
||||||
});
|
|
||||||
await cmd.parseAsync([], { from: 'user' });
|
await cmd.parseAsync([], { from: 'user' });
|
||||||
expect(output.join('\n')).toContain('not logged in');
|
expect(output.join('\n')).toContain('not logged in');
|
||||||
});
|
});
|
||||||
|
|
||||||
it('shows logged in user when credentials exist', async () => {
|
it('shows logged in user when credentials exist', async () => {
|
||||||
saveCredentials({ token: 'tok', mcpdUrl: 'http://x:3100', user: 'alice@example.com' }, { configDir: tempDir });
|
saveCredentials({ token: 'tok', mcpdUrl: 'http://x:3100', user: 'alice@example.com' }, { configDir: tempDir });
|
||||||
const cmd = createStatusCommand({
|
const cmd = createStatusCommand(baseDeps());
|
||||||
configDeps: { configDir: tempDir },
|
|
||||||
credentialsDeps: { configDir: tempDir },
|
|
||||||
log,
|
|
||||||
checkHealth: async () => true,
|
|
||||||
});
|
|
||||||
await cmd.parseAsync([], { from: 'user' });
|
await cmd.parseAsync([], { from: 'user' });
|
||||||
expect(output.join('\n')).toContain('logged in as alice@example.com');
|
expect(output.join('\n')).toContain('logged in as alice@example.com');
|
||||||
});
|
});
|
||||||
|
|
||||||
it('shows status in JSON format', async () => {
|
it('shows status in JSON format', async () => {
|
||||||
const cmd = createStatusCommand({
|
const cmd = createStatusCommand(baseDeps());
|
||||||
configDeps: { configDir: tempDir },
|
|
||||||
credentialsDeps: { configDir: tempDir },
|
|
||||||
log,
|
|
||||||
checkHealth: async () => true,
|
|
||||||
});
|
|
||||||
await cmd.parseAsync(['-o', 'json'], { from: 'user' });
|
await cmd.parseAsync(['-o', 'json'], { from: 'user' });
|
||||||
const parsed = JSON.parse(output[0]) as Record<string, unknown>;
|
const parsed = JSON.parse(output[0]) as Record<string, unknown>;
|
||||||
expect(parsed['version']).toBe('0.1.0');
|
expect(parsed['version']).toBe('0.1.0');
|
||||||
@@ -87,12 +82,7 @@ describe('status command', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
it('shows status in YAML format', async () => {
|
it('shows status in YAML format', async () => {
|
||||||
const cmd = createStatusCommand({
|
const cmd = createStatusCommand(baseDeps({ checkHealth: async () => false }));
|
||||||
configDeps: { configDir: tempDir },
|
|
||||||
credentialsDeps: { configDir: tempDir },
|
|
||||||
log,
|
|
||||||
checkHealth: async () => false,
|
|
||||||
});
|
|
||||||
await cmd.parseAsync(['-o', 'yaml'], { from: 'user' });
|
await cmd.parseAsync(['-o', 'yaml'], { from: 'user' });
|
||||||
expect(output[0]).toContain('mcplocalReachable: false');
|
expect(output[0]).toContain('mcplocalReachable: false');
|
||||||
});
|
});
|
||||||
@@ -100,15 +90,12 @@ describe('status command', () => {
|
|||||||
it('checks correct URLs from config', async () => {
|
it('checks correct URLs from config', async () => {
|
||||||
saveConfig({ ...DEFAULT_CONFIG, mcplocalUrl: 'http://local:3200', mcpdUrl: 'http://remote:3100' }, { configDir: tempDir });
|
saveConfig({ ...DEFAULT_CONFIG, mcplocalUrl: 'http://local:3200', mcpdUrl: 'http://remote:3100' }, { configDir: tempDir });
|
||||||
const checkedUrls: string[] = [];
|
const checkedUrls: string[] = [];
|
||||||
const cmd = createStatusCommand({
|
const cmd = createStatusCommand(baseDeps({
|
||||||
configDeps: { configDir: tempDir },
|
|
||||||
credentialsDeps: { configDir: tempDir },
|
|
||||||
log,
|
|
||||||
checkHealth: async (url) => {
|
checkHealth: async (url) => {
|
||||||
checkedUrls.push(url);
|
checkedUrls.push(url);
|
||||||
return false;
|
return false;
|
||||||
},
|
},
|
||||||
});
|
}));
|
||||||
await cmd.parseAsync([], { from: 'user' });
|
await cmd.parseAsync([], { from: 'user' });
|
||||||
expect(checkedUrls).toContain('http://local:3200');
|
expect(checkedUrls).toContain('http://local:3200');
|
||||||
expect(checkedUrls).toContain('http://remote:3100');
|
expect(checkedUrls).toContain('http://remote:3100');
|
||||||
@@ -116,14 +103,100 @@ describe('status command', () => {
|
|||||||
|
|
||||||
it('shows registries from config', async () => {
|
it('shows registries from config', async () => {
|
||||||
saveConfig({ ...DEFAULT_CONFIG, registries: ['official'] }, { configDir: tempDir });
|
saveConfig({ ...DEFAULT_CONFIG, registries: ['official'] }, { configDir: tempDir });
|
||||||
const cmd = createStatusCommand({
|
const cmd = createStatusCommand(baseDeps());
|
||||||
configDeps: { configDir: tempDir },
|
|
||||||
credentialsDeps: { configDir: tempDir },
|
|
||||||
log,
|
|
||||||
checkHealth: async () => true,
|
|
||||||
});
|
|
||||||
await cmd.parseAsync([], { from: 'user' });
|
await cmd.parseAsync([], { from: 'user' });
|
||||||
expect(output.join('\n')).toContain('official');
|
expect(output.join('\n')).toContain('official');
|
||||||
expect(output.join('\n')).not.toContain('glama');
|
expect(output.join('\n')).not.toContain('glama');
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('shows LLM not configured hint when no LLM is set', async () => {
|
||||||
|
const cmd = createStatusCommand(baseDeps());
|
||||||
|
await cmd.parseAsync([], { from: 'user' });
|
||||||
|
const out = output.join('\n');
|
||||||
|
expect(out).toContain('LLM:');
|
||||||
|
expect(out).toContain('not configured');
|
||||||
|
expect(out).toContain('mcpctl config setup');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('shows green check when LLM is healthy (non-TTY)', async () => {
|
||||||
|
saveConfig({ ...DEFAULT_CONFIG, llm: { provider: 'anthropic', model: 'claude-haiku-3-5-20241022' } }, { configDir: tempDir });
|
||||||
|
const cmd = createStatusCommand(baseDeps({ checkLlm: async () => 'ok' }));
|
||||||
|
await cmd.parseAsync([], { from: 'user' });
|
||||||
|
const out = output.join('\n');
|
||||||
|
expect(out).toContain('anthropic / claude-haiku-3-5-20241022');
|
||||||
|
expect(out).toContain('✓ ok');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('shows red cross when LLM check fails (non-TTY)', async () => {
|
||||||
|
saveConfig({ ...DEFAULT_CONFIG, llm: { provider: 'gemini-cli', model: 'gemini-2.5-flash' } }, { configDir: tempDir });
|
||||||
|
const cmd = createStatusCommand(baseDeps({ checkLlm: async () => 'not authenticated' }));
|
||||||
|
await cmd.parseAsync([], { from: 'user' });
|
||||||
|
const out = output.join('\n');
|
||||||
|
expect(out).toContain('✗ not authenticated');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('shows error message from mcplocal', async () => {
|
||||||
|
saveConfig({ ...DEFAULT_CONFIG, llm: { provider: 'gemini-cli', model: 'gemini-2.5-flash' } }, { configDir: tempDir });
|
||||||
|
const cmd = createStatusCommand(baseDeps({ checkLlm: async () => 'binary not found' }));
|
||||||
|
await cmd.parseAsync([], { from: 'user' });
|
||||||
|
expect(output.join('\n')).toContain('✗ binary not found');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('queries mcplocal URL for LLM health', async () => {
|
||||||
|
saveConfig({ ...DEFAULT_CONFIG, mcplocalUrl: 'http://custom:9999', llm: { provider: 'gemini-cli', model: 'gemini-2.5-flash' } }, { configDir: tempDir });
|
||||||
|
let queriedUrl = '';
|
||||||
|
const cmd = createStatusCommand(baseDeps({
|
||||||
|
checkLlm: async (url) => { queriedUrl = url; return 'ok'; },
|
||||||
|
}));
|
||||||
|
await cmd.parseAsync([], { from: 'user' });
|
||||||
|
expect(queriedUrl).toBe('http://custom:9999');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('uses spinner on TTY and writes final result', async () => {
|
||||||
|
saveConfig({ ...DEFAULT_CONFIG, llm: { provider: 'gemini-cli', model: 'gemini-2.5-flash' } }, { configDir: tempDir });
|
||||||
|
const cmd = createStatusCommand(baseDeps({
|
||||||
|
isTTY: true,
|
||||||
|
checkLlm: async () => 'ok',
|
||||||
|
}));
|
||||||
|
await cmd.parseAsync([], { from: 'user' });
|
||||||
|
// On TTY, the final LLM line goes through write(), not log()
|
||||||
|
const finalWrite = written[written.length - 1];
|
||||||
|
expect(finalWrite).toContain('gemini-cli / gemini-2.5-flash');
|
||||||
|
expect(finalWrite).toContain('✓ ok');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('uses spinner on TTY and shows failure', async () => {
|
||||||
|
saveConfig({ ...DEFAULT_CONFIG, llm: { provider: 'gemini-cli', model: 'gemini-2.5-flash' } }, { configDir: tempDir });
|
||||||
|
const cmd = createStatusCommand(baseDeps({
|
||||||
|
isTTY: true,
|
||||||
|
checkLlm: async () => 'not authenticated',
|
||||||
|
}));
|
||||||
|
await cmd.parseAsync([], { from: 'user' });
|
||||||
|
const finalWrite = written[written.length - 1];
|
||||||
|
expect(finalWrite).toContain('✗ not authenticated');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('shows not configured when LLM provider is none', async () => {
|
||||||
|
saveConfig({ ...DEFAULT_CONFIG, llm: { provider: 'none' } }, { configDir: tempDir });
|
||||||
|
const cmd = createStatusCommand(baseDeps());
|
||||||
|
await cmd.parseAsync([], { from: 'user' });
|
||||||
|
expect(output.join('\n')).toContain('not configured');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('includes llm and llmStatus in JSON output', async () => {
|
||||||
|
saveConfig({ ...DEFAULT_CONFIG, llm: { provider: 'gemini-cli', model: 'gemini-2.5-flash' } }, { configDir: tempDir });
|
||||||
|
const cmd = createStatusCommand(baseDeps({ checkLlm: async () => 'ok' }));
|
||||||
|
await cmd.parseAsync(['-o', 'json'], { from: 'user' });
|
||||||
|
const parsed = JSON.parse(output[0]) as Record<string, unknown>;
|
||||||
|
expect(parsed['llm']).toBe('gemini-cli / gemini-2.5-flash');
|
||||||
|
expect(parsed['llmStatus']).toBe('ok');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('includes null llm in JSON output when not configured', async () => {
|
||||||
|
const cmd = createStatusCommand(baseDeps());
|
||||||
|
await cmd.parseAsync(['-o', 'json'], { from: 'user' });
|
||||||
|
const parsed = JSON.parse(output[0]) as Record<string, unknown>;
|
||||||
|
expect(parsed['llm']).toBeNull();
|
||||||
|
expect(parsed['llmStatus']).toBeNull();
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -15,7 +15,7 @@ describe('fish completions', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
it('does not offer resource types without __mcpctl_needs_resource_type guard', () => {
|
it('does not offer resource types without __mcpctl_needs_resource_type guard', () => {
|
||||||
const resourceTypes = ['servers', 'instances', 'secrets', 'templates', 'projects', 'users', 'groups', 'rbac'];
|
const resourceTypes = ['servers', 'instances', 'secrets', 'templates', 'projects', 'users', 'groups', 'rbac', 'prompts', 'promptrequests'];
|
||||||
const lines = fishFile.split('\n').filter((l) => l.startsWith('complete '));
|
const lines = fishFile.split('\n').filter((l) => l.startsWith('complete '));
|
||||||
|
|
||||||
for (const line of lines) {
|
for (const line of lines) {
|
||||||
|
|||||||
@@ -48,6 +48,33 @@ export async function refreshProjectUpstreams(
|
|||||||
return syncUpstreams(router, mcpdClient, servers);
|
return syncUpstreams(router, mcpdClient, servers);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Fetch a project's LLM config (llmProvider, llmModel) from mcpd.
|
||||||
|
* These are the project-level "recommendations" — local overrides take priority.
|
||||||
|
*/
|
||||||
|
export interface ProjectLlmConfig {
|
||||||
|
llmProvider?: string;
|
||||||
|
llmModel?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function fetchProjectLlmConfig(
|
||||||
|
mcpdClient: McpdClient,
|
||||||
|
projectName: string,
|
||||||
|
): Promise<ProjectLlmConfig> {
|
||||||
|
try {
|
||||||
|
const project = await mcpdClient.get<{
|
||||||
|
llmProvider?: string;
|
||||||
|
llmModel?: string;
|
||||||
|
}>(`/api/v1/projects/${encodeURIComponent(projectName)}`);
|
||||||
|
const config: ProjectLlmConfig = {};
|
||||||
|
if (project.llmProvider) config.llmProvider = project.llmProvider;
|
||||||
|
if (project.llmModel) config.llmModel = project.llmModel;
|
||||||
|
return config;
|
||||||
|
} catch {
|
||||||
|
return {};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/** Shared sync logic: reconcile a router's upstreams with a server list. */
|
/** Shared sync logic: reconcile a router's upstreams with a server list. */
|
||||||
function syncUpstreams(router: McpRouter, mcpdClient: McpdClient, servers: McpdServer[]): string[] {
|
function syncUpstreams(router: McpRouter, mcpdClient: McpdClient, servers: McpdServer[]): string[] {
|
||||||
const registered: string[] = [];
|
const registered: string[] = [];
|
||||||
|
|||||||
@@ -37,6 +37,119 @@ function loadUserToken(): string {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export interface LlmFileConfig {
|
||||||
|
provider: string;
|
||||||
|
model?: string;
|
||||||
|
url?: string;
|
||||||
|
binaryPath?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Multi-provider entry from config file. */
|
||||||
|
export interface LlmProviderFileEntry {
|
||||||
|
name: string;
|
||||||
|
type: string;
|
||||||
|
model?: string;
|
||||||
|
url?: string;
|
||||||
|
binaryPath?: string;
|
||||||
|
tier?: 'fast' | 'heavy';
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ProjectLlmOverride {
|
||||||
|
model?: string;
|
||||||
|
provider?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface LlmMultiFileConfig {
|
||||||
|
providers: LlmProviderFileEntry[];
|
||||||
|
}
|
||||||
|
|
||||||
|
interface McpctlConfig {
|
||||||
|
llm?: LlmFileConfig | LlmMultiFileConfig;
|
||||||
|
projects?: Record<string, { llm?: ProjectLlmOverride }>;
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Cached config for the process lifetime (reloaded on SIGHUP if needed). */
|
||||||
|
let cachedConfig: McpctlConfig | null = null;
|
||||||
|
|
||||||
|
function loadFullConfig(): McpctlConfig {
|
||||||
|
if (cachedConfig) return cachedConfig;
|
||||||
|
try {
|
||||||
|
const configPath = join(homedir(), '.mcpctl', 'config.json');
|
||||||
|
if (!existsSync(configPath)) return {};
|
||||||
|
const raw = readFileSync(configPath, 'utf-8');
|
||||||
|
cachedConfig = JSON.parse(raw) as McpctlConfig;
|
||||||
|
return cachedConfig;
|
||||||
|
} catch {
|
||||||
|
return {};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Type guard: is config the multi-provider format? */
|
||||||
|
function isMultiConfig(llm: LlmFileConfig | LlmMultiFileConfig): llm is LlmMultiFileConfig {
|
||||||
|
return 'providers' in llm && Array.isArray((llm as LlmMultiFileConfig).providers);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Load LLM configuration from ~/.mcpctl/config.json.
|
||||||
|
* Returns undefined if no LLM section is configured.
|
||||||
|
* @deprecated Use loadLlmProviders() for multi-provider support.
|
||||||
|
*/
|
||||||
|
export function loadLlmConfig(): LlmFileConfig | undefined {
|
||||||
|
const config = loadFullConfig();
|
||||||
|
if (!config.llm) return undefined;
|
||||||
|
if (isMultiConfig(config.llm)) {
|
||||||
|
// Multi-provider format — return first provider as legacy compat
|
||||||
|
const first = config.llm.providers[0];
|
||||||
|
if (!first) return undefined;
|
||||||
|
const legacy: LlmFileConfig = { provider: first.type };
|
||||||
|
if (first.model) legacy.model = first.model;
|
||||||
|
if (first.url) legacy.url = first.url;
|
||||||
|
if (first.binaryPath) legacy.binaryPath = first.binaryPath;
|
||||||
|
return legacy;
|
||||||
|
}
|
||||||
|
if (!config.llm.provider || config.llm.provider === 'none') return undefined;
|
||||||
|
return config.llm;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Load LLM providers from ~/.mcpctl/config.json.
|
||||||
|
* Normalizes both legacy single-provider and multi-provider formats.
|
||||||
|
* Returns empty array if no LLM is configured.
|
||||||
|
*/
|
||||||
|
export function loadLlmProviders(): LlmProviderFileEntry[] {
|
||||||
|
const config = loadFullConfig();
|
||||||
|
if (!config.llm) return [];
|
||||||
|
|
||||||
|
if (isMultiConfig(config.llm)) {
|
||||||
|
return config.llm.providers.filter((p) => p.type !== 'none');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Legacy single-provider format → normalize to one entry
|
||||||
|
if (!config.llm.provider || config.llm.provider === 'none') return [];
|
||||||
|
const entry: LlmProviderFileEntry = {
|
||||||
|
name: config.llm.provider,
|
||||||
|
type: config.llm.provider,
|
||||||
|
};
|
||||||
|
if (config.llm.model) entry.model = config.llm.model;
|
||||||
|
if (config.llm.url) entry.url = config.llm.url;
|
||||||
|
if (config.llm.binaryPath) entry.binaryPath = config.llm.binaryPath;
|
||||||
|
return [entry];
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Load per-project LLM override from ~/.mcpctl/config.json.
|
||||||
|
* Returns the project-specific model/provider override, or undefined.
|
||||||
|
*/
|
||||||
|
export function loadProjectLlmOverride(projectName: string): ProjectLlmOverride | undefined {
|
||||||
|
const config = loadFullConfig();
|
||||||
|
return config.projects?.[projectName]?.llm;
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Reset cached config (for testing). */
|
||||||
|
export function resetConfigCache(): void {
|
||||||
|
cachedConfig = null;
|
||||||
|
}
|
||||||
|
|
||||||
export function loadHttpConfig(env: Record<string, string | undefined> = process.env): HttpConfig {
|
export function loadHttpConfig(env: Record<string, string | undefined> = process.env): HttpConfig {
|
||||||
const portStr = env['MCPLOCAL_HTTP_PORT'];
|
const portStr = env['MCPLOCAL_HTTP_PORT'];
|
||||||
const port = portStr !== undefined ? parseInt(portStr, 10) : DEFAULT_HTTP_PORT;
|
const port = portStr !== undefined ? parseInt(portStr, 10) : DEFAULT_HTTP_PORT;
|
||||||
|
|||||||
@@ -13,8 +13,10 @@ import { StreamableHTTPServerTransport } from '@modelcontextprotocol/sdk/server/
|
|||||||
import type { JSONRPCMessage } from '@modelcontextprotocol/sdk/types.js';
|
import type { JSONRPCMessage } from '@modelcontextprotocol/sdk/types.js';
|
||||||
import { McpRouter } from '../router.js';
|
import { McpRouter } from '../router.js';
|
||||||
import { ResponsePaginator } from '../llm/pagination.js';
|
import { ResponsePaginator } from '../llm/pagination.js';
|
||||||
import { refreshProjectUpstreams } from '../discovery.js';
|
import { refreshProjectUpstreams, fetchProjectLlmConfig } from '../discovery.js';
|
||||||
|
import { loadProjectLlmOverride } from './config.js';
|
||||||
import type { McpdClient } from './mcpd-client.js';
|
import type { McpdClient } from './mcpd-client.js';
|
||||||
|
import type { ProviderRegistry } from '../providers/registry.js';
|
||||||
import type { JsonRpcRequest } from '../types.js';
|
import type { JsonRpcRequest } from '../types.js';
|
||||||
|
|
||||||
interface ProjectCacheEntry {
|
interface ProjectCacheEntry {
|
||||||
@@ -29,7 +31,7 @@ interface SessionEntry {
|
|||||||
|
|
||||||
const CACHE_TTL_MS = 60_000; // 60 seconds
|
const CACHE_TTL_MS = 60_000; // 60 seconds
|
||||||
|
|
||||||
export function registerProjectMcpEndpoint(app: FastifyInstance, mcpdClient: McpdClient): void {
|
export function registerProjectMcpEndpoint(app: FastifyInstance, mcpdClient: McpdClient, providerRegistry?: ProviderRegistry | null): void {
|
||||||
const projectCache = new Map<string, ProjectCacheEntry>();
|
const projectCache = new Map<string, ProjectCacheEntry>();
|
||||||
const sessions = new Map<string, SessionEntry>();
|
const sessions = new Map<string, SessionEntry>();
|
||||||
|
|
||||||
@@ -45,8 +47,13 @@ export function registerProjectMcpEndpoint(app: FastifyInstance, mcpdClient: Mcp
|
|||||||
const router = existing?.router ?? new McpRouter();
|
const router = existing?.router ?? new McpRouter();
|
||||||
await refreshProjectUpstreams(router, mcpdClient, projectName, authToken);
|
await refreshProjectUpstreams(router, mcpdClient, projectName, authToken);
|
||||||
|
|
||||||
// Wire pagination support (no LLM provider for now — simple index fallback)
|
// Resolve project LLM model: local override → mcpd recommendation → global default
|
||||||
router.setPaginator(new ResponsePaginator(null));
|
const localOverride = loadProjectLlmOverride(projectName);
|
||||||
|
const mcpdConfig = await fetchProjectLlmConfig(mcpdClient, projectName);
|
||||||
|
const resolvedModel = localOverride?.model ?? mcpdConfig.llmModel ?? undefined;
|
||||||
|
|
||||||
|
// Wire pagination support with LLM provider and project model override
|
||||||
|
router.setPaginator(new ResponsePaginator(providerRegistry ?? null, {}, resolvedModel));
|
||||||
|
|
||||||
// Configure prompt resources with SA-scoped client for RBAC
|
// Configure prompt resources with SA-scoped client for RBAC
|
||||||
const saClient = mcpdClient.withHeaders({ 'X-Service-Account': `project:${projectName}` });
|
const saClient = mcpdClient.withHeaders({ 'X-Service-Account': `project:${projectName}` });
|
||||||
|
|||||||
@@ -10,11 +10,13 @@ import { registerProjectMcpEndpoint } from './project-mcp-endpoint.js';
|
|||||||
import type { McpRouter } from '../router.js';
|
import type { McpRouter } from '../router.js';
|
||||||
import type { HealthMonitor } from '../health.js';
|
import type { HealthMonitor } from '../health.js';
|
||||||
import type { TieredHealthMonitor } from '../health/tiered.js';
|
import type { TieredHealthMonitor } from '../health/tiered.js';
|
||||||
|
import type { ProviderRegistry } from '../providers/registry.js';
|
||||||
|
|
||||||
export interface HttpServerDeps {
|
export interface HttpServerDeps {
|
||||||
router: McpRouter;
|
router: McpRouter;
|
||||||
healthMonitor?: HealthMonitor | undefined;
|
healthMonitor?: HealthMonitor | undefined;
|
||||||
tieredHealthMonitor?: TieredHealthMonitor | undefined;
|
tieredHealthMonitor?: TieredHealthMonitor | undefined;
|
||||||
|
providerRegistry?: ProviderRegistry | null | undefined;
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function createHttpServer(
|
export async function createHttpServer(
|
||||||
@@ -79,6 +81,81 @@ export async function createHttpServer(
|
|||||||
reply.code(200).send({ status: 'ok' });
|
reply.code(200).send({ status: 'ok' });
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// LLM health check — cached to avoid burning tokens on every call.
|
||||||
|
// Does a real inference call at most once per 10 minutes.
|
||||||
|
let llmHealthCache: { result: Record<string, unknown>; expiresAt: number } | null = null;
|
||||||
|
const LLM_HEALTH_CACHE_MS = 10 * 60 * 1000; // 10 minutes
|
||||||
|
|
||||||
|
app.get('/llm/health', async (_request, reply) => {
|
||||||
|
const provider = deps.providerRegistry?.getProvider('fast') ?? null;
|
||||||
|
if (!provider) {
|
||||||
|
reply.code(200).send({ status: 'not configured' });
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Return cached result if fresh
|
||||||
|
if (llmHealthCache && Date.now() < llmHealthCache.expiresAt) {
|
||||||
|
reply.code(200).send(llmHealthCache.result);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const result = await provider.complete({
|
||||||
|
messages: [{ role: 'user', content: 'Respond with exactly: ok' }],
|
||||||
|
maxTokens: 10,
|
||||||
|
});
|
||||||
|
const ok = result.content.trim().toLowerCase().includes('ok');
|
||||||
|
const response = {
|
||||||
|
status: ok ? 'ok' : 'unexpected response',
|
||||||
|
provider: provider.name,
|
||||||
|
response: result.content.trim().slice(0, 100),
|
||||||
|
};
|
||||||
|
llmHealthCache = { result: response, expiresAt: Date.now() + LLM_HEALTH_CACHE_MS };
|
||||||
|
reply.code(200).send(response);
|
||||||
|
} catch (err) {
|
||||||
|
const msg = (err as Error).message ?? String(err);
|
||||||
|
const response = {
|
||||||
|
status: 'error',
|
||||||
|
provider: provider.name,
|
||||||
|
error: msg.slice(0, 200),
|
||||||
|
};
|
||||||
|
// Cache errors for 1 minute only (retry sooner)
|
||||||
|
llmHealthCache = { result: response, expiresAt: Date.now() + 60_000 };
|
||||||
|
reply.code(200).send(response);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// LLM models — list available models from the active provider
|
||||||
|
app.get('/llm/models', async (_request, reply) => {
|
||||||
|
const provider = deps.providerRegistry?.getProvider('fast') ?? null;
|
||||||
|
if (!provider) {
|
||||||
|
reply.code(200).send({ models: [], provider: null });
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
try {
|
||||||
|
const models = await provider.listModels();
|
||||||
|
reply.code(200).send({ models, provider: provider.name });
|
||||||
|
} catch {
|
||||||
|
reply.code(200).send({ models: [], provider: provider.name });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// LLM providers — list all registered providers with tier assignments
|
||||||
|
app.get('/llm/providers', async (_request, reply) => {
|
||||||
|
const registry = deps.providerRegistry;
|
||||||
|
if (!registry) {
|
||||||
|
reply.code(200).send({ providers: [], tiers: { fast: [], heavy: [] } });
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
reply.code(200).send({
|
||||||
|
providers: registry.list(),
|
||||||
|
tiers: {
|
||||||
|
fast: registry.getTierProviders('fast'),
|
||||||
|
heavy: registry.getTierProviders('heavy'),
|
||||||
|
},
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
// Proxy management routes to mcpd
|
// Proxy management routes to mcpd
|
||||||
const mcpdClient = new McpdClient(config.mcpdUrl, config.mcpdToken);
|
const mcpdClient = new McpdClient(config.mcpdUrl, config.mcpdToken);
|
||||||
registerProxyRoutes(app, mcpdClient);
|
registerProxyRoutes(app, mcpdClient);
|
||||||
@@ -87,7 +164,7 @@ export async function createHttpServer(
|
|||||||
registerMcpEndpoint(app, deps.router);
|
registerMcpEndpoint(app, deps.router);
|
||||||
|
|
||||||
// Project-scoped MCP endpoint at /projects/:projectName/mcp
|
// Project-scoped MCP endpoint at /projects/:projectName/mcp
|
||||||
registerProjectMcpEndpoint(app, mcpdClient);
|
registerProjectMcpEndpoint(app, mcpdClient, deps.providerRegistry);
|
||||||
|
|
||||||
return app;
|
return app;
|
||||||
}
|
}
|
||||||
|
|||||||
171
src/mcplocal/src/llm-config.ts
Normal file
171
src/mcplocal/src/llm-config.ts
Normal file
@@ -0,0 +1,171 @@
|
|||||||
|
import type { SecretStore } from '@mcpctl/shared';
|
||||||
|
import type { LlmFileConfig, LlmProviderFileEntry } from './http/config.js';
|
||||||
|
import { ProviderRegistry } from './providers/registry.js';
|
||||||
|
import { GeminiAcpProvider } from './providers/gemini-acp.js';
|
||||||
|
import { OllamaProvider } from './providers/ollama.js';
|
||||||
|
import { AnthropicProvider } from './providers/anthropic.js';
|
||||||
|
import { OpenAiProvider } from './providers/openai.js';
|
||||||
|
import { DeepSeekProvider } from './providers/deepseek.js';
|
||||||
|
import type { LlmProvider } from './providers/types.js';
|
||||||
|
import type { GeminiAcpConfig } from './providers/gemini-acp.js';
|
||||||
|
import type { OllamaConfig } from './providers/ollama.js';
|
||||||
|
import type { AnthropicConfig } from './providers/anthropic.js';
|
||||||
|
import type { OpenAiConfig } from './providers/openai.js';
|
||||||
|
import type { DeepSeekConfig } from './providers/deepseek.js';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Thin wrapper that delegates all LlmProvider methods but overrides `name`.
|
||||||
|
* Used when the user's chosen name (e.g. "vllm-local") differs from the
|
||||||
|
* underlying provider's name (e.g. "openai").
|
||||||
|
*/
|
||||||
|
class NamedProvider implements LlmProvider {
|
||||||
|
readonly name: string;
|
||||||
|
private inner: LlmProvider;
|
||||||
|
|
||||||
|
constructor(name: string, inner: LlmProvider) {
|
||||||
|
this.name = name;
|
||||||
|
this.inner = inner;
|
||||||
|
}
|
||||||
|
|
||||||
|
complete(...args: Parameters<LlmProvider['complete']>) {
|
||||||
|
return this.inner.complete(...args);
|
||||||
|
}
|
||||||
|
listModels() {
|
||||||
|
return this.inner.listModels();
|
||||||
|
}
|
||||||
|
isAvailable() {
|
||||||
|
return this.inner.isAvailable();
|
||||||
|
}
|
||||||
|
dispose() {
|
||||||
|
this.inner.dispose?.();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a single LlmProvider from a provider entry config.
|
||||||
|
* Returns null if required config is missing (logs warning).
|
||||||
|
*/
|
||||||
|
async function createSingleProvider(
|
||||||
|
entry: LlmProviderFileEntry,
|
||||||
|
secretStore: SecretStore,
|
||||||
|
): Promise<LlmProvider | null> {
|
||||||
|
switch (entry.type) {
|
||||||
|
case 'gemini-cli': {
|
||||||
|
const cfg: GeminiAcpConfig = {};
|
||||||
|
if (entry.binaryPath) cfg.binaryPath = entry.binaryPath;
|
||||||
|
if (entry.model) cfg.defaultModel = entry.model;
|
||||||
|
const provider = new GeminiAcpProvider(cfg);
|
||||||
|
provider.warmup();
|
||||||
|
return provider;
|
||||||
|
}
|
||||||
|
|
||||||
|
case 'ollama': {
|
||||||
|
const cfg: OllamaConfig = {};
|
||||||
|
if (entry.url) cfg.baseUrl = entry.url;
|
||||||
|
if (entry.model) cfg.defaultModel = entry.model;
|
||||||
|
return new OllamaProvider(cfg);
|
||||||
|
}
|
||||||
|
|
||||||
|
case 'anthropic': {
|
||||||
|
const apiKey = await secretStore.get('anthropic-api-key');
|
||||||
|
if (!apiKey) {
|
||||||
|
process.stderr.write(`Warning: Anthropic API key not found for provider "${entry.name}". Run "mcpctl config setup" to configure.\n`);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
const cfg: AnthropicConfig = { apiKey };
|
||||||
|
if (entry.model) cfg.defaultModel = entry.model;
|
||||||
|
return new AnthropicProvider(cfg);
|
||||||
|
}
|
||||||
|
|
||||||
|
case 'openai': {
|
||||||
|
const apiKey = await secretStore.get('openai-api-key');
|
||||||
|
if (!apiKey) {
|
||||||
|
process.stderr.write(`Warning: OpenAI API key not found for provider "${entry.name}". Run "mcpctl config setup" to configure.\n`);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
const cfg: OpenAiConfig = { apiKey };
|
||||||
|
if (entry.url) cfg.baseUrl = entry.url;
|
||||||
|
if (entry.model) cfg.defaultModel = entry.model;
|
||||||
|
return new OpenAiProvider(cfg);
|
||||||
|
}
|
||||||
|
|
||||||
|
case 'deepseek': {
|
||||||
|
const apiKey = await secretStore.get('deepseek-api-key');
|
||||||
|
if (!apiKey) {
|
||||||
|
process.stderr.write(`Warning: DeepSeek API key not found for provider "${entry.name}". Run "mcpctl config setup" to configure.\n`);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
const cfg: DeepSeekConfig = { apiKey };
|
||||||
|
if (entry.url) cfg.baseUrl = entry.url;
|
||||||
|
if (entry.model) cfg.defaultModel = entry.model;
|
||||||
|
return new DeepSeekProvider(cfg);
|
||||||
|
}
|
||||||
|
|
||||||
|
case 'vllm': {
|
||||||
|
if (!entry.url) {
|
||||||
|
process.stderr.write(`Warning: vLLM URL not configured for provider "${entry.name}". Run "mcpctl config setup" to configure.\n`);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
return new OpenAiProvider({
|
||||||
|
apiKey: 'unused',
|
||||||
|
baseUrl: entry.url,
|
||||||
|
defaultModel: entry.model ?? 'default',
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
default:
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a ProviderRegistry from multi-provider config entries + secret store.
|
||||||
|
* Registers each provider, wraps with NamedProvider if needed, assigns tiers.
|
||||||
|
*/
|
||||||
|
export async function createProvidersFromConfig(
|
||||||
|
entries: LlmProviderFileEntry[],
|
||||||
|
secretStore: SecretStore,
|
||||||
|
): Promise<ProviderRegistry> {
|
||||||
|
const registry = new ProviderRegistry();
|
||||||
|
|
||||||
|
for (const entry of entries) {
|
||||||
|
const rawProvider = await createSingleProvider(entry, secretStore);
|
||||||
|
if (!rawProvider) continue;
|
||||||
|
|
||||||
|
// Wrap with NamedProvider if user name differs from provider's built-in name
|
||||||
|
const provider = rawProvider.name !== entry.name
|
||||||
|
? new NamedProvider(entry.name, rawProvider)
|
||||||
|
: rawProvider;
|
||||||
|
|
||||||
|
registry.register(provider);
|
||||||
|
|
||||||
|
if (entry.tier) {
|
||||||
|
registry.assignTier(provider.name, entry.tier);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return registry;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a ProviderRegistry from legacy single-provider config + secret store.
|
||||||
|
* @deprecated Use createProvidersFromConfig() with loadLlmProviders() instead.
|
||||||
|
*/
|
||||||
|
export async function createProviderFromConfig(
|
||||||
|
config: LlmFileConfig | undefined,
|
||||||
|
secretStore: SecretStore,
|
||||||
|
): Promise<ProviderRegistry> {
|
||||||
|
if (!config?.provider || config.provider === 'none') {
|
||||||
|
return new ProviderRegistry();
|
||||||
|
}
|
||||||
|
|
||||||
|
const entry: LlmProviderFileEntry = {
|
||||||
|
name: config.provider,
|
||||||
|
type: config.provider,
|
||||||
|
};
|
||||||
|
if (config.model) entry.model = config.model;
|
||||||
|
if (config.url) entry.url = config.url;
|
||||||
|
if (config.binaryPath) entry.binaryPath = config.binaryPath;
|
||||||
|
|
||||||
|
return createProvidersFromConfig([entry], secretStore);
|
||||||
|
}
|
||||||
@@ -105,6 +105,7 @@ export class ResponsePaginator {
|
|||||||
constructor(
|
constructor(
|
||||||
private providers: ProviderRegistry | null,
|
private providers: ProviderRegistry | null,
|
||||||
config: Partial<PaginationConfig> = {},
|
config: Partial<PaginationConfig> = {},
|
||||||
|
private modelOverride?: string,
|
||||||
) {
|
) {
|
||||||
this.config = { ...DEFAULT_PAGINATION_CONFIG, ...config };
|
this.config = { ...DEFAULT_PAGINATION_CONFIG, ...config };
|
||||||
}
|
}
|
||||||
@@ -129,7 +130,8 @@ export class ResponsePaginator {
|
|||||||
|
|
||||||
try {
|
try {
|
||||||
index = await this.generateSmartIndex(resultId, toolName, raw, pages);
|
index = await this.generateSmartIndex(resultId, toolName, raw, pages);
|
||||||
} catch {
|
} catch (err) {
|
||||||
|
console.error(`[pagination] Smart index failed for ${toolName}, falling back to simple:`, err instanceof Error ? err.message : String(err));
|
||||||
index = this.generateSimpleIndex(resultId, toolName, raw, pages);
|
index = this.generateSimpleIndex(resultId, toolName, raw, pages);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -240,7 +242,7 @@ export class ResponsePaginator {
|
|||||||
raw: string,
|
raw: string,
|
||||||
pages: PageInfo[],
|
pages: PageInfo[],
|
||||||
): Promise<PaginationIndex> {
|
): Promise<PaginationIndex> {
|
||||||
const provider = this.providers?.getActive();
|
const provider = this.providers?.getProvider('fast');
|
||||||
if (!provider) {
|
if (!provider) {
|
||||||
return this.generateSimpleIndex(resultId, toolName, raw, pages);
|
return this.generateSimpleIndex(resultId, toolName, raw, pages);
|
||||||
}
|
}
|
||||||
@@ -259,9 +261,12 @@ export class ResponsePaginator {
|
|||||||
],
|
],
|
||||||
maxTokens: this.config.indexMaxTokens,
|
maxTokens: this.config.indexMaxTokens,
|
||||||
temperature: 0,
|
temperature: 0,
|
||||||
|
...(this.modelOverride ? { model: this.modelOverride } : {}),
|
||||||
});
|
});
|
||||||
|
|
||||||
const summaries = JSON.parse(result.content) as Array<{ page: number; summary: string }>;
|
// LLMs often wrap JSON in ```json ... ``` fences — strip them
|
||||||
|
const cleaned = result.content.replace(/^```(?:json)?\s*\n?/i, '').replace(/\n?```\s*$/i, '').trim();
|
||||||
|
const summaries = JSON.parse(cleaned) as Array<{ page: number; summary: string }>;
|
||||||
|
|
||||||
return {
|
return {
|
||||||
resultId,
|
resultId,
|
||||||
|
|||||||
@@ -106,7 +106,7 @@ export class LlmProcessor {
|
|||||||
return { optimized: false, params };
|
return { optimized: false, params };
|
||||||
}
|
}
|
||||||
|
|
||||||
const provider = this.providers.getActive();
|
const provider = this.providers.getProvider('fast');
|
||||||
if (!provider) {
|
if (!provider) {
|
||||||
return { optimized: false, params };
|
return { optimized: false, params };
|
||||||
}
|
}
|
||||||
@@ -142,7 +142,7 @@ export class LlmProcessor {
|
|||||||
return { filtered: false, result: response.result, originalSize: raw.length, filteredSize: raw.length };
|
return { filtered: false, result: response.result, originalSize: raw.length, filteredSize: raw.length };
|
||||||
}
|
}
|
||||||
|
|
||||||
const provider = this.providers.getActive();
|
const provider = this.providers.getProvider('fast');
|
||||||
if (!provider) {
|
if (!provider) {
|
||||||
const raw = JSON.stringify(response.result);
|
const raw = JSON.stringify(response.result);
|
||||||
return { filtered: false, result: response.result, originalSize: raw.length, filteredSize: raw.length };
|
return { filtered: false, result: response.result, originalSize: raw.length, filteredSize: raw.length };
|
||||||
|
|||||||
@@ -7,8 +7,11 @@ import { StdioProxyServer } from './server.js';
|
|||||||
import { StdioUpstream } from './upstream/stdio.js';
|
import { StdioUpstream } from './upstream/stdio.js';
|
||||||
import { HttpUpstream } from './upstream/http.js';
|
import { HttpUpstream } from './upstream/http.js';
|
||||||
import { createHttpServer } from './http/server.js';
|
import { createHttpServer } from './http/server.js';
|
||||||
import { loadHttpConfig } from './http/config.js';
|
import { loadHttpConfig, loadLlmProviders } from './http/config.js';
|
||||||
import type { HttpConfig } from './http/config.js';
|
import type { HttpConfig } from './http/config.js';
|
||||||
|
import { createProvidersFromConfig } from './llm-config.js';
|
||||||
|
import { createSecretStore } from '@mcpctl/shared';
|
||||||
|
import type { ProviderRegistry } from './providers/registry.js';
|
||||||
|
|
||||||
interface ParsedArgs {
|
interface ParsedArgs {
|
||||||
configPath: string | undefined;
|
configPath: string | undefined;
|
||||||
@@ -55,12 +58,28 @@ export interface MainResult {
|
|||||||
server: StdioProxyServer;
|
server: StdioProxyServer;
|
||||||
httpServer: FastifyInstance | undefined;
|
httpServer: FastifyInstance | undefined;
|
||||||
httpConfig: HttpConfig;
|
httpConfig: HttpConfig;
|
||||||
|
providerRegistry: ProviderRegistry;
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function main(argv: string[] = process.argv): Promise<MainResult> {
|
export async function main(argv: string[] = process.argv): Promise<MainResult> {
|
||||||
const args = parseArgs(argv);
|
const args = parseArgs(argv);
|
||||||
const httpConfig = loadHttpConfig();
|
const httpConfig = loadHttpConfig();
|
||||||
|
|
||||||
|
// Load LLM providers from user config + secret store
|
||||||
|
const llmEntries = loadLlmProviders();
|
||||||
|
const secretStore = await createSecretStore();
|
||||||
|
const providerRegistry = await createProvidersFromConfig(llmEntries, secretStore);
|
||||||
|
if (providerRegistry.hasTierConfig()) {
|
||||||
|
const fast = providerRegistry.getTierProviders('fast');
|
||||||
|
const heavy = providerRegistry.getTierProviders('heavy');
|
||||||
|
process.stderr.write(`LLM providers: fast=[${fast.join(',')}] heavy=[${heavy.join(',')}]\n`);
|
||||||
|
} else {
|
||||||
|
const activeLlm = providerRegistry.getActive();
|
||||||
|
if (activeLlm) {
|
||||||
|
process.stderr.write(`LLM provider: ${activeLlm.name}\n`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
let upstreamConfigs: UpstreamConfig[] = [];
|
let upstreamConfigs: UpstreamConfig[] = [];
|
||||||
|
|
||||||
if (args.configPath) {
|
if (args.configPath) {
|
||||||
@@ -115,7 +134,7 @@ export async function main(argv: string[] = process.argv): Promise<MainResult> {
|
|||||||
// Start HTTP server unless disabled
|
// Start HTTP server unless disabled
|
||||||
let httpServer: FastifyInstance | undefined;
|
let httpServer: FastifyInstance | undefined;
|
||||||
if (!args.noHttp) {
|
if (!args.noHttp) {
|
||||||
httpServer = await createHttpServer(httpConfig, { router });
|
httpServer = await createHttpServer(httpConfig, { router, providerRegistry });
|
||||||
await httpServer.listen({ port: httpConfig.httpPort, host: httpConfig.httpHost });
|
await httpServer.listen({ port: httpConfig.httpPort, host: httpConfig.httpHost });
|
||||||
process.stderr.write(`mcpctl-proxy HTTP server listening on ${httpConfig.httpHost}:${httpConfig.httpPort}\n`);
|
process.stderr.write(`mcpctl-proxy HTTP server listening on ${httpConfig.httpHost}:${httpConfig.httpPort}\n`);
|
||||||
}
|
}
|
||||||
@@ -126,6 +145,7 @@ export async function main(argv: string[] = process.argv): Promise<MainResult> {
|
|||||||
if (shuttingDown) return;
|
if (shuttingDown) return;
|
||||||
shuttingDown = true;
|
shuttingDown = true;
|
||||||
|
|
||||||
|
providerRegistry.disposeAll();
|
||||||
server.stop();
|
server.stop();
|
||||||
if (httpServer) {
|
if (httpServer) {
|
||||||
await httpServer.close();
|
await httpServer.close();
|
||||||
@@ -137,7 +157,7 @@ export async function main(argv: string[] = process.argv): Promise<MainResult> {
|
|||||||
process.on('SIGTERM', () => void shutdown());
|
process.on('SIGTERM', () => void shutdown());
|
||||||
process.on('SIGINT', () => void shutdown());
|
process.on('SIGINT', () => void shutdown());
|
||||||
|
|
||||||
return { router, server, httpServer, httpConfig };
|
return { router, server, httpServer, httpConfig, providerRegistry };
|
||||||
}
|
}
|
||||||
|
|
||||||
// Run when executed directly
|
// Run when executed directly
|
||||||
|
|||||||
291
src/mcplocal/src/providers/acp-client.ts
Normal file
291
src/mcplocal/src/providers/acp-client.ts
Normal file
@@ -0,0 +1,291 @@
|
|||||||
|
import { spawn, type ChildProcess } from 'node:child_process';
|
||||||
|
import { createInterface, type Interface as ReadlineInterface } from 'node:readline';
|
||||||
|
|
||||||
|
export interface AcpClientConfig {
|
||||||
|
binaryPath: string;
|
||||||
|
model: string;
|
||||||
|
/** Timeout for individual RPC requests in ms (default: 60000) */
|
||||||
|
requestTimeoutMs: number;
|
||||||
|
/** Timeout for process initialization in ms (default: 30000) */
|
||||||
|
initTimeoutMs: number;
|
||||||
|
/** Override spawn for testing */
|
||||||
|
spawn?: typeof spawn;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface PendingRequest {
|
||||||
|
resolve: (result: unknown) => void;
|
||||||
|
reject: (err: Error) => void;
|
||||||
|
timer: ReturnType<typeof setTimeout>;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Low-level ACP (Agent Client Protocol) client.
|
||||||
|
* Manages a persistent `gemini --experimental-acp` subprocess and communicates
|
||||||
|
* via JSON-RPC 2.0 over NDJSON stdio.
|
||||||
|
*
|
||||||
|
* Pattern follows StdioUpstream: readline for parsing, pending request map with timeouts.
|
||||||
|
*/
|
||||||
|
export class AcpClient {
|
||||||
|
private process: ChildProcess | null = null;
|
||||||
|
private readline: ReadlineInterface | null = null;
|
||||||
|
private pendingRequests = new Map<number, PendingRequest>();
|
||||||
|
private nextId = 1;
|
||||||
|
private sessionId: string | null = null;
|
||||||
|
private ready = false;
|
||||||
|
private initPromise: Promise<void> | null = null;
|
||||||
|
private readonly config: AcpClientConfig;
|
||||||
|
|
||||||
|
/** Accumulates text chunks from session/update agent_message_chunk during a prompt. */
|
||||||
|
private activePromptChunks: string[] = [];
|
||||||
|
|
||||||
|
constructor(config: AcpClientConfig) {
|
||||||
|
this.config = config;
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Ensure the subprocess is spawned and initialized. Idempotent and lazy. */
|
||||||
|
async ensureReady(): Promise<void> {
|
||||||
|
if (this.ready && this.process && !this.process.killed) return;
|
||||||
|
|
||||||
|
// If already initializing, wait for it
|
||||||
|
if (this.initPromise) return this.initPromise;
|
||||||
|
|
||||||
|
this.initPromise = this.doInit();
|
||||||
|
try {
|
||||||
|
await this.initPromise;
|
||||||
|
} catch (err) {
|
||||||
|
this.initPromise = null;
|
||||||
|
throw err;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Send a prompt and collect the streamed text response. */
|
||||||
|
async prompt(text: string): Promise<string> {
|
||||||
|
await this.ensureReady();
|
||||||
|
|
||||||
|
// Set up chunk accumulator
|
||||||
|
this.activePromptChunks = [];
|
||||||
|
|
||||||
|
const result = await this.sendRequest('session/prompt', {
|
||||||
|
sessionId: this.sessionId,
|
||||||
|
prompt: [{ type: 'text', text }],
|
||||||
|
}, this.config.requestTimeoutMs) as { stopReason: string };
|
||||||
|
|
||||||
|
const collected = this.activePromptChunks.join('');
|
||||||
|
this.activePromptChunks = [];
|
||||||
|
|
||||||
|
if (result.stopReason === 'refusal') {
|
||||||
|
throw new Error('Gemini refused to process the prompt');
|
||||||
|
}
|
||||||
|
|
||||||
|
return collected;
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Kill the subprocess and clean up. */
|
||||||
|
dispose(): void {
|
||||||
|
this.cleanup();
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Check if the subprocess is alive and initialized. */
|
||||||
|
get isAlive(): boolean {
|
||||||
|
return this.ready && this.process !== null && !this.process.killed;
|
||||||
|
}
|
||||||
|
|
||||||
|
// --- Private ---
|
||||||
|
|
||||||
|
private async doInit(): Promise<void> {
|
||||||
|
// Clean up any previous state
|
||||||
|
this.cleanup();
|
||||||
|
|
||||||
|
this.spawnProcess();
|
||||||
|
this.setupReadline();
|
||||||
|
|
||||||
|
// ACP handshake: initialize
|
||||||
|
await this.sendRequest('initialize', {
|
||||||
|
protocolVersion: 1,
|
||||||
|
clientCapabilities: {},
|
||||||
|
clientInfo: { name: 'mcpctl', version: '1.0.0' },
|
||||||
|
}, this.config.initTimeoutMs);
|
||||||
|
|
||||||
|
// ACP handshake: session/new
|
||||||
|
const sessionResult = await this.sendRequest('session/new', {
|
||||||
|
cwd: '/tmp',
|
||||||
|
mcpServers: [],
|
||||||
|
}, this.config.initTimeoutMs) as { sessionId: string };
|
||||||
|
|
||||||
|
this.sessionId = sessionResult.sessionId;
|
||||||
|
this.ready = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
private spawnProcess(): void {
|
||||||
|
const spawnFn = this.config.spawn ?? spawn;
|
||||||
|
this.process = spawnFn(this.config.binaryPath, ['--experimental-acp'], {
|
||||||
|
stdio: ['pipe', 'pipe', 'pipe'],
|
||||||
|
env: process.env,
|
||||||
|
});
|
||||||
|
|
||||||
|
this.process.on('exit', () => {
|
||||||
|
this.ready = false;
|
||||||
|
this.initPromise = null;
|
||||||
|
this.sessionId = null;
|
||||||
|
|
||||||
|
// Reject all pending requests
|
||||||
|
for (const [id, pending] of this.pendingRequests) {
|
||||||
|
clearTimeout(pending.timer);
|
||||||
|
pending.reject(new Error('Gemini ACP process exited'));
|
||||||
|
this.pendingRequests.delete(id);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
this.process.on('error', (err) => {
|
||||||
|
this.ready = false;
|
||||||
|
this.initPromise = null;
|
||||||
|
|
||||||
|
for (const [id, pending] of this.pendingRequests) {
|
||||||
|
clearTimeout(pending.timer);
|
||||||
|
pending.reject(err);
|
||||||
|
this.pendingRequests.delete(id);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
private setupReadline(): void {
|
||||||
|
if (!this.process?.stdout) return;
|
||||||
|
|
||||||
|
this.readline = createInterface({ input: this.process.stdout });
|
||||||
|
this.readline.on('line', (line) => this.handleLine(line));
|
||||||
|
}
|
||||||
|
|
||||||
|
private handleLine(line: string): void {
|
||||||
|
let msg: Record<string, unknown>;
|
||||||
|
try {
|
||||||
|
msg = JSON.parse(line) as Record<string, unknown>;
|
||||||
|
} catch {
|
||||||
|
// Skip non-JSON lines (e.g., debug output on stdout)
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Response to a pending request (has 'id')
|
||||||
|
if ('id' in msg && msg.id !== undefined && ('result' in msg || 'error' in msg)) {
|
||||||
|
this.handleResponse(msg as { id: number; result?: unknown; error?: { code: number; message: string } });
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Notification (has 'method', no 'id')
|
||||||
|
if ('method' in msg && !('id' in msg)) {
|
||||||
|
this.handleNotification(msg as { method: string; params?: Record<string, unknown> });
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Request from agent (has 'method' AND 'id') — agent asking us for something
|
||||||
|
if ('method' in msg && 'id' in msg) {
|
||||||
|
this.handleAgentRequest(msg as { id: number; method: string; params?: Record<string, unknown> });
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private handleResponse(msg: { id: number; result?: unknown; error?: { code: number; message: string } }): void {
|
||||||
|
const pending = this.pendingRequests.get(msg.id);
|
||||||
|
if (!pending) return;
|
||||||
|
|
||||||
|
clearTimeout(pending.timer);
|
||||||
|
this.pendingRequests.delete(msg.id);
|
||||||
|
|
||||||
|
if (msg.error) {
|
||||||
|
pending.reject(new Error(`ACP error ${msg.error.code}: ${msg.error.message}`));
|
||||||
|
} else {
|
||||||
|
pending.resolve(msg.result);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private handleNotification(msg: { method: string; params?: Record<string, unknown> }): void {
|
||||||
|
if (msg.method !== 'session/update' || !msg.params) return;
|
||||||
|
|
||||||
|
const update = msg.params.update as Record<string, unknown> | undefined;
|
||||||
|
if (!update) return;
|
||||||
|
|
||||||
|
// Collect text from agent_message_chunk
|
||||||
|
if (update.sessionUpdate === 'agent_message_chunk') {
|
||||||
|
const content = update.content;
|
||||||
|
// Gemini ACP sends content as a single object {type, text} or an array [{type, text}]
|
||||||
|
const blocks: Array<{ type: string; text?: string }> = Array.isArray(content)
|
||||||
|
? content as Array<{ type: string; text?: string }>
|
||||||
|
: content && typeof content === 'object'
|
||||||
|
? [content as { type: string; text?: string }]
|
||||||
|
: [];
|
||||||
|
for (const block of blocks) {
|
||||||
|
if (block.type === 'text' && block.text) {
|
||||||
|
this.activePromptChunks.push(block.text);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Handle requests from the agent (e.g., session/request_permission). Reject them all. */
|
||||||
|
private handleAgentRequest(msg: { id: number; method: string; params?: Record<string, unknown> }): void {
|
||||||
|
if (!this.process?.stdin) return;
|
||||||
|
|
||||||
|
if (msg.method === 'session/request_permission') {
|
||||||
|
// Reject permission requests — we don't want tool use
|
||||||
|
const response = JSON.stringify({
|
||||||
|
jsonrpc: '2.0',
|
||||||
|
id: msg.id,
|
||||||
|
result: { outcome: { outcome: 'cancelled' } },
|
||||||
|
});
|
||||||
|
this.process.stdin.write(response + '\n');
|
||||||
|
} else {
|
||||||
|
// Unknown method — return error
|
||||||
|
const response = JSON.stringify({
|
||||||
|
jsonrpc: '2.0',
|
||||||
|
id: msg.id,
|
||||||
|
error: { code: -32601, message: 'Method not supported' },
|
||||||
|
});
|
||||||
|
this.process.stdin.write(response + '\n');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private sendRequest(method: string, params: Record<string, unknown>, timeoutMs: number): Promise<unknown> {
|
||||||
|
if (!this.process?.stdin) {
|
||||||
|
return Promise.reject(new Error('ACP process not started'));
|
||||||
|
}
|
||||||
|
|
||||||
|
const id = this.nextId++;
|
||||||
|
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
const timer = setTimeout(() => {
|
||||||
|
this.pendingRequests.delete(id);
|
||||||
|
// Kill the process on timeout — it's hung
|
||||||
|
this.cleanup();
|
||||||
|
reject(new Error(`ACP request '${method}' timed out after ${timeoutMs}ms`));
|
||||||
|
}, timeoutMs);
|
||||||
|
|
||||||
|
this.pendingRequests.set(id, { resolve, reject, timer });
|
||||||
|
|
||||||
|
const msg = JSON.stringify({ jsonrpc: '2.0', id, method, params });
|
||||||
|
this.process!.stdin!.write(msg + '\n');
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
private cleanup(): void {
|
||||||
|
this.ready = false;
|
||||||
|
this.initPromise = null;
|
||||||
|
this.sessionId = null;
|
||||||
|
this.activePromptChunks = [];
|
||||||
|
|
||||||
|
// Reject all pending requests
|
||||||
|
for (const [id, pending] of this.pendingRequests) {
|
||||||
|
clearTimeout(pending.timer);
|
||||||
|
pending.reject(new Error('ACP client disposed'));
|
||||||
|
this.pendingRequests.delete(id);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (this.readline) {
|
||||||
|
this.readline.close();
|
||||||
|
this.readline = null;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (this.process) {
|
||||||
|
this.process.kill('SIGTERM');
|
||||||
|
this.process = null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
165
src/mcplocal/src/providers/gemini-acp.ts
Normal file
165
src/mcplocal/src/providers/gemini-acp.ts
Normal file
@@ -0,0 +1,165 @@
|
|||||||
|
import { execFile } from 'node:child_process';
|
||||||
|
import { promisify } from 'node:util';
|
||||||
|
import type { LlmProvider, CompletionOptions, CompletionResult } from './types.js';
|
||||||
|
import { AcpClient } from './acp-client.js';
|
||||||
|
import type { AcpClientConfig } from './acp-client.js';
|
||||||
|
|
||||||
|
const execFileAsync = promisify(execFile);
|
||||||
|
|
||||||
|
export interface GeminiAcpConfig {
|
||||||
|
binaryPath?: string;
|
||||||
|
defaultModel?: string;
|
||||||
|
requestTimeoutMs?: number;
|
||||||
|
initTimeoutMs?: number;
|
||||||
|
/** Idle TTL for pooled sessions in ms (default: 8 hours) */
|
||||||
|
idleTtlMs?: number;
|
||||||
|
/** Override for testing — passed through to AcpClient */
|
||||||
|
spawn?: AcpClientConfig['spawn'];
|
||||||
|
}
|
||||||
|
|
||||||
|
interface PoolEntry {
|
||||||
|
client: AcpClient;
|
||||||
|
lastUsed: number;
|
||||||
|
queue: Promise<void>;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Gemini CLI provider using ACP (Agent Client Protocol) mode.
|
||||||
|
*
|
||||||
|
* Maintains a pool of persistent subprocesses keyed by model name.
|
||||||
|
* Each model gets its own `gemini --experimental-acp` subprocess with
|
||||||
|
* a serial request queue. Idle sessions are evicted after 8 hours.
|
||||||
|
*
|
||||||
|
* NOTE: Gemini ACP currently doesn't support per-session model selection,
|
||||||
|
* so all sessions use the same model. The pool infrastructure is ready for
|
||||||
|
* when vLLM/OpenAI providers are added (they support per-request model).
|
||||||
|
*/
|
||||||
|
export class GeminiAcpProvider implements LlmProvider {
|
||||||
|
readonly name = 'gemini-cli';
|
||||||
|
private pool = new Map<string, PoolEntry>();
|
||||||
|
private binaryPath: string;
|
||||||
|
private defaultModel: string;
|
||||||
|
private readonly requestTimeoutMs: number;
|
||||||
|
private readonly initTimeoutMs: number;
|
||||||
|
private readonly idleTtlMs: number;
|
||||||
|
private readonly spawnOverride?: AcpClientConfig['spawn'];
|
||||||
|
|
||||||
|
constructor(config?: GeminiAcpConfig) {
|
||||||
|
this.binaryPath = config?.binaryPath ?? 'gemini';
|
||||||
|
this.defaultModel = config?.defaultModel ?? 'gemini-2.5-flash';
|
||||||
|
this.requestTimeoutMs = config?.requestTimeoutMs ?? 60_000;
|
||||||
|
this.initTimeoutMs = config?.initTimeoutMs ?? 30_000;
|
||||||
|
this.idleTtlMs = config?.idleTtlMs ?? 8 * 60 * 60 * 1000; // 8 hours
|
||||||
|
if (config?.spawn) this.spawnOverride = config.spawn;
|
||||||
|
}
|
||||||
|
|
||||||
|
async complete(options: CompletionOptions): Promise<CompletionResult> {
|
||||||
|
const model = options.model ?? this.defaultModel;
|
||||||
|
const entry = this.getOrCreateEntry(model);
|
||||||
|
entry.lastUsed = Date.now();
|
||||||
|
|
||||||
|
this.evictIdle();
|
||||||
|
|
||||||
|
return this.enqueue(entry, () => this.doComplete(entry.client, options));
|
||||||
|
}
|
||||||
|
|
||||||
|
async listModels(): Promise<string[]> {
|
||||||
|
return ['gemini-2.5-flash', 'gemini-2.5-pro', 'gemini-2.0-flash'];
|
||||||
|
}
|
||||||
|
|
||||||
|
async isAvailable(): Promise<boolean> {
|
||||||
|
try {
|
||||||
|
await execFileAsync(this.binaryPath, ['--version'], { timeout: 5000 });
|
||||||
|
return true;
|
||||||
|
} catch {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
dispose(): void {
|
||||||
|
for (const entry of this.pool.values()) {
|
||||||
|
entry.client.dispose();
|
||||||
|
}
|
||||||
|
this.pool.clear();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Eagerly spawn the default model's ACP subprocess so it's ready
|
||||||
|
* for the first request (avoids 30s cold-start on health checks).
|
||||||
|
*/
|
||||||
|
warmup(): void {
|
||||||
|
const entry = this.getOrCreateEntry(this.defaultModel);
|
||||||
|
// Fire-and-forget: start the subprocess initialization in the background
|
||||||
|
entry.client.ensureReady().catch(() => {
|
||||||
|
// Ignore errors — next request will retry
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Number of active pool entries (for testing). */
|
||||||
|
get poolSize(): number {
|
||||||
|
return this.pool.size;
|
||||||
|
}
|
||||||
|
|
||||||
|
// --- Private ---
|
||||||
|
|
||||||
|
private getOrCreateEntry(model: string): PoolEntry {
|
||||||
|
const existing = this.pool.get(model);
|
||||||
|
if (existing) return existing;
|
||||||
|
|
||||||
|
const acpConfig: AcpClientConfig = {
|
||||||
|
binaryPath: this.binaryPath,
|
||||||
|
model,
|
||||||
|
requestTimeoutMs: this.requestTimeoutMs,
|
||||||
|
initTimeoutMs: this.initTimeoutMs,
|
||||||
|
};
|
||||||
|
if (this.spawnOverride) acpConfig.spawn = this.spawnOverride;
|
||||||
|
|
||||||
|
const entry: PoolEntry = {
|
||||||
|
client: new AcpClient(acpConfig),
|
||||||
|
lastUsed: Date.now(),
|
||||||
|
queue: Promise.resolve(),
|
||||||
|
};
|
||||||
|
this.pool.set(model, entry);
|
||||||
|
return entry;
|
||||||
|
}
|
||||||
|
|
||||||
|
private evictIdle(): void {
|
||||||
|
const now = Date.now();
|
||||||
|
for (const [model, entry] of this.pool) {
|
||||||
|
if (now - entry.lastUsed > this.idleTtlMs) {
|
||||||
|
entry.client.dispose();
|
||||||
|
this.pool.delete(model);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private async doComplete(client: AcpClient, options: CompletionOptions): Promise<CompletionResult> {
|
||||||
|
const prompt = options.messages
|
||||||
|
.map((m) => {
|
||||||
|
if (m.role === 'system') return `System: ${m.content}`;
|
||||||
|
if (m.role === 'user') return m.content;
|
||||||
|
if (m.role === 'assistant') return `Assistant: ${m.content}`;
|
||||||
|
return m.content;
|
||||||
|
})
|
||||||
|
.join('\n\n');
|
||||||
|
|
||||||
|
const content = await client.prompt(prompt);
|
||||||
|
|
||||||
|
return {
|
||||||
|
content: content.trim(),
|
||||||
|
toolCalls: [],
|
||||||
|
usage: { promptTokens: 0, completionTokens: 0, totalTokens: 0 },
|
||||||
|
finishReason: 'stop',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
private enqueue<T>(entry: PoolEntry, fn: () => Promise<T>): Promise<T> {
|
||||||
|
const result = new Promise<T>((resolve, reject) => {
|
||||||
|
entry.queue = entry.queue.then(
|
||||||
|
() => fn().then(resolve, reject),
|
||||||
|
() => fn().then(resolve, reject),
|
||||||
|
);
|
||||||
|
});
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -9,4 +9,8 @@ export { GeminiCliProvider } from './gemini-cli.js';
|
|||||||
export type { GeminiCliConfig } from './gemini-cli.js';
|
export type { GeminiCliConfig } from './gemini-cli.js';
|
||||||
export { DeepSeekProvider } from './deepseek.js';
|
export { DeepSeekProvider } from './deepseek.js';
|
||||||
export type { DeepSeekConfig } from './deepseek.js';
|
export type { DeepSeekConfig } from './deepseek.js';
|
||||||
|
export { GeminiAcpProvider } from './gemini-acp.js';
|
||||||
|
export type { GeminiAcpConfig } from './gemini-acp.js';
|
||||||
|
export { AcpClient } from './acp-client.js';
|
||||||
|
export type { AcpClientConfig } from './acp-client.js';
|
||||||
export { ProviderRegistry } from './registry.js';
|
export { ProviderRegistry } from './registry.js';
|
||||||
|
|||||||
@@ -1,11 +1,13 @@
|
|||||||
import type { LlmProvider } from './types.js';
|
import type { LlmProvider, Tier } from './types.js';
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Registry for LLM providers. Supports switching the active provider at runtime.
|
* Registry for LLM providers. Supports tier-based routing (fast/heavy)
|
||||||
|
* with cross-tier fallback, and legacy single-provider mode.
|
||||||
*/
|
*/
|
||||||
export class ProviderRegistry {
|
export class ProviderRegistry {
|
||||||
private providers = new Map<string, LlmProvider>();
|
private providers = new Map<string, LlmProvider>();
|
||||||
private activeProvider: string | null = null;
|
private activeProvider: string | null = null;
|
||||||
|
private tierProviders = new Map<Tier, string[]>();
|
||||||
|
|
||||||
register(provider: LlmProvider): void {
|
register(provider: LlmProvider): void {
|
||||||
this.providers.set(provider.name, provider);
|
this.providers.set(provider.name, provider);
|
||||||
@@ -20,6 +22,15 @@ export class ProviderRegistry {
|
|||||||
const first = this.providers.keys().next();
|
const first = this.providers.keys().next();
|
||||||
this.activeProvider = first.done ? null : first.value;
|
this.activeProvider = first.done ? null : first.value;
|
||||||
}
|
}
|
||||||
|
// Remove from tier assignments
|
||||||
|
for (const [tier, names] of this.tierProviders) {
|
||||||
|
const filtered = names.filter((n) => n !== name);
|
||||||
|
if (filtered.length === 0) {
|
||||||
|
this.tierProviders.delete(tier);
|
||||||
|
} else {
|
||||||
|
this.tierProviders.set(tier, filtered);
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
setActive(name: string): void {
|
setActive(name: string): void {
|
||||||
@@ -34,6 +45,42 @@ export class ProviderRegistry {
|
|||||||
return this.providers.get(this.activeProvider) ?? null;
|
return this.providers.get(this.activeProvider) ?? null;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/** Assign a provider to a tier. Call order = priority within the tier. */
|
||||||
|
assignTier(providerName: string, tier: Tier): void {
|
||||||
|
if (!this.providers.has(providerName)) {
|
||||||
|
throw new Error(`Provider '${providerName}' is not registered`);
|
||||||
|
}
|
||||||
|
const existing = this.tierProviders.get(tier) ?? [];
|
||||||
|
if (!existing.includes(providerName)) {
|
||||||
|
this.tierProviders.set(tier, [...existing, providerName]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get provider for a specific tier with fallback.
|
||||||
|
* Resolution: requested tier → other tier → getActive() (legacy).
|
||||||
|
*/
|
||||||
|
getProvider(tier: Tier): LlmProvider | null {
|
||||||
|
const primary = this.firstInTier(tier);
|
||||||
|
if (primary) return primary;
|
||||||
|
|
||||||
|
const otherTier: Tier = tier === 'fast' ? 'heavy' : 'fast';
|
||||||
|
const fallback = this.firstInTier(otherTier);
|
||||||
|
if (fallback) return fallback;
|
||||||
|
|
||||||
|
return this.getActive();
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Get provider names assigned to a tier. */
|
||||||
|
getTierProviders(tier: Tier): string[] {
|
||||||
|
return this.tierProviders.get(tier) ?? [];
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Whether any tier assignments exist (vs legacy single-provider mode). */
|
||||||
|
hasTierConfig(): boolean {
|
||||||
|
return this.tierProviders.size > 0;
|
||||||
|
}
|
||||||
|
|
||||||
get(name: string): LlmProvider | undefined {
|
get(name: string): LlmProvider | undefined {
|
||||||
return this.providers.get(name);
|
return this.providers.get(name);
|
||||||
}
|
}
|
||||||
@@ -45,4 +92,32 @@ export class ProviderRegistry {
|
|||||||
getActiveName(): string | null {
|
getActiveName(): string | null {
|
||||||
return this.activeProvider;
|
return this.activeProvider;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/** Provider info for status display. */
|
||||||
|
listProviders(): Array<{ name: string; tiers: Tier[] }> {
|
||||||
|
return this.list().map((name) => {
|
||||||
|
const tiers: Tier[] = [];
|
||||||
|
for (const [tier, names] of this.tierProviders) {
|
||||||
|
if (names.includes(name)) tiers.push(tier);
|
||||||
|
}
|
||||||
|
return { name, tiers };
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Dispose all registered providers that have a dispose method. */
|
||||||
|
disposeAll(): void {
|
||||||
|
for (const provider of this.providers.values()) {
|
||||||
|
provider.dispose?.();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private firstInTier(tier: Tier): LlmProvider | null {
|
||||||
|
const names = this.tierProviders.get(tier);
|
||||||
|
if (!names) return null;
|
||||||
|
for (const name of names) {
|
||||||
|
const provider = this.providers.get(name);
|
||||||
|
if (provider) return provider;
|
||||||
|
}
|
||||||
|
return null;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -44,6 +44,9 @@ export interface CompletionOptions {
|
|||||||
model?: string;
|
model?: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/** LLM provider tier. 'fast' = local inference, 'heavy' = cloud reasoning. */
|
||||||
|
export type Tier = 'fast' | 'heavy';
|
||||||
|
|
||||||
export interface LlmProvider {
|
export interface LlmProvider {
|
||||||
/** Provider identifier (e.g., 'openai', 'anthropic', 'ollama') */
|
/** Provider identifier (e.g., 'openai', 'anthropic', 'ollama') */
|
||||||
readonly name: string;
|
readonly name: string;
|
||||||
@@ -53,4 +56,6 @@ export interface LlmProvider {
|
|||||||
listModels(): Promise<string[]>;
|
listModels(): Promise<string[]>;
|
||||||
/** Check if the provider is configured and reachable */
|
/** Check if the provider is configured and reachable */
|
||||||
isAvailable(): Promise<boolean>;
|
isAvailable(): Promise<boolean>;
|
||||||
|
/** Optional cleanup for providers with persistent resources (e.g., subprocesses). */
|
||||||
|
dispose?(): void;
|
||||||
}
|
}
|
||||||
|
|||||||
486
src/mcplocal/tests/acp-client.test.ts
Normal file
486
src/mcplocal/tests/acp-client.test.ts
Normal file
@@ -0,0 +1,486 @@
|
|||||||
|
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
|
||||||
|
import { EventEmitter, Readable } from 'node:stream';
|
||||||
|
import { AcpClient } from '../src/providers/acp-client.js';
|
||||||
|
import type { AcpClientConfig } from '../src/providers/acp-client.js';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Creates a mock child process that speaks ACP protocol.
|
||||||
|
* Returns the mock process and helpers to send responses.
|
||||||
|
*/
|
||||||
|
function createMockProcess() {
|
||||||
|
const stdin = {
|
||||||
|
write: vi.fn(),
|
||||||
|
writable: true,
|
||||||
|
};
|
||||||
|
|
||||||
|
const stdoutEmitter = new EventEmitter();
|
||||||
|
const stdout = Object.assign(stdoutEmitter, {
|
||||||
|
readable: true,
|
||||||
|
// readline needs these
|
||||||
|
[Symbol.asyncIterator]: undefined,
|
||||||
|
pause: vi.fn(),
|
||||||
|
resume: vi.fn(),
|
||||||
|
isPaused: () => false,
|
||||||
|
setEncoding: vi.fn(),
|
||||||
|
read: vi.fn(),
|
||||||
|
destroy: vi.fn(),
|
||||||
|
pipe: vi.fn(),
|
||||||
|
unpipe: vi.fn(),
|
||||||
|
unshift: vi.fn(),
|
||||||
|
wrap: vi.fn(),
|
||||||
|
}) as unknown as Readable;
|
||||||
|
|
||||||
|
const proc = Object.assign(new EventEmitter(), {
|
||||||
|
stdin,
|
||||||
|
stdout,
|
||||||
|
stderr: new EventEmitter(),
|
||||||
|
pid: 12345,
|
||||||
|
killed: false,
|
||||||
|
kill: vi.fn(function (this: { killed: boolean }) {
|
||||||
|
this.killed = true;
|
||||||
|
}),
|
||||||
|
});
|
||||||
|
|
||||||
|
/** Send a line of JSON from the "agent" to our client */
|
||||||
|
function sendLine(data: unknown) {
|
||||||
|
stdoutEmitter.emit('data', Buffer.from(JSON.stringify(data) + '\n'));
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Send a JSON-RPC response */
|
||||||
|
function sendResponse(id: number, result: unknown) {
|
||||||
|
sendLine({ jsonrpc: '2.0', id, result });
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Send a JSON-RPC error */
|
||||||
|
function sendError(id: number, code: number, message: string) {
|
||||||
|
sendLine({ jsonrpc: '2.0', id, error: { code, message } });
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Send a session/update notification with agent_message_chunk */
|
||||||
|
function sendChunk(sessionId: string, text: string) {
|
||||||
|
sendLine({
|
||||||
|
jsonrpc: '2.0',
|
||||||
|
method: 'session/update',
|
||||||
|
params: {
|
||||||
|
sessionId,
|
||||||
|
update: {
|
||||||
|
sessionUpdate: 'agent_message_chunk',
|
||||||
|
content: [{ type: 'text', text }],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Send a session/request_permission request */
|
||||||
|
function sendPermissionRequest(id: number, sessionId: string) {
|
||||||
|
sendLine({
|
||||||
|
jsonrpc: '2.0',
|
||||||
|
id,
|
||||||
|
method: 'session/request_permission',
|
||||||
|
params: { sessionId },
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
return { proc, stdin, stdout: stdoutEmitter, sendLine, sendResponse, sendError, sendChunk, sendPermissionRequest };
|
||||||
|
}
|
||||||
|
|
||||||
|
function createConfig(overrides?: Partial<AcpClientConfig>): AcpClientConfig {
|
||||||
|
return {
|
||||||
|
binaryPath: '/usr/bin/gemini',
|
||||||
|
model: 'gemini-2.5-flash',
|
||||||
|
requestTimeoutMs: 5000,
|
||||||
|
initTimeoutMs: 5000,
|
||||||
|
...overrides,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
describe('AcpClient', () => {
|
||||||
|
let client: AcpClient;
|
||||||
|
let mock: ReturnType<typeof createMockProcess>;
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
mock = createMockProcess();
|
||||||
|
});
|
||||||
|
|
||||||
|
afterEach(() => {
|
||||||
|
client?.dispose();
|
||||||
|
});
|
||||||
|
|
||||||
|
function createClient(configOverrides?: Partial<AcpClientConfig>) {
|
||||||
|
const config = createConfig({
|
||||||
|
spawn: (() => mock.proc) as unknown as AcpClientConfig['spawn'],
|
||||||
|
...configOverrides,
|
||||||
|
});
|
||||||
|
client = new AcpClient(config);
|
||||||
|
return client;
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Helper: auto-respond to the initialize + session/new handshake */
|
||||||
|
function autoHandshake(sessionId = 'test-session') {
|
||||||
|
mock.stdin.write.mockImplementation((data: string) => {
|
||||||
|
const msg = JSON.parse(data.trim()) as { id: number; method: string };
|
||||||
|
if (msg.method === 'initialize') {
|
||||||
|
// Respond async to simulate real behavior
|
||||||
|
setImmediate(() => mock.sendResponse(msg.id, {
|
||||||
|
protocolVersion: 1,
|
||||||
|
agentInfo: { name: 'gemini-cli', version: '1.0.0' },
|
||||||
|
}));
|
||||||
|
} else if (msg.method === 'session/new') {
|
||||||
|
setImmediate(() => mock.sendResponse(msg.id, { sessionId }));
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
describe('ensureReady', () => {
|
||||||
|
it('spawns process and completes ACP handshake', async () => {
|
||||||
|
createClient();
|
||||||
|
autoHandshake();
|
||||||
|
|
||||||
|
await client.ensureReady();
|
||||||
|
|
||||||
|
expect(client.isAlive).toBe(true);
|
||||||
|
// Verify initialize was sent
|
||||||
|
const calls = mock.stdin.write.mock.calls.map((c) => JSON.parse(c[0] as string));
|
||||||
|
expect(calls[0].method).toBe('initialize');
|
||||||
|
expect(calls[0].params.protocolVersion).toBe(1);
|
||||||
|
expect(calls[0].params.clientInfo.name).toBe('mcpctl');
|
||||||
|
// Verify session/new was sent
|
||||||
|
expect(calls[1].method).toBe('session/new');
|
||||||
|
expect(calls[1].params.cwd).toBe('/tmp');
|
||||||
|
expect(calls[1].params.mcpServers).toEqual([]);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('is idempotent when already ready', async () => {
|
||||||
|
createClient();
|
||||||
|
autoHandshake();
|
||||||
|
|
||||||
|
await client.ensureReady();
|
||||||
|
await client.ensureReady();
|
||||||
|
|
||||||
|
// Should only have sent initialize + session/new once
|
||||||
|
const calls = mock.stdin.write.mock.calls;
|
||||||
|
expect(calls.length).toBe(2);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('shares init promise for concurrent calls', async () => {
|
||||||
|
createClient();
|
||||||
|
autoHandshake();
|
||||||
|
|
||||||
|
const p1 = client.ensureReady();
|
||||||
|
const p2 = client.ensureReady();
|
||||||
|
|
||||||
|
await Promise.all([p1, p2]);
|
||||||
|
expect(mock.stdin.write.mock.calls.length).toBe(2);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('prompt', () => {
|
||||||
|
it('sends session/prompt and collects agent_message_chunk text', async () => {
|
||||||
|
createClient();
|
||||||
|
const sessionId = 'sess-123';
|
||||||
|
autoHandshake(sessionId);
|
||||||
|
|
||||||
|
await client.ensureReady();
|
||||||
|
|
||||||
|
// Now set up the prompt response handler
|
||||||
|
mock.stdin.write.mockImplementation((data: string) => {
|
||||||
|
const msg = JSON.parse(data.trim()) as { id: number; method: string };
|
||||||
|
if (msg.method === 'session/prompt') {
|
||||||
|
setImmediate(() => {
|
||||||
|
mock.sendChunk(sessionId, 'Hello ');
|
||||||
|
mock.sendChunk(sessionId, 'world!');
|
||||||
|
mock.sendResponse(msg.id, { stopReason: 'end_turn' });
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
const result = await client.prompt('Say hello');
|
||||||
|
expect(result).toBe('Hello world!');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('handles multi-block content in a single chunk', async () => {
|
||||||
|
createClient();
|
||||||
|
autoHandshake('sess-1');
|
||||||
|
await client.ensureReady();
|
||||||
|
|
||||||
|
mock.stdin.write.mockImplementation((data: string) => {
|
||||||
|
const msg = JSON.parse(data.trim()) as { id: number; method: string };
|
||||||
|
if (msg.method === 'session/prompt') {
|
||||||
|
setImmediate(() => {
|
||||||
|
mock.sendLine({
|
||||||
|
jsonrpc: '2.0',
|
||||||
|
method: 'session/update',
|
||||||
|
params: {
|
||||||
|
sessionId: 'sess-1',
|
||||||
|
update: {
|
||||||
|
sessionUpdate: 'agent_message_chunk',
|
||||||
|
content: [
|
||||||
|
{ type: 'text', text: 'Part A' },
|
||||||
|
{ type: 'text', text: ' Part B' },
|
||||||
|
],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
mock.sendResponse(msg.id, { stopReason: 'end_turn' });
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
const result = await client.prompt('test');
|
||||||
|
expect(result).toBe('Part A Part B');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('handles single-object content (real Gemini ACP format)', async () => {
|
||||||
|
createClient();
|
||||||
|
autoHandshake('sess-1');
|
||||||
|
await client.ensureReady();
|
||||||
|
|
||||||
|
mock.stdin.write.mockImplementation((data: string) => {
|
||||||
|
const msg = JSON.parse(data.trim()) as { id: number; method: string };
|
||||||
|
if (msg.method === 'session/prompt') {
|
||||||
|
setImmediate(() => {
|
||||||
|
// Real Gemini ACP sends content as a single object, not an array
|
||||||
|
mock.sendLine({
|
||||||
|
jsonrpc: '2.0',
|
||||||
|
method: 'session/update',
|
||||||
|
params: {
|
||||||
|
sessionId: 'sess-1',
|
||||||
|
update: {
|
||||||
|
sessionUpdate: 'agent_message_chunk',
|
||||||
|
content: { type: 'text', text: 'ok' },
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
mock.sendResponse(msg.id, { stopReason: 'end_turn' });
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
const result = await client.prompt('test');
|
||||||
|
expect(result).toBe('ok');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('ignores agent_thought_chunk notifications', async () => {
|
||||||
|
createClient();
|
||||||
|
autoHandshake('sess-1');
|
||||||
|
await client.ensureReady();
|
||||||
|
|
||||||
|
mock.stdin.write.mockImplementation((data: string) => {
|
||||||
|
const msg = JSON.parse(data.trim()) as { id: number; method: string };
|
||||||
|
if (msg.method === 'session/prompt') {
|
||||||
|
setImmediate(() => {
|
||||||
|
// Gemini sends thought chunks before message chunks
|
||||||
|
mock.sendLine({
|
||||||
|
jsonrpc: '2.0',
|
||||||
|
method: 'session/update',
|
||||||
|
params: {
|
||||||
|
sessionId: 'sess-1',
|
||||||
|
update: {
|
||||||
|
sessionUpdate: 'agent_thought_chunk',
|
||||||
|
content: { type: 'text', text: 'Thinking about it...' },
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
mock.sendLine({
|
||||||
|
jsonrpc: '2.0',
|
||||||
|
method: 'session/update',
|
||||||
|
params: {
|
||||||
|
sessionId: 'sess-1',
|
||||||
|
update: {
|
||||||
|
sessionUpdate: 'agent_message_chunk',
|
||||||
|
content: { type: 'text', text: 'ok' },
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
mock.sendResponse(msg.id, { stopReason: 'end_turn' });
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
const result = await client.prompt('test');
|
||||||
|
expect(result).toBe('ok');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('calls ensureReady automatically (lazy init)', async () => {
|
||||||
|
createClient();
|
||||||
|
autoHandshake('sess-auto');
|
||||||
|
|
||||||
|
// After handshake, handle prompts
|
||||||
|
const originalWrite = mock.stdin.write;
|
||||||
|
let handshakeDone = false;
|
||||||
|
mock.stdin.write.mockImplementation((data: string) => {
|
||||||
|
const msg = JSON.parse(data.trim()) as { id: number; method: string };
|
||||||
|
if (msg.method === 'initialize') {
|
||||||
|
setImmediate(() => mock.sendResponse(msg.id, { protocolVersion: 1 }));
|
||||||
|
} else if (msg.method === 'session/new') {
|
||||||
|
setImmediate(() => {
|
||||||
|
mock.sendResponse(msg.id, { sessionId: 'sess-auto' });
|
||||||
|
handshakeDone = true;
|
||||||
|
});
|
||||||
|
} else if (msg.method === 'session/prompt') {
|
||||||
|
setImmediate(() => {
|
||||||
|
mock.sendChunk('sess-auto', 'ok');
|
||||||
|
mock.sendResponse(msg.id, { stopReason: 'end_turn' });
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// Call prompt directly without ensureReady
|
||||||
|
const result = await client.prompt('test');
|
||||||
|
expect(result).toBe('ok');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('auto-restart', () => {
|
||||||
|
it('restarts after process exit', async () => {
|
||||||
|
createClient();
|
||||||
|
autoHandshake('sess-1');
|
||||||
|
await client.ensureReady();
|
||||||
|
expect(client.isAlive).toBe(true);
|
||||||
|
|
||||||
|
// Simulate process exit
|
||||||
|
mock.proc.killed = true;
|
||||||
|
mock.proc.emit('exit', 1);
|
||||||
|
expect(client.isAlive).toBe(false);
|
||||||
|
|
||||||
|
// Create a new mock for the respawned process
|
||||||
|
mock = createMockProcess();
|
||||||
|
// Update the spawn function to return new mock
|
||||||
|
(client as unknown as { config: { spawn: unknown } }).config.spawn = () => mock.proc;
|
||||||
|
autoHandshake('sess-2');
|
||||||
|
|
||||||
|
await client.ensureReady();
|
||||||
|
expect(client.isAlive).toBe(true);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('timeout', () => {
|
||||||
|
it('kills process and rejects on request timeout', async () => {
|
||||||
|
createClient({ requestTimeoutMs: 50 });
|
||||||
|
autoHandshake('sess-1');
|
||||||
|
await client.ensureReady();
|
||||||
|
|
||||||
|
// Don't respond to the prompt — let it timeout
|
||||||
|
mock.stdin.write.mockImplementation(() => {});
|
||||||
|
|
||||||
|
await expect(client.prompt('test')).rejects.toThrow('timed out');
|
||||||
|
expect(client.isAlive).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('rejects on init timeout', async () => {
|
||||||
|
createClient({ initTimeoutMs: 50 });
|
||||||
|
// Don't respond to initialize
|
||||||
|
mock.stdin.write.mockImplementation(() => {});
|
||||||
|
|
||||||
|
await expect(client.ensureReady()).rejects.toThrow('timed out');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('error handling', () => {
|
||||||
|
it('rejects on ACP error response', async () => {
|
||||||
|
createClient();
|
||||||
|
mock.stdin.write.mockImplementation((data: string) => {
|
||||||
|
const msg = JSON.parse(data.trim()) as { id: number; method: string };
|
||||||
|
setImmediate(() => mock.sendError(msg.id, -32603, 'Internal error'));
|
||||||
|
});
|
||||||
|
|
||||||
|
await expect(client.ensureReady()).rejects.toThrow('ACP error -32603: Internal error');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('rejects pending requests on process crash', async () => {
|
||||||
|
createClient();
|
||||||
|
autoHandshake('sess-1');
|
||||||
|
await client.ensureReady();
|
||||||
|
|
||||||
|
// Override write so prompt sends but gets no response; then crash the process
|
||||||
|
mock.stdin.write.mockImplementation(() => {
|
||||||
|
// After the prompt is sent, simulate a process crash
|
||||||
|
setImmediate(() => {
|
||||||
|
mock.proc.killed = true;
|
||||||
|
mock.proc.emit('exit', 1);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
const promptPromise = client.prompt('test');
|
||||||
|
await expect(promptPromise).rejects.toThrow('process exited');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('permission requests', () => {
|
||||||
|
it('rejects session/request_permission from agent', async () => {
|
||||||
|
createClient();
|
||||||
|
autoHandshake('sess-1');
|
||||||
|
await client.ensureReady();
|
||||||
|
|
||||||
|
mock.stdin.write.mockImplementation((data: string) => {
|
||||||
|
const msg = JSON.parse(data.trim()) as { id: number; method: string };
|
||||||
|
if (msg.method === 'session/prompt') {
|
||||||
|
setImmediate(() => {
|
||||||
|
// Agent asks for permission first
|
||||||
|
mock.sendPermissionRequest(100, 'sess-1');
|
||||||
|
// Then provides the actual response
|
||||||
|
mock.sendChunk('sess-1', 'done');
|
||||||
|
mock.sendResponse(msg.id, { stopReason: 'end_turn' });
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
const result = await client.prompt('test');
|
||||||
|
expect(result).toBe('done');
|
||||||
|
|
||||||
|
// Verify we sent a rejection for the permission request
|
||||||
|
const writes = mock.stdin.write.mock.calls.map((c) => {
|
||||||
|
try { return JSON.parse(c[0] as string); } catch { return null; }
|
||||||
|
}).filter(Boolean);
|
||||||
|
const rejection = writes.find((w: Record<string, unknown>) => w.id === 100);
|
||||||
|
expect(rejection).toBeTruthy();
|
||||||
|
expect((rejection as { result: { outcome: { outcome: string } } }).result.outcome.outcome).toBe('cancelled');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('dispose', () => {
|
||||||
|
it('kills process and rejects pending', async () => {
|
||||||
|
createClient();
|
||||||
|
autoHandshake('sess-1');
|
||||||
|
await client.ensureReady();
|
||||||
|
|
||||||
|
// Override write so prompt is sent but gets no response; then dispose
|
||||||
|
mock.stdin.write.mockImplementation(() => {
|
||||||
|
setImmediate(() => client.dispose());
|
||||||
|
});
|
||||||
|
|
||||||
|
const promptPromise = client.prompt('test');
|
||||||
|
await expect(promptPromise).rejects.toThrow('disposed');
|
||||||
|
expect(mock.proc.kill).toHaveBeenCalledWith('SIGTERM');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('is safe to call multiple times', () => {
|
||||||
|
createClient();
|
||||||
|
client.dispose();
|
||||||
|
client.dispose();
|
||||||
|
// No error thrown
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('isAlive', () => {
|
||||||
|
it('returns false before init', () => {
|
||||||
|
createClient();
|
||||||
|
expect(client.isAlive).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns true after successful init', async () => {
|
||||||
|
createClient();
|
||||||
|
autoHandshake();
|
||||||
|
await client.ensureReady();
|
||||||
|
expect(client.isAlive).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns false after dispose', async () => {
|
||||||
|
createClient();
|
||||||
|
autoHandshake();
|
||||||
|
await client.ensureReady();
|
||||||
|
client.dispose();
|
||||||
|
expect(client.isAlive).toBe(false);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
200
src/mcplocal/tests/gemini-acp.test.ts
Normal file
200
src/mcplocal/tests/gemini-acp.test.ts
Normal file
@@ -0,0 +1,200 @@
|
|||||||
|
import { describe, it, expect, vi, beforeEach } from 'vitest';
|
||||||
|
|
||||||
|
const mockEnsureReady = vi.fn(async () => {});
|
||||||
|
const mockPrompt = vi.fn(async () => 'mock response');
|
||||||
|
const mockDispose = vi.fn();
|
||||||
|
|
||||||
|
vi.mock('../src/providers/acp-client.js', () => ({
|
||||||
|
AcpClient: vi.fn(function (this: Record<string, unknown>) {
|
||||||
|
this.ensureReady = mockEnsureReady;
|
||||||
|
this.prompt = mockPrompt;
|
||||||
|
this.dispose = mockDispose;
|
||||||
|
}),
|
||||||
|
}));
|
||||||
|
|
||||||
|
// Must import after mock setup
|
||||||
|
const { GeminiAcpProvider } = await import('../src/providers/gemini-acp.js');
|
||||||
|
|
||||||
|
describe('GeminiAcpProvider', () => {
|
||||||
|
let provider: InstanceType<typeof GeminiAcpProvider>;
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
vi.clearAllMocks();
|
||||||
|
mockPrompt.mockResolvedValue('mock response');
|
||||||
|
provider = new GeminiAcpProvider({ binaryPath: '/usr/bin/gemini', defaultModel: 'gemini-2.5-flash' });
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('complete', () => {
|
||||||
|
it('builds prompt from messages and returns CompletionResult', async () => {
|
||||||
|
mockPrompt.mockResolvedValueOnce('The answer is 42.');
|
||||||
|
|
||||||
|
const result = await provider.complete({
|
||||||
|
messages: [
|
||||||
|
{ role: 'system', content: 'You are helpful.' },
|
||||||
|
{ role: 'user', content: 'What is the answer?' },
|
||||||
|
],
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(result.content).toBe('The answer is 42.');
|
||||||
|
expect(result.toolCalls).toEqual([]);
|
||||||
|
expect(result.finishReason).toBe('stop');
|
||||||
|
|
||||||
|
const promptText = mockPrompt.mock.calls[0][0] as string;
|
||||||
|
expect(promptText).toContain('System: You are helpful.');
|
||||||
|
expect(promptText).toContain('What is the answer?');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('formats assistant messages with prefix', async () => {
|
||||||
|
mockPrompt.mockResolvedValueOnce('ok');
|
||||||
|
|
||||||
|
await provider.complete({
|
||||||
|
messages: [
|
||||||
|
{ role: 'user', content: 'Hello' },
|
||||||
|
{ role: 'assistant', content: 'Hi there' },
|
||||||
|
{ role: 'user', content: 'How are you?' },
|
||||||
|
],
|
||||||
|
});
|
||||||
|
|
||||||
|
const promptText = mockPrompt.mock.calls[0][0] as string;
|
||||||
|
expect(promptText).toContain('Assistant: Hi there');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('trims response content', async () => {
|
||||||
|
mockPrompt.mockResolvedValueOnce(' padded response \n');
|
||||||
|
|
||||||
|
const result = await provider.complete({
|
||||||
|
messages: [{ role: 'user', content: 'test' }],
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(result.content).toBe('padded response');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('serializes concurrent calls to same model', async () => {
|
||||||
|
const callOrder: number[] = [];
|
||||||
|
let callCount = 0;
|
||||||
|
|
||||||
|
mockPrompt.mockImplementation(async () => {
|
||||||
|
const myCall = ++callCount;
|
||||||
|
callOrder.push(myCall);
|
||||||
|
await new Promise((r) => setTimeout(r, 10));
|
||||||
|
return `response-${myCall}`;
|
||||||
|
});
|
||||||
|
|
||||||
|
const [r1, r2, r3] = await Promise.all([
|
||||||
|
provider.complete({ messages: [{ role: 'user', content: 'a' }] }),
|
||||||
|
provider.complete({ messages: [{ role: 'user', content: 'b' }] }),
|
||||||
|
provider.complete({ messages: [{ role: 'user', content: 'c' }] }),
|
||||||
|
]);
|
||||||
|
|
||||||
|
expect(r1.content).toBe('response-1');
|
||||||
|
expect(r2.content).toBe('response-2');
|
||||||
|
expect(r3.content).toBe('response-3');
|
||||||
|
expect(callOrder).toEqual([1, 2, 3]);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('continues queue after error', async () => {
|
||||||
|
mockPrompt
|
||||||
|
.mockRejectedValueOnce(new Error('first fails'))
|
||||||
|
.mockResolvedValueOnce('second works');
|
||||||
|
|
||||||
|
const results = await Promise.allSettled([
|
||||||
|
provider.complete({ messages: [{ role: 'user', content: 'a' }] }),
|
||||||
|
provider.complete({ messages: [{ role: 'user', content: 'b' }] }),
|
||||||
|
]);
|
||||||
|
|
||||||
|
expect(results[0].status).toBe('rejected');
|
||||||
|
expect(results[1].status).toBe('fulfilled');
|
||||||
|
if (results[1].status === 'fulfilled') {
|
||||||
|
expect(results[1].value.content).toBe('second works');
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('session pool', () => {
|
||||||
|
it('creates separate pool entries for different models', async () => {
|
||||||
|
mockPrompt.mockResolvedValue('ok');
|
||||||
|
|
||||||
|
await provider.complete({ messages: [{ role: 'user', content: 'a' }], model: 'gemini-2.5-flash' });
|
||||||
|
await provider.complete({ messages: [{ role: 'user', content: 'b' }], model: 'gemini-2.5-pro' });
|
||||||
|
|
||||||
|
expect(provider.poolSize).toBe(2);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('reuses existing pool entry for same model', async () => {
|
||||||
|
mockPrompt.mockResolvedValue('ok');
|
||||||
|
|
||||||
|
await provider.complete({ messages: [{ role: 'user', content: 'a' }], model: 'gemini-2.5-flash' });
|
||||||
|
await provider.complete({ messages: [{ role: 'user', content: 'b' }], model: 'gemini-2.5-flash' });
|
||||||
|
|
||||||
|
expect(provider.poolSize).toBe(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('uses defaultModel when no model specified', async () => {
|
||||||
|
mockPrompt.mockResolvedValue('ok');
|
||||||
|
|
||||||
|
await provider.complete({ messages: [{ role: 'user', content: 'a' }] });
|
||||||
|
|
||||||
|
expect(provider.poolSize).toBe(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('evicts idle sessions', async () => {
|
||||||
|
// Use a very short TTL for testing
|
||||||
|
const shortTtl = new GeminiAcpProvider({
|
||||||
|
binaryPath: '/usr/bin/gemini',
|
||||||
|
defaultModel: 'gemini-2.5-flash',
|
||||||
|
idleTtlMs: 1, // 1ms TTL
|
||||||
|
});
|
||||||
|
|
||||||
|
mockPrompt.mockResolvedValue('ok');
|
||||||
|
await shortTtl.complete({ messages: [{ role: 'user', content: 'a' }], model: 'model-a' });
|
||||||
|
expect(shortTtl.poolSize).toBe(1);
|
||||||
|
|
||||||
|
// Wait for TTL to expire
|
||||||
|
await new Promise((r) => setTimeout(r, 10));
|
||||||
|
|
||||||
|
// Next complete call triggers eviction of old entry and creates new one
|
||||||
|
await shortTtl.complete({ messages: [{ role: 'user', content: 'b' }], model: 'model-b' });
|
||||||
|
// model-a should have been evicted, only model-b remains
|
||||||
|
expect(shortTtl.poolSize).toBe(1);
|
||||||
|
expect(mockDispose).toHaveBeenCalled();
|
||||||
|
|
||||||
|
shortTtl.dispose();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('dispose kills all pooled clients', async () => {
|
||||||
|
mockPrompt.mockResolvedValue('ok');
|
||||||
|
|
||||||
|
await provider.complete({ messages: [{ role: 'user', content: 'a' }], model: 'model-a' });
|
||||||
|
await provider.complete({ messages: [{ role: 'user', content: 'b' }], model: 'model-b' });
|
||||||
|
expect(provider.poolSize).toBe(2);
|
||||||
|
|
||||||
|
provider.dispose();
|
||||||
|
expect(provider.poolSize).toBe(0);
|
||||||
|
expect(mockDispose).toHaveBeenCalledTimes(2);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('listModels', () => {
|
||||||
|
it('returns static model list', async () => {
|
||||||
|
const models = await provider.listModels();
|
||||||
|
expect(models).toContain('gemini-2.5-flash');
|
||||||
|
expect(models).toContain('gemini-2.5-pro');
|
||||||
|
expect(models).toContain('gemini-2.0-flash');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('dispose', () => {
|
||||||
|
it('delegates to all pooled AcpClients', async () => {
|
||||||
|
mockPrompt.mockResolvedValue('ok');
|
||||||
|
await provider.complete({ messages: [{ role: 'user', content: 'test' }] });
|
||||||
|
provider.dispose();
|
||||||
|
expect(mockDispose).toHaveBeenCalled();
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('name', () => {
|
||||||
|
it('is gemini-cli for config compatibility', () => {
|
||||||
|
expect(provider.name).toBe('gemini-cli');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
69
src/mcplocal/tests/http/config.test.ts
Normal file
69
src/mcplocal/tests/http/config.test.ts
Normal file
@@ -0,0 +1,69 @@
|
|||||||
|
import { describe, it, expect, vi, afterEach, beforeEach } from 'vitest';
|
||||||
|
import { loadLlmConfig, resetConfigCache } from '../../src/http/config.js';
|
||||||
|
import { existsSync, readFileSync } from 'node:fs';
|
||||||
|
|
||||||
|
vi.mock('node:fs', async () => {
|
||||||
|
const actual = await vi.importActual<typeof import('node:fs')>('node:fs');
|
||||||
|
return {
|
||||||
|
...actual,
|
||||||
|
existsSync: vi.fn(),
|
||||||
|
readFileSync: vi.fn(),
|
||||||
|
};
|
||||||
|
});
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
resetConfigCache();
|
||||||
|
});
|
||||||
|
|
||||||
|
afterEach(() => {
|
||||||
|
vi.restoreAllMocks();
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('loadLlmConfig', () => {
|
||||||
|
it('returns undefined when config file does not exist', () => {
|
||||||
|
vi.mocked(existsSync).mockReturnValue(false);
|
||||||
|
expect(loadLlmConfig()).toBeUndefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns undefined when config has no llm section', () => {
|
||||||
|
vi.mocked(existsSync).mockReturnValue(true);
|
||||||
|
vi.mocked(readFileSync).mockReturnValue(JSON.stringify({ mcplocalUrl: 'http://localhost:3200' }));
|
||||||
|
expect(loadLlmConfig()).toBeUndefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns undefined when provider is none', () => {
|
||||||
|
vi.mocked(existsSync).mockReturnValue(true);
|
||||||
|
vi.mocked(readFileSync).mockReturnValue(JSON.stringify({ llm: { provider: 'none' } }));
|
||||||
|
expect(loadLlmConfig()).toBeUndefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns LLM config when provider is configured', () => {
|
||||||
|
vi.mocked(existsSync).mockReturnValue(true);
|
||||||
|
vi.mocked(readFileSync).mockReturnValue(JSON.stringify({
|
||||||
|
llm: { provider: 'anthropic', model: 'claude-haiku-3-5-20241022' },
|
||||||
|
}));
|
||||||
|
const result = loadLlmConfig();
|
||||||
|
expect(result).toEqual({ provider: 'anthropic', model: 'claude-haiku-3-5-20241022' });
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns full LLM config with all fields', () => {
|
||||||
|
vi.mocked(existsSync).mockReturnValue(true);
|
||||||
|
vi.mocked(readFileSync).mockReturnValue(JSON.stringify({
|
||||||
|
llm: { provider: 'vllm', model: 'my-model', url: 'http://gpu:8000' },
|
||||||
|
}));
|
||||||
|
const result = loadLlmConfig();
|
||||||
|
expect(result).toEqual({ provider: 'vllm', model: 'my-model', url: 'http://gpu:8000' });
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns undefined on malformed JSON', () => {
|
||||||
|
vi.mocked(existsSync).mockReturnValue(true);
|
||||||
|
vi.mocked(readFileSync).mockReturnValue('NOT JSON!!!');
|
||||||
|
expect(loadLlmConfig()).toBeUndefined();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns undefined on read error', () => {
|
||||||
|
vi.mocked(existsSync).mockReturnValue(true);
|
||||||
|
vi.mocked(readFileSync).mockImplementation(() => { throw new Error('EACCES'); });
|
||||||
|
expect(loadLlmConfig()).toBeUndefined();
|
||||||
|
});
|
||||||
|
});
|
||||||
@@ -6,13 +6,14 @@
|
|||||||
* (node:http) and a mock LLM provider. No Docker or external services needed.
|
* (node:http) and a mock LLM provider. No Docker or external services needed.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import { describe, it, expect, beforeEach, afterEach, afterAll } from 'vitest';
|
import { describe, it, expect, vi, beforeEach, afterEach, afterAll } from 'vitest';
|
||||||
import { createServer, type Server, type IncomingMessage, type ServerResponse } from 'node:http';
|
import { createServer, type Server, type IncomingMessage, type ServerResponse } from 'node:http';
|
||||||
|
|
||||||
import { McpRouter } from '../../src/router.js';
|
import { McpRouter } from '../../src/router.js';
|
||||||
import { McpdUpstream } from '../../src/upstream/mcpd.js';
|
import { McpdUpstream } from '../../src/upstream/mcpd.js';
|
||||||
import { McpdClient } from '../../src/http/mcpd-client.js';
|
import { McpdClient } from '../../src/http/mcpd-client.js';
|
||||||
import { LlmProcessor, DEFAULT_PROCESSOR_CONFIG } from '../../src/llm/processor.js';
|
import { LlmProcessor, DEFAULT_PROCESSOR_CONFIG } from '../../src/llm/processor.js';
|
||||||
|
import { ResponsePaginator } from '../../src/llm/pagination.js';
|
||||||
import { ProviderRegistry } from '../../src/providers/registry.js';
|
import { ProviderRegistry } from '../../src/providers/registry.js';
|
||||||
import { TieredHealthMonitor } from '../../src/health/tiered.js';
|
import { TieredHealthMonitor } from '../../src/health/tiered.js';
|
||||||
import { refreshUpstreams } from '../../src/discovery.js';
|
import { refreshUpstreams } from '../../src/discovery.js';
|
||||||
@@ -1096,4 +1097,429 @@ describe('End-to-end integration: 3-tier architecture', () => {
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// -----------------------------------------------------------------------
|
||||||
|
// 8. Smart pagination through the full pipeline
|
||||||
|
// -----------------------------------------------------------------------
|
||||||
|
describe('Smart pagination', () => {
|
||||||
|
// Helper: generate a large JSON response (~100KB)
|
||||||
|
function makeLargeToolResult(): { flows: Array<{ id: string; type: string; label: string; wires: string[] }> } {
|
||||||
|
return {
|
||||||
|
flows: Array.from({ length: 200 }, (_, i) => ({
|
||||||
|
id: `flow-${String(i).padStart(4, '0')}`,
|
||||||
|
type: i % 3 === 0 ? 'function' : i % 3 === 1 ? 'http request' : 'inject',
|
||||||
|
label: `Node ${String(i)}: ${i % 3 === 0 ? 'Data transform' : i % 3 === 1 ? 'API call' : 'Timer trigger'}`,
|
||||||
|
wires: [`flow-${String(i + 1).padStart(4, '0')}`],
|
||||||
|
})),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
it('paginates large tool response with smart AI summaries through router', async () => {
|
||||||
|
const largeResult = makeLargeToolResult();
|
||||||
|
|
||||||
|
mockMcpd = await startMockMcpd({
|
||||||
|
servers: [{ id: 'srv-nodered', name: 'node-red', transport: 'stdio' }],
|
||||||
|
proxyResponses: new Map([
|
||||||
|
['srv-nodered:tools/list', {
|
||||||
|
result: { tools: [{ name: 'get_flows', description: 'Get all flows' }] },
|
||||||
|
}],
|
||||||
|
['srv-nodered:tools/call', {
|
||||||
|
result: largeResult,
|
||||||
|
}],
|
||||||
|
]),
|
||||||
|
});
|
||||||
|
|
||||||
|
const client = new McpdClient(mockMcpd.baseUrl, mockMcpd.config.expectedToken);
|
||||||
|
router = new McpRouter();
|
||||||
|
await refreshUpstreams(router, client);
|
||||||
|
await router.discoverTools();
|
||||||
|
|
||||||
|
// Set up paginator with LLM provider for smart summaries
|
||||||
|
const registry = new ProviderRegistry();
|
||||||
|
const completeFn = vi.fn().mockImplementation(() => ({
|
||||||
|
content: JSON.stringify([
|
||||||
|
{ page: 1, summary: 'Function nodes and data transforms (flow-0000 through flow-0050)' },
|
||||||
|
{ page: 2, summary: 'HTTP request nodes and API integrations (flow-0051 through flow-0100)' },
|
||||||
|
{ page: 3, summary: 'Inject/timer nodes and triggers (flow-0101 through flow-0150)' },
|
||||||
|
{ page: 4, summary: 'Remaining nodes and wire connections (flow-0151 through flow-0199)' },
|
||||||
|
]),
|
||||||
|
}));
|
||||||
|
const mockProvider: LlmProvider = {
|
||||||
|
name: 'test-paginator',
|
||||||
|
isAvailable: () => true,
|
||||||
|
complete: completeFn,
|
||||||
|
};
|
||||||
|
registry.register(mockProvider);
|
||||||
|
|
||||||
|
// Low threshold so our response triggers pagination
|
||||||
|
const paginator = new ResponsePaginator(registry, {
|
||||||
|
sizeThreshold: 1000,
|
||||||
|
pageSize: 8000,
|
||||||
|
});
|
||||||
|
router.setPaginator(paginator);
|
||||||
|
|
||||||
|
// Call the tool — should get pagination index, not raw data
|
||||||
|
const response = await router.route({
|
||||||
|
jsonrpc: '2.0',
|
||||||
|
id: 'paginate-1',
|
||||||
|
method: 'tools/call',
|
||||||
|
params: { name: 'node-red/get_flows', arguments: {} },
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(response.error).toBeUndefined();
|
||||||
|
const result = response.result as { content: Array<{ type: string; text: string }> };
|
||||||
|
expect(result.content).toHaveLength(1);
|
||||||
|
const indexText = result.content[0]!.text;
|
||||||
|
|
||||||
|
// Verify smart index with AI summaries
|
||||||
|
expect(indexText).toContain('AI-generated summaries');
|
||||||
|
expect(indexText).toContain('Function nodes and data transforms');
|
||||||
|
expect(indexText).toContain('HTTP request nodes');
|
||||||
|
expect(indexText).toContain('_resultId');
|
||||||
|
expect(indexText).toContain('_page');
|
||||||
|
|
||||||
|
// LLM was called to generate summaries
|
||||||
|
expect(completeFn).toHaveBeenCalledOnce();
|
||||||
|
const llmCall = completeFn.mock.calls[0]![0]!;
|
||||||
|
expect(llmCall.messages[0].role).toBe('system');
|
||||||
|
expect(llmCall.messages[1].content).toContain('node-red/get_flows');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('retrieves specific pages after pagination via _resultId/_page', async () => {
|
||||||
|
const largeResult = makeLargeToolResult();
|
||||||
|
|
||||||
|
mockMcpd = await startMockMcpd({
|
||||||
|
servers: [{ id: 'srv-nodered', name: 'node-red', transport: 'stdio' }],
|
||||||
|
proxyResponses: new Map([
|
||||||
|
['srv-nodered:tools/list', {
|
||||||
|
result: { tools: [{ name: 'get_flows', description: 'Get all flows' }] },
|
||||||
|
}],
|
||||||
|
['srv-nodered:tools/call', {
|
||||||
|
result: largeResult,
|
||||||
|
}],
|
||||||
|
]),
|
||||||
|
});
|
||||||
|
|
||||||
|
const client = new McpdClient(mockMcpd.baseUrl, mockMcpd.config.expectedToken);
|
||||||
|
router = new McpRouter();
|
||||||
|
await refreshUpstreams(router, client);
|
||||||
|
await router.discoverTools();
|
||||||
|
|
||||||
|
// Simple paginator (no LLM) for predictable behavior
|
||||||
|
const paginator = new ResponsePaginator(null, {
|
||||||
|
sizeThreshold: 1000,
|
||||||
|
pageSize: 8000,
|
||||||
|
});
|
||||||
|
router.setPaginator(paginator);
|
||||||
|
|
||||||
|
// First call — get the pagination index
|
||||||
|
const indexResponse = await router.route({
|
||||||
|
jsonrpc: '2.0',
|
||||||
|
id: 'idx-1',
|
||||||
|
method: 'tools/call',
|
||||||
|
params: { name: 'node-red/get_flows', arguments: {} },
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(indexResponse.error).toBeUndefined();
|
||||||
|
const indexResult = indexResponse.result as { content: Array<{ text: string }> };
|
||||||
|
const indexText = indexResult.content[0]!.text;
|
||||||
|
const resultIdMatch = /"_resultId": "([^"]+)"/.exec(indexText);
|
||||||
|
expect(resultIdMatch).not.toBeNull();
|
||||||
|
const resultId = resultIdMatch![1]!;
|
||||||
|
|
||||||
|
// Second call — retrieve page 1 via _resultId/_page
|
||||||
|
const page1Response = await router.route({
|
||||||
|
jsonrpc: '2.0',
|
||||||
|
id: 'page-1',
|
||||||
|
method: 'tools/call',
|
||||||
|
params: {
|
||||||
|
name: 'node-red/get_flows',
|
||||||
|
arguments: { _resultId: resultId, _page: 1 },
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(page1Response.error).toBeUndefined();
|
||||||
|
const page1Result = page1Response.result as { content: Array<{ text: string }> };
|
||||||
|
expect(page1Result.content[0]!.text).toContain('Page 1/');
|
||||||
|
// Page content should contain flow data
|
||||||
|
expect(page1Result.content[0]!.text).toContain('flow-');
|
||||||
|
|
||||||
|
// Third call — retrieve page 2
|
||||||
|
const page2Response = await router.route({
|
||||||
|
jsonrpc: '2.0',
|
||||||
|
id: 'page-2',
|
||||||
|
method: 'tools/call',
|
||||||
|
params: {
|
||||||
|
name: 'node-red/get_flows',
|
||||||
|
arguments: { _resultId: resultId, _page: 2 },
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(page2Response.error).toBeUndefined();
|
||||||
|
const page2Result = page2Response.result as { content: Array<{ text: string }> };
|
||||||
|
expect(page2Result.content[0]!.text).toContain('Page 2/');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('retrieves full content with _page=all', async () => {
|
||||||
|
const largeResult = makeLargeToolResult();
|
||||||
|
|
||||||
|
mockMcpd = await startMockMcpd({
|
||||||
|
servers: [{ id: 'srv-nodered', name: 'node-red', transport: 'stdio' }],
|
||||||
|
proxyResponses: new Map([
|
||||||
|
['srv-nodered:tools/list', {
|
||||||
|
result: { tools: [{ name: 'get_flows', description: 'Get all flows' }] },
|
||||||
|
}],
|
||||||
|
['srv-nodered:tools/call', {
|
||||||
|
result: largeResult,
|
||||||
|
}],
|
||||||
|
]),
|
||||||
|
});
|
||||||
|
|
||||||
|
const client = new McpdClient(mockMcpd.baseUrl, mockMcpd.config.expectedToken);
|
||||||
|
router = new McpRouter();
|
||||||
|
await refreshUpstreams(router, client);
|
||||||
|
await router.discoverTools();
|
||||||
|
|
||||||
|
const paginator = new ResponsePaginator(null, {
|
||||||
|
sizeThreshold: 1000,
|
||||||
|
pageSize: 8000,
|
||||||
|
});
|
||||||
|
router.setPaginator(paginator);
|
||||||
|
|
||||||
|
// Get index
|
||||||
|
const indexResponse = await router.route({
|
||||||
|
jsonrpc: '2.0',
|
||||||
|
id: 'all-idx',
|
||||||
|
method: 'tools/call',
|
||||||
|
params: { name: 'node-red/get_flows', arguments: {} },
|
||||||
|
});
|
||||||
|
const indexText = (indexResponse.result as { content: Array<{ text: string }> }).content[0]!.text;
|
||||||
|
const resultId = /"_resultId": "([^"]+)"/.exec(indexText)![1]!;
|
||||||
|
|
||||||
|
// Request all pages
|
||||||
|
const allResponse = await router.route({
|
||||||
|
jsonrpc: '2.0',
|
||||||
|
id: 'all-1',
|
||||||
|
method: 'tools/call',
|
||||||
|
params: {
|
||||||
|
name: 'node-red/get_flows',
|
||||||
|
arguments: { _resultId: resultId, _page: 'all' },
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(allResponse.error).toBeUndefined();
|
||||||
|
const allResult = allResponse.result as { content: Array<{ text: string }> };
|
||||||
|
// Full response should contain the original JSON
|
||||||
|
const fullText = allResult.content[0]!.text;
|
||||||
|
expect(fullText).toContain('flow-0000');
|
||||||
|
expect(fullText).toContain('flow-0199');
|
||||||
|
// Should be the full serialized result
|
||||||
|
expect(JSON.parse(fullText)).toEqual(largeResult);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('falls back to simple index when LLM fails', async () => {
|
||||||
|
const largeResult = makeLargeToolResult();
|
||||||
|
|
||||||
|
mockMcpd = await startMockMcpd({
|
||||||
|
servers: [{ id: 'srv-nodered', name: 'node-red', transport: 'stdio' }],
|
||||||
|
proxyResponses: new Map([
|
||||||
|
['srv-nodered:tools/list', {
|
||||||
|
result: { tools: [{ name: 'get_flows', description: 'Get all flows' }] },
|
||||||
|
}],
|
||||||
|
['srv-nodered:tools/call', {
|
||||||
|
result: largeResult,
|
||||||
|
}],
|
||||||
|
]),
|
||||||
|
});
|
||||||
|
|
||||||
|
const client = new McpdClient(mockMcpd.baseUrl, mockMcpd.config.expectedToken);
|
||||||
|
router = new McpRouter();
|
||||||
|
await refreshUpstreams(router, client);
|
||||||
|
await router.discoverTools();
|
||||||
|
|
||||||
|
// Set up paginator with a failing LLM
|
||||||
|
const registry = new ProviderRegistry();
|
||||||
|
registry.register(createFailingLlmProvider('broken-llm'));
|
||||||
|
const paginator = new ResponsePaginator(registry, {
|
||||||
|
sizeThreshold: 1000,
|
||||||
|
pageSize: 8000,
|
||||||
|
});
|
||||||
|
router.setPaginator(paginator);
|
||||||
|
|
||||||
|
const response = await router.route({
|
||||||
|
jsonrpc: '2.0',
|
||||||
|
id: 'fallback-idx',
|
||||||
|
method: 'tools/call',
|
||||||
|
params: { name: 'node-red/get_flows', arguments: {} },
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(response.error).toBeUndefined();
|
||||||
|
const text = (response.result as { content: Array<{ text: string }> }).content[0]!.text;
|
||||||
|
// Should still paginate, just without AI summaries
|
||||||
|
expect(text).toContain('_resultId');
|
||||||
|
expect(text).not.toContain('AI-generated summaries');
|
||||||
|
expect(text).toContain('Page 1:');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns expired cache message for stale _resultId', async () => {
|
||||||
|
router = new McpRouter();
|
||||||
|
const paginator = new ResponsePaginator(null, { sizeThreshold: 100, pageSize: 50 });
|
||||||
|
router.setPaginator(paginator);
|
||||||
|
|
||||||
|
// Try to retrieve a page with an unknown resultId
|
||||||
|
const response = await router.route({
|
||||||
|
jsonrpc: '2.0',
|
||||||
|
id: 'stale-1',
|
||||||
|
method: 'tools/call',
|
||||||
|
params: {
|
||||||
|
name: 'anything/tool',
|
||||||
|
arguments: { _resultId: 'nonexistent-id', _page: 1 },
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(response.error).toBeUndefined();
|
||||||
|
const text = (response.result as { content: Array<{ text: string }> }).content[0]!.text;
|
||||||
|
expect(text).toContain('expired');
|
||||||
|
expect(text).toContain('re-call');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('skips pagination for small responses', async () => {
|
||||||
|
mockMcpd = await startMockMcpd({
|
||||||
|
servers: [{ id: 'srv-small', name: 'smallserver', transport: 'stdio' }],
|
||||||
|
proxyResponses: new Map([
|
||||||
|
['srv-small:tools/list', {
|
||||||
|
result: { tools: [{ name: 'get_status', description: 'Get status' }] },
|
||||||
|
}],
|
||||||
|
['srv-small:tools/call', {
|
||||||
|
result: { status: 'ok', uptime: 12345 },
|
||||||
|
}],
|
||||||
|
]),
|
||||||
|
});
|
||||||
|
|
||||||
|
const client = new McpdClient(mockMcpd.baseUrl, mockMcpd.config.expectedToken);
|
||||||
|
router = new McpRouter();
|
||||||
|
await refreshUpstreams(router, client);
|
||||||
|
await router.discoverTools();
|
||||||
|
|
||||||
|
const paginator = new ResponsePaginator(null, { sizeThreshold: 80000, pageSize: 40000 });
|
||||||
|
router.setPaginator(paginator);
|
||||||
|
|
||||||
|
const response = await router.route({
|
||||||
|
jsonrpc: '2.0',
|
||||||
|
id: 'small-1',
|
||||||
|
method: 'tools/call',
|
||||||
|
params: { name: 'smallserver/get_status', arguments: {} },
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(response.error).toBeUndefined();
|
||||||
|
// Should return the raw result directly, not a pagination index
|
||||||
|
expect(response.result).toEqual({ status: 'ok', uptime: 12345 });
|
||||||
|
});
|
||||||
|
|
||||||
|
it('handles markdown-fenced LLM responses (Gemini quirk)', async () => {
|
||||||
|
const largeResult = makeLargeToolResult();
|
||||||
|
|
||||||
|
mockMcpd = await startMockMcpd({
|
||||||
|
servers: [{ id: 'srv-nodered', name: 'node-red', transport: 'stdio' }],
|
||||||
|
proxyResponses: new Map([
|
||||||
|
['srv-nodered:tools/list', {
|
||||||
|
result: { tools: [{ name: 'get_flows', description: 'Get all flows' }] },
|
||||||
|
}],
|
||||||
|
['srv-nodered:tools/call', {
|
||||||
|
result: largeResult,
|
||||||
|
}],
|
||||||
|
]),
|
||||||
|
});
|
||||||
|
|
||||||
|
const client = new McpdClient(mockMcpd.baseUrl, mockMcpd.config.expectedToken);
|
||||||
|
router = new McpRouter();
|
||||||
|
await refreshUpstreams(router, client);
|
||||||
|
await router.discoverTools();
|
||||||
|
|
||||||
|
// Simulate Gemini wrapping JSON in ```json fences
|
||||||
|
const registry = new ProviderRegistry();
|
||||||
|
const mockProvider: LlmProvider = {
|
||||||
|
name: 'gemini-mock',
|
||||||
|
isAvailable: () => true,
|
||||||
|
complete: vi.fn().mockResolvedValue({
|
||||||
|
content: '```json\n' + JSON.stringify([
|
||||||
|
{ page: 1, summary: 'Climate automation flows' },
|
||||||
|
{ page: 2, summary: 'Lighting control flows' },
|
||||||
|
]) + '\n```',
|
||||||
|
}),
|
||||||
|
};
|
||||||
|
registry.register(mockProvider);
|
||||||
|
|
||||||
|
const paginator = new ResponsePaginator(registry, {
|
||||||
|
sizeThreshold: 1000,
|
||||||
|
pageSize: 8000,
|
||||||
|
});
|
||||||
|
router.setPaginator(paginator);
|
||||||
|
|
||||||
|
const response = await router.route({
|
||||||
|
jsonrpc: '2.0',
|
||||||
|
id: 'fence-1',
|
||||||
|
method: 'tools/call',
|
||||||
|
params: { name: 'node-red/get_flows', arguments: {} },
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(response.error).toBeUndefined();
|
||||||
|
const text = (response.result as { content: Array<{ text: string }> }).content[0]!.text;
|
||||||
|
// Fences were stripped — smart summaries should appear
|
||||||
|
expect(text).toContain('AI-generated summaries');
|
||||||
|
expect(text).toContain('Climate automation flows');
|
||||||
|
expect(text).toContain('Lighting control flows');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('passes model override to LLM when project has custom model', async () => {
|
||||||
|
const largeResult = makeLargeToolResult();
|
||||||
|
|
||||||
|
mockMcpd = await startMockMcpd({
|
||||||
|
servers: [{ id: 'srv-nodered', name: 'node-red', transport: 'stdio' }],
|
||||||
|
proxyResponses: new Map([
|
||||||
|
['srv-nodered:tools/list', {
|
||||||
|
result: { tools: [{ name: 'get_flows', description: 'Get all flows' }] },
|
||||||
|
}],
|
||||||
|
['srv-nodered:tools/call', {
|
||||||
|
result: largeResult,
|
||||||
|
}],
|
||||||
|
]),
|
||||||
|
});
|
||||||
|
|
||||||
|
const client = new McpdClient(mockMcpd.baseUrl, mockMcpd.config.expectedToken);
|
||||||
|
router = new McpRouter();
|
||||||
|
await refreshUpstreams(router, client);
|
||||||
|
await router.discoverTools();
|
||||||
|
|
||||||
|
const registry = new ProviderRegistry();
|
||||||
|
const completeFn = vi.fn().mockResolvedValue({
|
||||||
|
content: JSON.stringify([{ page: 1, summary: 'test' }]),
|
||||||
|
});
|
||||||
|
const mockProvider: LlmProvider = {
|
||||||
|
name: 'test-model-override',
|
||||||
|
isAvailable: () => true,
|
||||||
|
complete: completeFn,
|
||||||
|
};
|
||||||
|
registry.register(mockProvider);
|
||||||
|
|
||||||
|
// Paginator with per-project model override
|
||||||
|
const paginator = new ResponsePaginator(registry, {
|
||||||
|
sizeThreshold: 1000,
|
||||||
|
pageSize: 80000, // One big page so we get exactly 1 summary
|
||||||
|
}, 'gemini-2.5-pro');
|
||||||
|
router.setPaginator(paginator);
|
||||||
|
|
||||||
|
await router.route({
|
||||||
|
jsonrpc: '2.0',
|
||||||
|
id: 'model-1',
|
||||||
|
method: 'tools/call',
|
||||||
|
params: { name: 'node-red/get_flows', arguments: {} },
|
||||||
|
});
|
||||||
|
|
||||||
|
// Verify the model was passed through to the LLM call
|
||||||
|
expect(completeFn).toHaveBeenCalledOnce();
|
||||||
|
const llmOpts = completeFn.mock.calls[0]![0]!;
|
||||||
|
expect(llmOpts.model).toBe('gemini-2.5-pro');
|
||||||
|
});
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
135
src/mcplocal/tests/llm-config.test.ts
Normal file
135
src/mcplocal/tests/llm-config.test.ts
Normal file
@@ -0,0 +1,135 @@
|
|||||||
|
import { describe, it, expect, vi } from 'vitest';
|
||||||
|
import { createProviderFromConfig } from '../src/llm-config.js';
|
||||||
|
import type { SecretStore } from '@mcpctl/shared';
|
||||||
|
|
||||||
|
function mockSecretStore(secrets: Record<string, string> = {}): SecretStore {
|
||||||
|
return {
|
||||||
|
get: vi.fn(async (key: string) => secrets[key] ?? null),
|
||||||
|
set: vi.fn(async () => {}),
|
||||||
|
delete: vi.fn(async () => true),
|
||||||
|
backend: () => 'mock',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
describe('createProviderFromConfig', () => {
|
||||||
|
it('returns empty registry for undefined config', async () => {
|
||||||
|
const store = mockSecretStore();
|
||||||
|
const registry = await createProviderFromConfig(undefined, store);
|
||||||
|
expect(registry.getActive()).toBeNull();
|
||||||
|
expect(registry.list()).toEqual([]);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns empty registry for provider=none', async () => {
|
||||||
|
const store = mockSecretStore();
|
||||||
|
const registry = await createProviderFromConfig({ provider: 'none' }, store);
|
||||||
|
expect(registry.getActive()).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('creates gemini-cli provider using ACP', async () => {
|
||||||
|
const store = mockSecretStore();
|
||||||
|
const registry = await createProviderFromConfig(
|
||||||
|
{ provider: 'gemini-cli', model: 'gemini-2.5-flash', binaryPath: '/usr/bin/gemini' },
|
||||||
|
store,
|
||||||
|
);
|
||||||
|
expect(registry.getActive()).not.toBeNull();
|
||||||
|
expect(registry.getActive()!.name).toBe('gemini-cli');
|
||||||
|
// ACP provider has dispose method
|
||||||
|
expect(typeof registry.getActive()!.dispose).toBe('function');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('creates ollama provider', async () => {
|
||||||
|
const store = mockSecretStore();
|
||||||
|
const registry = await createProviderFromConfig(
|
||||||
|
{ provider: 'ollama', model: 'llama3.2', url: 'http://localhost:11434' },
|
||||||
|
store,
|
||||||
|
);
|
||||||
|
expect(registry.getActive()!.name).toBe('ollama');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('creates anthropic provider with API key from secret store', async () => {
|
||||||
|
const store = mockSecretStore({ 'anthropic-api-key': 'sk-ant-test' });
|
||||||
|
const registry = await createProviderFromConfig(
|
||||||
|
{ provider: 'anthropic', model: 'claude-haiku-3-5-20241022' },
|
||||||
|
store,
|
||||||
|
);
|
||||||
|
expect(registry.getActive()!.name).toBe('anthropic');
|
||||||
|
expect(store.get).toHaveBeenCalledWith('anthropic-api-key');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns empty registry when anthropic API key is missing', async () => {
|
||||||
|
const store = mockSecretStore();
|
||||||
|
const stderrSpy = vi.spyOn(process.stderr, 'write').mockImplementation(() => true);
|
||||||
|
const registry = await createProviderFromConfig(
|
||||||
|
{ provider: 'anthropic', model: 'claude-haiku-3-5-20241022' },
|
||||||
|
store,
|
||||||
|
);
|
||||||
|
expect(registry.getActive()).toBeNull();
|
||||||
|
expect(stderrSpy).toHaveBeenCalledWith(expect.stringContaining('Anthropic API key not found'));
|
||||||
|
stderrSpy.mockRestore();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('creates openai provider with API key from secret store', async () => {
|
||||||
|
const store = mockSecretStore({ 'openai-api-key': 'sk-test' });
|
||||||
|
const registry = await createProviderFromConfig(
|
||||||
|
{ provider: 'openai', model: 'gpt-4o', url: 'https://api.openai.com' },
|
||||||
|
store,
|
||||||
|
);
|
||||||
|
expect(registry.getActive()!.name).toBe('openai');
|
||||||
|
expect(store.get).toHaveBeenCalledWith('openai-api-key');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns empty registry when openai API key is missing', async () => {
|
||||||
|
const store = mockSecretStore();
|
||||||
|
const stderrSpy = vi.spyOn(process.stderr, 'write').mockImplementation(() => true);
|
||||||
|
const registry = await createProviderFromConfig(
|
||||||
|
{ provider: 'openai' },
|
||||||
|
store,
|
||||||
|
);
|
||||||
|
expect(registry.getActive()).toBeNull();
|
||||||
|
stderrSpy.mockRestore();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('creates deepseek provider with API key from secret store', async () => {
|
||||||
|
const store = mockSecretStore({ 'deepseek-api-key': 'sk-ds-test' });
|
||||||
|
const registry = await createProviderFromConfig(
|
||||||
|
{ provider: 'deepseek', model: 'deepseek-chat' },
|
||||||
|
store,
|
||||||
|
);
|
||||||
|
expect(registry.getActive()!.name).toBe('deepseek');
|
||||||
|
expect(store.get).toHaveBeenCalledWith('deepseek-api-key');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns empty registry when deepseek API key is missing', async () => {
|
||||||
|
const store = mockSecretStore();
|
||||||
|
const stderrSpy = vi.spyOn(process.stderr, 'write').mockImplementation(() => true);
|
||||||
|
const registry = await createProviderFromConfig(
|
||||||
|
{ provider: 'deepseek' },
|
||||||
|
store,
|
||||||
|
);
|
||||||
|
expect(registry.getActive()).toBeNull();
|
||||||
|
stderrSpy.mockRestore();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('creates vllm provider using OpenAI provider', async () => {
|
||||||
|
const store = mockSecretStore();
|
||||||
|
const registry = await createProviderFromConfig(
|
||||||
|
{ provider: 'vllm', model: 'my-model', url: 'http://gpu-server:8000' },
|
||||||
|
store,
|
||||||
|
);
|
||||||
|
// vLLM reuses OpenAI provider under the hood, wrapped with NamedProvider
|
||||||
|
expect(registry.getActive()).not.toBeNull();
|
||||||
|
expect(registry.getActive()!.name).toBe('vllm');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns empty registry when vllm URL is missing', async () => {
|
||||||
|
const store = mockSecretStore();
|
||||||
|
const stderrSpy = vi.spyOn(process.stderr, 'write').mockImplementation(() => true);
|
||||||
|
const registry = await createProviderFromConfig(
|
||||||
|
{ provider: 'vllm' },
|
||||||
|
store,
|
||||||
|
);
|
||||||
|
expect(registry.getActive()).toBeNull();
|
||||||
|
expect(stderrSpy).toHaveBeenCalledWith(expect.stringContaining('vLLM URL not configured'));
|
||||||
|
stderrSpy.mockRestore();
|
||||||
|
});
|
||||||
|
});
|
||||||
@@ -11,6 +11,7 @@ function makeProvider(response: string): ProviderRegistry {
|
|||||||
};
|
};
|
||||||
return {
|
return {
|
||||||
getActive: () => provider,
|
getActive: () => provider,
|
||||||
|
getProvider: () => provider,
|
||||||
register: vi.fn(),
|
register: vi.fn(),
|
||||||
setActive: vi.fn(),
|
setActive: vi.fn(),
|
||||||
listProviders: () => [{ name: 'test', available: true, active: true }],
|
listProviders: () => [{ name: 'test', available: true, active: true }],
|
||||||
@@ -150,6 +151,25 @@ describe('ResponsePaginator', () => {
|
|||||||
expect(text).toContain('HTTP request nodes and API integrations');
|
expect(text).toContain('HTTP request nodes and API integrations');
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('strips markdown code fences from LLM JSON response', async () => {
|
||||||
|
const summaries = [
|
||||||
|
{ page: 1, summary: 'Config section' },
|
||||||
|
{ page: 2, summary: 'Data section' },
|
||||||
|
];
|
||||||
|
// Gemini often wraps JSON in ```json ... ``` fences
|
||||||
|
const fenced = '```json\n' + JSON.stringify(summaries) + '\n```';
|
||||||
|
const registry = makeProvider(fenced);
|
||||||
|
const paginator = new ResponsePaginator(registry, { sizeThreshold: 100, pageSize: 60 });
|
||||||
|
const raw = makeLargeStringWithNewlines(150);
|
||||||
|
const result = await paginator.paginate('test/tool', raw);
|
||||||
|
|
||||||
|
expect(result).not.toBeNull();
|
||||||
|
const text = result!.content[0]!.text;
|
||||||
|
expect(text).toContain('AI-generated summaries');
|
||||||
|
expect(text).toContain('Config section');
|
||||||
|
expect(text).toContain('Data section');
|
||||||
|
});
|
||||||
|
|
||||||
it('falls back to simple index on LLM failure', async () => {
|
it('falls back to simple index on LLM failure', async () => {
|
||||||
const provider: LlmProvider = {
|
const provider: LlmProvider = {
|
||||||
name: 'test',
|
name: 'test',
|
||||||
@@ -158,6 +178,7 @@ describe('ResponsePaginator', () => {
|
|||||||
};
|
};
|
||||||
const registry = {
|
const registry = {
|
||||||
getActive: () => provider,
|
getActive: () => provider,
|
||||||
|
getProvider: () => provider,
|
||||||
register: vi.fn(),
|
register: vi.fn(),
|
||||||
setActive: vi.fn(),
|
setActive: vi.fn(),
|
||||||
listProviders: () => [{ name: 'test', available: true, active: true }],
|
listProviders: () => [{ name: 'test', available: true, active: true }],
|
||||||
@@ -189,6 +210,7 @@ describe('ResponsePaginator', () => {
|
|||||||
};
|
};
|
||||||
const registry = {
|
const registry = {
|
||||||
getActive: () => provider,
|
getActive: () => provider,
|
||||||
|
getProvider: () => provider,
|
||||||
register: vi.fn(),
|
register: vi.fn(),
|
||||||
setActive: vi.fn(),
|
setActive: vi.fn(),
|
||||||
listProviders: () => [{ name: 'test', available: true, active: true }],
|
listProviders: () => [{ name: 'test', available: true, active: true }],
|
||||||
@@ -212,6 +234,7 @@ describe('ResponsePaginator', () => {
|
|||||||
it('falls back to simple when no active provider', async () => {
|
it('falls back to simple when no active provider', async () => {
|
||||||
const registry = {
|
const registry = {
|
||||||
getActive: () => null,
|
getActive: () => null,
|
||||||
|
getProvider: () => null,
|
||||||
register: vi.fn(),
|
register: vi.fn(),
|
||||||
setActive: vi.fn(),
|
setActive: vi.fn(),
|
||||||
listProviders: () => [],
|
listProviders: () => [],
|
||||||
@@ -225,6 +248,58 @@ describe('ResponsePaginator', () => {
|
|||||||
const text = result!.content[0]!.text;
|
const text = result!.content[0]!.text;
|
||||||
expect(text).not.toContain('AI-generated summaries');
|
expect(text).not.toContain('AI-generated summaries');
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('passes modelOverride to provider.complete()', async () => {
|
||||||
|
const completeFn = vi.fn().mockResolvedValue({
|
||||||
|
content: JSON.stringify([{ page: 1, summary: 'test' }, { page: 2, summary: 'test2' }]),
|
||||||
|
});
|
||||||
|
const provider: LlmProvider = {
|
||||||
|
name: 'test',
|
||||||
|
isAvailable: () => true,
|
||||||
|
complete: completeFn,
|
||||||
|
};
|
||||||
|
const registry = {
|
||||||
|
getActive: () => provider,
|
||||||
|
getProvider: () => provider,
|
||||||
|
register: vi.fn(),
|
||||||
|
setActive: vi.fn(),
|
||||||
|
listProviders: () => [{ name: 'test', available: true, active: true }],
|
||||||
|
} as unknown as ProviderRegistry;
|
||||||
|
|
||||||
|
const paginator = new ResponsePaginator(registry, { sizeThreshold: 100, pageSize: 60 }, 'gemini-2.5-pro');
|
||||||
|
const raw = makeLargeStringWithNewlines(150);
|
||||||
|
await paginator.paginate('test/tool', raw);
|
||||||
|
|
||||||
|
expect(completeFn).toHaveBeenCalledOnce();
|
||||||
|
const call = completeFn.mock.calls[0]![0]!;
|
||||||
|
expect(call.model).toBe('gemini-2.5-pro');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('omits model when no modelOverride set', async () => {
|
||||||
|
const completeFn = vi.fn().mockResolvedValue({
|
||||||
|
content: JSON.stringify([{ page: 1, summary: 'test' }, { page: 2, summary: 'test2' }]),
|
||||||
|
});
|
||||||
|
const provider: LlmProvider = {
|
||||||
|
name: 'test',
|
||||||
|
isAvailable: () => true,
|
||||||
|
complete: completeFn,
|
||||||
|
};
|
||||||
|
const registry = {
|
||||||
|
getActive: () => provider,
|
||||||
|
getProvider: () => provider,
|
||||||
|
register: vi.fn(),
|
||||||
|
setActive: vi.fn(),
|
||||||
|
listProviders: () => [{ name: 'test', available: true, active: true }],
|
||||||
|
} as unknown as ProviderRegistry;
|
||||||
|
|
||||||
|
const paginator = new ResponsePaginator(registry, { sizeThreshold: 100, pageSize: 60 });
|
||||||
|
const raw = makeLargeStringWithNewlines(150);
|
||||||
|
await paginator.paginate('test/tool', raw);
|
||||||
|
|
||||||
|
expect(completeFn).toHaveBeenCalledOnce();
|
||||||
|
const call = completeFn.mock.calls[0]![0]!;
|
||||||
|
expect(call.model).toBeUndefined();
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
// --- getPage ---
|
// --- getPage ---
|
||||||
|
|||||||
@@ -6,8 +6,18 @@ import { registerProjectMcpEndpoint } from '../src/http/project-mcp-endpoint.js'
|
|||||||
// Mock discovery module — we don't want real HTTP calls
|
// Mock discovery module — we don't want real HTTP calls
|
||||||
vi.mock('../src/discovery.js', () => ({
|
vi.mock('../src/discovery.js', () => ({
|
||||||
refreshProjectUpstreams: vi.fn(async () => ['mock-server']),
|
refreshProjectUpstreams: vi.fn(async () => ['mock-server']),
|
||||||
|
fetchProjectLlmConfig: vi.fn(async () => ({})),
|
||||||
}));
|
}));
|
||||||
|
|
||||||
|
// Mock config module — don't read real config files
|
||||||
|
vi.mock('../src/http/config.js', async () => {
|
||||||
|
const actual = await vi.importActual<typeof import('../src/http/config.js')>('../src/http/config.js');
|
||||||
|
return {
|
||||||
|
...actual,
|
||||||
|
loadProjectLlmOverride: vi.fn(() => undefined),
|
||||||
|
};
|
||||||
|
});
|
||||||
|
|
||||||
import { refreshProjectUpstreams } from '../src/discovery.js';
|
import { refreshProjectUpstreams } from '../src/discovery.js';
|
||||||
|
|
||||||
function mockMcpdClient() {
|
function mockMcpdClient() {
|
||||||
|
|||||||
@@ -115,4 +115,105 @@ describe('ProviderRegistry', () => {
|
|||||||
|
|
||||||
expect(models).toEqual(['anthropic-model-1', 'anthropic-model-2']);
|
expect(models).toEqual(['anthropic-model-1', 'anthropic-model-2']);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
describe('tier management', () => {
|
||||||
|
it('assigns providers to tiers', () => {
|
||||||
|
registry.register(mockProvider('vllm'));
|
||||||
|
registry.register(mockProvider('gemini'));
|
||||||
|
|
||||||
|
registry.assignTier('vllm', 'fast');
|
||||||
|
registry.assignTier('gemini', 'heavy');
|
||||||
|
|
||||||
|
expect(registry.getTierProviders('fast')).toEqual(['vllm']);
|
||||||
|
expect(registry.getTierProviders('heavy')).toEqual(['gemini']);
|
||||||
|
expect(registry.hasTierConfig()).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('getProvider returns tier-specific provider', () => {
|
||||||
|
const vllm = mockProvider('vllm');
|
||||||
|
const gemini = mockProvider('gemini');
|
||||||
|
registry.register(vllm);
|
||||||
|
registry.register(gemini);
|
||||||
|
|
||||||
|
registry.assignTier('vllm', 'fast');
|
||||||
|
registry.assignTier('gemini', 'heavy');
|
||||||
|
|
||||||
|
expect(registry.getProvider('fast')).toBe(vllm);
|
||||||
|
expect(registry.getProvider('heavy')).toBe(gemini);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('getProvider falls back to other tier', () => {
|
||||||
|
const vllm = mockProvider('vllm');
|
||||||
|
registry.register(vllm);
|
||||||
|
|
||||||
|
registry.assignTier('vllm', 'fast');
|
||||||
|
|
||||||
|
// Requesting heavy but only fast exists → falls back to fast
|
||||||
|
expect(registry.getProvider('heavy')).toBe(vllm);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('getProvider falls back to getActive when no tiers', () => {
|
||||||
|
const openai = mockProvider('openai');
|
||||||
|
registry.register(openai);
|
||||||
|
|
||||||
|
// No tier assignments → falls back to legacy getActive()
|
||||||
|
expect(registry.getProvider('fast')).toBe(openai);
|
||||||
|
expect(registry.getProvider('heavy')).toBe(openai);
|
||||||
|
expect(registry.hasTierConfig()).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('unregister removes from tier assignments', () => {
|
||||||
|
registry.register(mockProvider('vllm'));
|
||||||
|
registry.register(mockProvider('gemini'));
|
||||||
|
|
||||||
|
registry.assignTier('vllm', 'fast');
|
||||||
|
registry.assignTier('gemini', 'heavy');
|
||||||
|
|
||||||
|
registry.unregister('vllm');
|
||||||
|
|
||||||
|
expect(registry.getTierProviders('fast')).toEqual([]);
|
||||||
|
expect(registry.getTierProviders('heavy')).toEqual(['gemini']);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('assignTier throws for unregistered provider', () => {
|
||||||
|
expect(() => registry.assignTier('unknown', 'fast')).toThrow("Provider 'unknown' is not registered");
|
||||||
|
});
|
||||||
|
|
||||||
|
it('multiple providers in same tier uses first', () => {
|
||||||
|
const vllm = mockProvider('vllm');
|
||||||
|
const ollama = mockProvider('ollama');
|
||||||
|
registry.register(vllm);
|
||||||
|
registry.register(ollama);
|
||||||
|
|
||||||
|
registry.assignTier('vllm', 'fast');
|
||||||
|
registry.assignTier('ollama', 'fast');
|
||||||
|
|
||||||
|
expect(registry.getProvider('fast')).toBe(vllm);
|
||||||
|
expect(registry.getTierProviders('fast')).toEqual(['vllm', 'ollama']);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('listProviders includes tier info', () => {
|
||||||
|
registry.register(mockProvider('vllm'));
|
||||||
|
registry.register(mockProvider('gemini'));
|
||||||
|
|
||||||
|
registry.assignTier('vllm', 'fast');
|
||||||
|
registry.assignTier('gemini', 'heavy');
|
||||||
|
|
||||||
|
const providers = registry.listProviders();
|
||||||
|
expect(providers).toEqual([
|
||||||
|
{ name: 'vllm', tiers: ['fast'] },
|
||||||
|
{ name: 'gemini', tiers: ['heavy'] },
|
||||||
|
]);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('disposeAll calls dispose on all providers', () => {
|
||||||
|
const disposeFn = vi.fn();
|
||||||
|
const provider = { ...mockProvider('test'), dispose: disposeFn };
|
||||||
|
registry.register(provider);
|
||||||
|
|
||||||
|
registry.disposeAll();
|
||||||
|
|
||||||
|
expect(disposeFn).toHaveBeenCalledOnce();
|
||||||
|
});
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -2,3 +2,4 @@ export * from './types/index.js';
|
|||||||
export * from './validation/index.js';
|
export * from './validation/index.js';
|
||||||
export * from './constants/index.js';
|
export * from './constants/index.js';
|
||||||
export * from './utils/index.js';
|
export * from './utils/index.js';
|
||||||
|
export * from './secrets/index.js';
|
||||||
|
|||||||
63
src/shared/src/secrets/file-store.ts
Normal file
63
src/shared/src/secrets/file-store.ts
Normal file
@@ -0,0 +1,63 @@
|
|||||||
|
import { existsSync, mkdirSync, readFileSync, writeFileSync, chmodSync } from 'node:fs';
|
||||||
|
import { join } from 'node:path';
|
||||||
|
import { homedir } from 'node:os';
|
||||||
|
import type { SecretStore, SecretStoreDeps } from './types.js';
|
||||||
|
|
||||||
|
function defaultConfigDir(): string {
|
||||||
|
return join(homedir(), '.mcpctl');
|
||||||
|
}
|
||||||
|
|
||||||
|
function secretsPath(configDir: string): string {
|
||||||
|
return join(configDir, 'secrets');
|
||||||
|
}
|
||||||
|
|
||||||
|
export class FileSecretStore implements SecretStore {
|
||||||
|
private readonly configDir: string;
|
||||||
|
|
||||||
|
constructor(deps?: SecretStoreDeps) {
|
||||||
|
this.configDir = deps?.configDir ?? defaultConfigDir();
|
||||||
|
}
|
||||||
|
|
||||||
|
backend(): string {
|
||||||
|
return 'file';
|
||||||
|
}
|
||||||
|
|
||||||
|
async get(key: string): Promise<string | null> {
|
||||||
|
const data = this.readAll();
|
||||||
|
return data[key] ?? null;
|
||||||
|
}
|
||||||
|
|
||||||
|
async set(key: string, value: string): Promise<void> {
|
||||||
|
const data = this.readAll();
|
||||||
|
data[key] = value;
|
||||||
|
this.writeAll(data);
|
||||||
|
}
|
||||||
|
|
||||||
|
async delete(key: string): Promise<boolean> {
|
||||||
|
const data = this.readAll();
|
||||||
|
if (!(key in data)) return false;
|
||||||
|
delete data[key];
|
||||||
|
this.writeAll(data);
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
private readAll(): Record<string, string> {
|
||||||
|
const path = secretsPath(this.configDir);
|
||||||
|
if (!existsSync(path)) return {};
|
||||||
|
try {
|
||||||
|
const raw = readFileSync(path, 'utf-8');
|
||||||
|
return JSON.parse(raw) as Record<string, string>;
|
||||||
|
} catch {
|
||||||
|
return {};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private writeAll(data: Record<string, string>): void {
|
||||||
|
if (!existsSync(this.configDir)) {
|
||||||
|
mkdirSync(this.configDir, { recursive: true });
|
||||||
|
}
|
||||||
|
const path = secretsPath(this.configDir);
|
||||||
|
writeFileSync(path, JSON.stringify(data, null, 2) + '\n', 'utf-8');
|
||||||
|
chmodSync(path, 0o600);
|
||||||
|
}
|
||||||
|
}
|
||||||
97
src/shared/src/secrets/gnome-keyring.ts
Normal file
97
src/shared/src/secrets/gnome-keyring.ts
Normal file
@@ -0,0 +1,97 @@
|
|||||||
|
import { spawn } from 'node:child_process';
|
||||||
|
import { execFile } from 'node:child_process';
|
||||||
|
import { promisify } from 'node:util';
|
||||||
|
import type { SecretStore } from './types.js';
|
||||||
|
|
||||||
|
const execFileAsync = promisify(execFile);
|
||||||
|
const SERVICE = 'mcpctl';
|
||||||
|
|
||||||
|
export type RunCommand = (cmd: string, args: string[], stdin?: string) => Promise<{ stdout: string; code: number }>;
|
||||||
|
|
||||||
|
function defaultRunCommand(cmd: string, args: string[], stdin?: string): Promise<{ stdout: string; code: number }> {
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
const child = spawn(cmd, args, {
|
||||||
|
stdio: ['pipe', 'pipe', 'pipe'],
|
||||||
|
timeout: 5000,
|
||||||
|
});
|
||||||
|
|
||||||
|
const stdoutChunks: Buffer[] = [];
|
||||||
|
child.stdout.on('data', (chunk: Buffer) => stdoutChunks.push(chunk));
|
||||||
|
|
||||||
|
child.on('error', reject);
|
||||||
|
child.on('close', (code) => {
|
||||||
|
const stdout = Buffer.concat(stdoutChunks).toString('utf-8');
|
||||||
|
resolve({ stdout, code: code ?? 1 });
|
||||||
|
});
|
||||||
|
|
||||||
|
if (stdin !== undefined) {
|
||||||
|
child.stdin.write(stdin);
|
||||||
|
child.stdin.end();
|
||||||
|
} else {
|
||||||
|
child.stdin.end();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface GnomeKeyringDeps {
|
||||||
|
run?: RunCommand;
|
||||||
|
}
|
||||||
|
|
||||||
|
export class GnomeKeyringStore implements SecretStore {
|
||||||
|
private readonly run: RunCommand;
|
||||||
|
|
||||||
|
constructor(deps?: GnomeKeyringDeps) {
|
||||||
|
this.run = deps?.run ?? defaultRunCommand;
|
||||||
|
}
|
||||||
|
|
||||||
|
backend(): string {
|
||||||
|
return 'gnome-keyring';
|
||||||
|
}
|
||||||
|
|
||||||
|
async get(key: string): Promise<string | null> {
|
||||||
|
try {
|
||||||
|
const { stdout, code } = await this.run(
|
||||||
|
'secret-tool', ['lookup', 'service', SERVICE, 'key', key],
|
||||||
|
);
|
||||||
|
if (code !== 0 || !stdout) return null;
|
||||||
|
return stdout;
|
||||||
|
} catch {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async set(key: string, value: string): Promise<void> {
|
||||||
|
const { code } = await this.run(
|
||||||
|
'secret-tool',
|
||||||
|
['store', '--label', `mcpctl: ${key}`, 'service', SERVICE, 'key', key],
|
||||||
|
value,
|
||||||
|
);
|
||||||
|
if (code !== 0) {
|
||||||
|
throw new Error(`secret-tool store exited with code ${code}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async delete(key: string): Promise<boolean> {
|
||||||
|
try {
|
||||||
|
const { code } = await this.run(
|
||||||
|
'secret-tool', ['clear', 'service', SERVICE, 'key', key],
|
||||||
|
);
|
||||||
|
return code === 0;
|
||||||
|
} catch {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
static async isAvailable(deps?: { run?: RunCommand }): Promise<boolean> {
|
||||||
|
try {
|
||||||
|
if (deps?.run) {
|
||||||
|
const { code } = await deps.run('secret-tool', ['--version']);
|
||||||
|
return code === 0;
|
||||||
|
}
|
||||||
|
await execFileAsync('secret-tool', ['--version'], { timeout: 3000 });
|
||||||
|
return true;
|
||||||
|
} catch {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
15
src/shared/src/secrets/index.ts
Normal file
15
src/shared/src/secrets/index.ts
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
export type { SecretStore, SecretStoreDeps } from './types.js';
|
||||||
|
export { FileSecretStore } from './file-store.js';
|
||||||
|
export { GnomeKeyringStore } from './gnome-keyring.js';
|
||||||
|
export type { GnomeKeyringDeps, RunCommand } from './gnome-keyring.js';
|
||||||
|
|
||||||
|
import { GnomeKeyringStore } from './gnome-keyring.js';
|
||||||
|
import { FileSecretStore } from './file-store.js';
|
||||||
|
import type { SecretStore, SecretStoreDeps } from './types.js';
|
||||||
|
|
||||||
|
export async function createSecretStore(deps?: SecretStoreDeps): Promise<SecretStore> {
|
||||||
|
if (await GnomeKeyringStore.isAvailable()) {
|
||||||
|
return new GnomeKeyringStore();
|
||||||
|
}
|
||||||
|
return new FileSecretStore(deps);
|
||||||
|
}
|
||||||
10
src/shared/src/secrets/types.ts
Normal file
10
src/shared/src/secrets/types.ts
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
export interface SecretStore {
|
||||||
|
get(key: string): Promise<string | null>;
|
||||||
|
set(key: string, value: string): Promise<void>;
|
||||||
|
delete(key: string): Promise<boolean>;
|
||||||
|
backend(): string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface SecretStoreDeps {
|
||||||
|
configDir?: string;
|
||||||
|
}
|
||||||
24
src/shared/tests/secrets/factory.test.ts
Normal file
24
src/shared/tests/secrets/factory.test.ts
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
import { describe, it, expect, vi, afterEach } from 'vitest';
|
||||||
|
import { createSecretStore } from '../../src/secrets/index.js';
|
||||||
|
import { GnomeKeyringStore } from '../../src/secrets/gnome-keyring.js';
|
||||||
|
import { FileSecretStore } from '../../src/secrets/file-store.js';
|
||||||
|
|
||||||
|
afterEach(() => {
|
||||||
|
vi.restoreAllMocks();
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('createSecretStore', () => {
|
||||||
|
it('returns GnomeKeyringStore when secret-tool is available', async () => {
|
||||||
|
vi.spyOn(GnomeKeyringStore, 'isAvailable').mockResolvedValue(true);
|
||||||
|
const store = await createSecretStore();
|
||||||
|
expect(store.backend()).toBe('gnome-keyring');
|
||||||
|
expect(store).toBeInstanceOf(GnomeKeyringStore);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns FileSecretStore when secret-tool is not available', async () => {
|
||||||
|
vi.spyOn(GnomeKeyringStore, 'isAvailable').mockResolvedValue(false);
|
||||||
|
const store = await createSecretStore();
|
||||||
|
expect(store.backend()).toBe('file');
|
||||||
|
expect(store).toBeInstanceOf(FileSecretStore);
|
||||||
|
});
|
||||||
|
});
|
||||||
93
src/shared/tests/secrets/file-store.test.ts
Normal file
93
src/shared/tests/secrets/file-store.test.ts
Normal file
@@ -0,0 +1,93 @@
|
|||||||
|
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
||||||
|
import { mkdtempSync, rmSync, statSync, existsSync, writeFileSync } from 'node:fs';
|
||||||
|
import { join } from 'node:path';
|
||||||
|
import { tmpdir } from 'node:os';
|
||||||
|
import { FileSecretStore } from '../../src/secrets/file-store.js';
|
||||||
|
|
||||||
|
let tempDir: string;
|
||||||
|
|
||||||
|
beforeEach(() => {
|
||||||
|
tempDir = mkdtempSync(join(tmpdir(), 'mcpctl-secrets-test-'));
|
||||||
|
});
|
||||||
|
|
||||||
|
afterEach(() => {
|
||||||
|
rmSync(tempDir, { recursive: true, force: true });
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('FileSecretStore', () => {
|
||||||
|
it('returns null for missing key', async () => {
|
||||||
|
const store = new FileSecretStore({ configDir: tempDir });
|
||||||
|
expect(await store.get('nonexistent')).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('stores and retrieves a secret', async () => {
|
||||||
|
const store = new FileSecretStore({ configDir: tempDir });
|
||||||
|
await store.set('api-key', 'sk-12345');
|
||||||
|
expect(await store.get('api-key')).toBe('sk-12345');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('overwrites existing values', async () => {
|
||||||
|
const store = new FileSecretStore({ configDir: tempDir });
|
||||||
|
await store.set('api-key', 'old-value');
|
||||||
|
await store.set('api-key', 'new-value');
|
||||||
|
expect(await store.get('api-key')).toBe('new-value');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('stores multiple keys', async () => {
|
||||||
|
const store = new FileSecretStore({ configDir: tempDir });
|
||||||
|
await store.set('key-a', 'value-a');
|
||||||
|
await store.set('key-b', 'value-b');
|
||||||
|
expect(await store.get('key-a')).toBe('value-a');
|
||||||
|
expect(await store.get('key-b')).toBe('value-b');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('deletes a key', async () => {
|
||||||
|
const store = new FileSecretStore({ configDir: tempDir });
|
||||||
|
await store.set('api-key', 'sk-12345');
|
||||||
|
expect(await store.delete('api-key')).toBe(true);
|
||||||
|
expect(await store.get('api-key')).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns false when deleting nonexistent key', async () => {
|
||||||
|
const store = new FileSecretStore({ configDir: tempDir });
|
||||||
|
expect(await store.delete('nonexistent')).toBe(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('sets 0600 permissions on secrets file', async () => {
|
||||||
|
const store = new FileSecretStore({ configDir: tempDir });
|
||||||
|
await store.set('api-key', 'sk-12345');
|
||||||
|
const stat = statSync(join(tempDir, 'secrets'));
|
||||||
|
expect(stat.mode & 0o777).toBe(0o600);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('creates config dir if missing', async () => {
|
||||||
|
const nested = join(tempDir, 'sub', 'dir');
|
||||||
|
const store = new FileSecretStore({ configDir: nested });
|
||||||
|
await store.set('api-key', 'sk-12345');
|
||||||
|
expect(existsSync(join(nested, 'secrets'))).toBe(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('recovers from corrupted JSON', async () => {
|
||||||
|
writeFileSync(join(tempDir, 'secrets'), 'NOT JSON!!!', 'utf-8');
|
||||||
|
const store = new FileSecretStore({ configDir: tempDir });
|
||||||
|
// Should not throw, returns null for any key
|
||||||
|
expect(await store.get('api-key')).toBeNull();
|
||||||
|
// Should be able to write over corrupted file
|
||||||
|
await store.set('api-key', 'fresh-value');
|
||||||
|
expect(await store.get('api-key')).toBe('fresh-value');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('reports file backend', () => {
|
||||||
|
const store = new FileSecretStore({ configDir: tempDir });
|
||||||
|
expect(store.backend()).toBe('file');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('preserves other keys on delete', async () => {
|
||||||
|
const store = new FileSecretStore({ configDir: tempDir });
|
||||||
|
await store.set('key-a', 'value-a');
|
||||||
|
await store.set('key-b', 'value-b');
|
||||||
|
await store.delete('key-a');
|
||||||
|
expect(await store.get('key-a')).toBeNull();
|
||||||
|
expect(await store.get('key-b')).toBe('value-b');
|
||||||
|
});
|
||||||
|
});
|
||||||
125
src/shared/tests/secrets/gnome-keyring.test.ts
Normal file
125
src/shared/tests/secrets/gnome-keyring.test.ts
Normal file
@@ -0,0 +1,125 @@
|
|||||||
|
import { describe, it, expect, vi } from 'vitest';
|
||||||
|
import { GnomeKeyringStore } from '../../src/secrets/gnome-keyring.js';
|
||||||
|
import type { RunCommand } from '../../src/secrets/gnome-keyring.js';
|
||||||
|
|
||||||
|
function mockRun(
|
||||||
|
responses: Record<string, { stdout: string; code: number }>,
|
||||||
|
): RunCommand {
|
||||||
|
return vi.fn(async (cmd: string, args: string[], _stdin?: string) => {
|
||||||
|
const key = `${cmd} ${args.join(' ')}`;
|
||||||
|
for (const [pattern, response] of Object.entries(responses)) {
|
||||||
|
if (key.includes(pattern)) return response;
|
||||||
|
}
|
||||||
|
return { stdout: '', code: 1 };
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
describe('GnomeKeyringStore', () => {
  // Every test injects its own fake command runner; this wraps construction.
  const makeStore = (runner) => new GnomeKeyringStore({ run: runner });

  describe('get', () => {
    it('returns value on success', async () => {
      const runner = mockRun({ lookup: { stdout: 'my-secret', code: 0 } });
      expect(await makeStore(runner).get('api-key')).toBe('my-secret');
    });

    it('returns null on exit code 1', async () => {
      const runner = mockRun({ lookup: { stdout: '', code: 1 } });
      expect(await makeStore(runner).get('api-key')).toBeNull();
    });

    it('returns null on empty stdout', async () => {
      const runner = mockRun({ lookup: { stdout: '', code: 0 } });
      expect(await makeStore(runner).get('api-key')).toBeNull();
    });

    it('returns null on error', async () => {
      // Runner rejects (e.g. spawn failure); get() must swallow it and return null.
      const runner = vi.fn().mockRejectedValue(new Error('timeout'));
      expect(await makeStore(runner).get('api-key')).toBeNull();
    });

    it('calls secret-tool with correct args', async () => {
      const runner = vi.fn().mockResolvedValue({ stdout: 'val', code: 0 });
      await makeStore(runner).get('my-key');
      // secret-tool lookup is keyed by the fixed service attribute plus the key name.
      expect(runner).toHaveBeenCalledWith(
        'secret-tool',
        ['lookup', 'service', 'mcpctl', 'key', 'my-key'],
      );
    });
  });

  describe('set', () => {
    it('calls secret-tool store with value as stdin', async () => {
      const runner = vi.fn().mockResolvedValue({ stdout: '', code: 0 });
      await makeStore(runner).set('api-key', 'secret-value');
      // The secret value travels via stdin (third runner argument), never argv.
      expect(runner).toHaveBeenCalledWith(
        'secret-tool',
        ['store', '--label', 'mcpctl: api-key', 'service', 'mcpctl', 'key', 'api-key'],
        'secret-value',
      );
    });

    it('throws on non-zero exit code', async () => {
      const runner = vi.fn().mockResolvedValue({ stdout: '', code: 1 });
      await expect(makeStore(runner).set('api-key', 'val')).rejects.toThrow('exited with code 1');
    });
  });

  describe('delete', () => {
    it('returns true on success', async () => {
      const runner = mockRun({ clear: { stdout: '', code: 0 } });
      expect(await makeStore(runner).delete('api-key')).toBe(true);
    });

    it('returns false on failure', async () => {
      const runner = mockRun({ clear: { stdout: '', code: 1 } });
      expect(await makeStore(runner).delete('api-key')).toBe(false);
    });

    it('returns false on error', async () => {
      // A rejecting runner is reported as "not deleted", not as a thrown error.
      const runner = vi.fn().mockRejectedValue(new Error('fail'));
      expect(await makeStore(runner).delete('api-key')).toBe(false);
    });

    it('calls secret-tool clear with correct args', async () => {
      const runner = vi.fn().mockResolvedValue({ stdout: '', code: 0 });
      await makeStore(runner).delete('my-key');
      expect(runner).toHaveBeenCalledWith(
        'secret-tool',
        ['clear', 'service', 'mcpctl', 'key', 'my-key'],
      );
    });
  });

  describe('isAvailable', () => {
    it('returns true when secret-tool exists', async () => {
      const runner = vi.fn().mockResolvedValue({ stdout: '0.20', code: 0 });
      expect(await GnomeKeyringStore.isAvailable({ run: runner })).toBe(true);
    });

    it('returns false when secret-tool not found', async () => {
      const runner = vi.fn().mockRejectedValue(new Error('ENOENT'));
      expect(await GnomeKeyringStore.isAvailable({ run: runner })).toBe(false);
    });

    it('returns false on non-zero exit', async () => {
      const runner = vi.fn().mockResolvedValue({ stdout: '', code: 127 });
      expect(await GnomeKeyringStore.isAvailable({ run: runner })).toBe(false);
    });
  });

  it('reports gnome-keyring backend', () => {
    const runner = vi.fn().mockResolvedValue({ stdout: '', code: 0 });
    expect(makeStore(runner).backend()).toBe('gnome-keyring');
  });
});
|
||||||
Reference in New Issue
Block a user