feat: implement v2 3-tier architecture (mcpctl → mcplocal → mcpd)
Some checks failed
CI / lint (pull_request) Has been cancelled
CI / typecheck (pull_request) Has been cancelled
CI / test (pull_request) Has been cancelled
CI / build (pull_request) Has been cancelled
CI / package (pull_request) Has been cancelled

- Rename local-proxy to mcplocal with HTTP server, LLM pipeline, mcpd discovery
- Add LLM pre-processing: token estimation, filter cache, metrics, Gemini CLI + DeepSeek providers
- Add mcpd auth (login/logout) and MCP proxy endpoints
- Update CLI: dual URLs (mcplocalUrl/mcpdUrl), auth commands, --direct flag
- Add tiered health monitoring, shell completions, e2e integration tests
- 57 test files, 597 tests passing

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Michal
2026-02-22 11:42:06 +00:00
parent a4fe5fdbe2
commit b8c5cf718a
82 changed files with 5832 additions and 123 deletions

View File

@@ -2,7 +2,8 @@ import http from 'node:http';
export interface ApiClientOptions {
baseUrl: string;
timeout?: number;
timeout?: number | undefined;
token?: string | undefined;
}
export interface ApiResponse<T = unknown> {
@@ -20,16 +21,20 @@ export class ApiError extends Error {
}
}
function request<T>(method: string, url: string, timeout: number, body?: unknown): Promise<ApiResponse<T>> {
function request<T>(method: string, url: string, timeout: number, body?: unknown, token?: string): Promise<ApiResponse<T>> {
return new Promise((resolve, reject) => {
const parsed = new URL(url);
const headers: Record<string, string> = { 'Content-Type': 'application/json' };
if (token) {
headers['Authorization'] = `Bearer ${token}`;
}
const opts: http.RequestOptions = {
hostname: parsed.hostname,
port: parsed.port,
path: parsed.pathname + parsed.search,
method,
timeout,
headers: { 'Content-Type': 'application/json' },
headers,
};
const req = http.request(opts, (res) => {
@@ -64,28 +69,30 @@ function request<T>(method: string, url: string, timeout: number, body?: unknown
export class ApiClient {
private baseUrl: string;
private timeout: number;
private token?: string | undefined;
constructor(opts: ApiClientOptions) {
this.baseUrl = opts.baseUrl.replace(/\/$/, '');
this.timeout = opts.timeout ?? 10000;
this.token = opts.token;
}
async get<T = unknown>(path: string): Promise<T> {
const res = await request<T>('GET', `${this.baseUrl}${path}`, this.timeout);
const res = await request<T>('GET', `${this.baseUrl}${path}`, this.timeout, undefined, this.token);
return res.data;
}
async post<T = unknown>(path: string, body?: unknown): Promise<T> {
const res = await request<T>('POST', `${this.baseUrl}${path}`, this.timeout, body);
const res = await request<T>('POST', `${this.baseUrl}${path}`, this.timeout, body, this.token);
return res.data;
}
async put<T = unknown>(path: string, body?: unknown): Promise<T> {
const res = await request<T>('PUT', `${this.baseUrl}${path}`, this.timeout, body);
const res = await request<T>('PUT', `${this.baseUrl}${path}`, this.timeout, body, this.token);
return res.data;
}
async delete(path: string): Promise<void> {
await request('DELETE', `${this.baseUrl}${path}`, this.timeout);
await request('DELETE', `${this.baseUrl}${path}`, this.timeout, undefined, this.token);
}
}

View File

@@ -0,0 +1,50 @@
import { existsSync, mkdirSync, readFileSync, writeFileSync, unlinkSync, chmodSync } from 'node:fs';
import { join } from 'node:path';
import { homedir } from 'node:os';
/** Credentials persisted after `mcpctl login`. */
export interface StoredCredentials {
  /** Bearer token sent on mcpd API requests. */
  token: string;
  /** mcpd base URL the token was issued by. */
  mcpdUrl: string;
  /** User identifier (email) for display. */
  user: string;
  /** Optional ISO-8601 expiry; absent means unknown. */
  expiresAt?: string;
}

/** Injectable dependencies so tests can redirect the config directory. */
export interface CredentialsDeps {
  configDir: string;
}

/** Default mcpctl config directory: ~/.mcpctl */
function defaultConfigDir(): string {
  return join(homedir(), '.mcpctl');
}

/** Absolute path of the credentials file inside the config dir. */
function credentialsPath(deps?: Partial<CredentialsDeps>): string {
  return join(deps?.configDir ?? defaultConfigDir(), 'credentials');
}

/**
 * Persist credentials to disk with owner-only permissions.
 *
 * The file is created with mode 0600 directly (via the `mode` option)
 * rather than chmod-after-write, so the bearer token is never briefly
 * world-readable between creation and chmod. The explicit chmod still
 * follows to tighten permissions when the file already existed with a
 * looser mode (the `mode` option only applies on creation).
 */
export function saveCredentials(creds: StoredCredentials, deps?: Partial<CredentialsDeps>): void {
  const dir = deps?.configDir ?? defaultConfigDir();
  if (!existsSync(dir)) {
    mkdirSync(dir, { recursive: true });
  }
  const path = credentialsPath(deps);
  writeFileSync(path, JSON.stringify(creds, null, 2) + '\n', { encoding: 'utf-8', mode: 0o600 });
  chmodSync(path, 0o600);
}

/**
 * Load stored credentials, or null when the file is missing or corrupt.
 *
 * A malformed credentials file is treated as "not logged in" instead of
 * throwing, so a damaged file cannot crash every command that probes
 * auth state; re-running `login` rewrites it.
 */
export function loadCredentials(deps?: Partial<CredentialsDeps>): StoredCredentials | null {
  const path = credentialsPath(deps);
  if (!existsSync(path)) {
    return null;
  }
  try {
    const raw = readFileSync(path, 'utf-8');
    return JSON.parse(raw) as StoredCredentials;
  } catch {
    return null;
  }
}

/** Delete the credentials file; returns false when none existed. */
export function deleteCredentials(deps?: Partial<CredentialsDeps>): boolean {
  const path = credentialsPath(deps);
  if (!existsSync(path)) {
    return false;
  }
  unlinkSync(path);
  return true;
}

View File

@@ -0,0 +1,2 @@
export { saveCredentials, loadCredentials, deleteCredentials } from './credentials.js';
export type { StoredCredentials, CredentialsDeps } from './credentials.js';

View File

@@ -0,0 +1,148 @@
import { Command } from 'commander';
import http from 'node:http';
import { loadConfig } from '../config/index.js';
import type { ConfigLoaderDeps } from '../config/index.js';
import { saveCredentials, loadCredentials, deleteCredentials } from '../auth/index.js';
import type { CredentialsDeps } from '../auth/index.js';
/** Interactive prompt abstraction so tests can script user answers. */
export interface PromptDeps {
input(message: string): Promise<string>;
password(message: string): Promise<string>;
}
/** Injectable collaborators for the login/logout commands. */
export interface AuthCommandDeps {
configDeps: Partial<ConfigLoaderDeps>;
credentialsDeps: Partial<CredentialsDeps>;
prompt: PromptDeps;
// Sink for user-facing output (console.log in production).
log: (...args: string[]) => void;
// Exchanges email/password for a session token at the given mcpd URL.
loginRequest: (mcpdUrl: string, email: string, password: string) => Promise<LoginResponse>;
// Best-effort server-side session invalidation.
logoutRequest: (mcpdUrl: string, token: string) => Promise<void>;
}
interface LoginResponse {
token: string;
user: { email: string };
}
/**
 * POST email/password to mcpd's login endpoint and resolve the session
 * payload. Rejects with a friendly message on 401, on any other
 * 4xx/5xx, on network failure, or after a 10s timeout.
 */
function defaultLoginRequest(mcpdUrl: string, email: string, password: string): Promise<LoginResponse> {
  const endpoint = new URL('/api/v1/auth/login', mcpdUrl);
  const payload = JSON.stringify({ email, password });
  const requestOptions: http.RequestOptions = {
    hostname: endpoint.hostname,
    port: endpoint.port,
    path: endpoint.pathname,
    method: 'POST',
    timeout: 10000,
    headers: { 'Content-Type': 'application/json', 'Content-Length': Buffer.byteLength(payload) },
  };
  return new Promise((resolve, reject) => {
    const req = http.request(requestOptions, (res) => {
      res.setEncoding('utf-8');
      let raw = '';
      res.on('data', (piece: string) => { raw += piece; });
      res.on('end', () => {
        const status = res.statusCode ?? 0;
        if (status === 401) {
          reject(new Error('Invalid credentials'));
        } else if (status >= 400) {
          reject(new Error(`Login failed (${res.statusCode}): ${raw}`));
        } else {
          resolve(JSON.parse(raw) as LoginResponse);
        }
      });
    });
    req.on('error', (err) => reject(new Error(`Cannot reach mcpd: ${err.message}`)));
    req.on('timeout', () => { req.destroy(); reject(new Error('Login request timed out')); });
    req.write(payload);
    req.end();
  });
}
/**
 * Best-effort server-side logout: POST the token to mcpd's logout
 * endpoint. Always resolves — removing the local credentials must not
 * be blocked by network errors or timeouts.
 */
function defaultLogoutRequest(mcpdUrl: string, token: string): Promise<void> {
  const endpoint = new URL('/api/v1/auth/logout', mcpdUrl);
  const requestOptions: http.RequestOptions = {
    hostname: endpoint.hostname,
    port: endpoint.port,
    path: endpoint.pathname,
    method: 'POST',
    timeout: 10000,
    headers: { 'Authorization': `Bearer ${token}` },
  };
  return new Promise((resolve) => {
    const req = http.request(requestOptions, (res) => {
      res.resume(); // drain the body; the response content is irrelevant
      res.on('end', () => resolve());
    });
    req.on('error', () => resolve()); // Don't fail logout on network errors
    req.on('timeout', () => { req.destroy(); resolve(); });
    req.end();
  });
}
/** Interactive free-text prompt backed by inquirer (lazy-loaded so the CLI starts fast). */
async function defaultInput(message: string): Promise<string> {
  const inquirer = (await import('inquirer')).default;
  const answers = await inquirer.prompt([{ type: 'input', name: 'answer', message }]);
  return answers.answer as string;
}
/** Interactive masked-input prompt backed by inquirer (lazy-loaded). */
async function defaultPassword(message: string): Promise<string> {
  const inquirer = (await import('inquirer')).default;
  const answers = await inquirer.prompt([{ type: 'password', name: 'answer', message }]);
  return answers.answer as string;
}
// Production wiring for the auth commands; tests may override any
// subset of these via the deps parameter of createLoginCommand /
// createLogoutCommand.
const defaultDeps: AuthCommandDeps = {
configDeps: {},
credentialsDeps: {},
prompt: { input: defaultInput, password: defaultPassword },
log: (...args) => console.log(...args),
loginRequest: defaultLoginRequest,
logoutRequest: defaultLogoutRequest,
};
/**
 * Build the `mcpctl login` command: prompts for email and password,
 * exchanges them for a session token at mcpd (config `mcpdUrl`, or the
 * `--mcpd-url` flag), and stores the credentials on success. On failure
 * it logs the error and sets a non-zero exit code. All side effects are
 * injectable for testing.
 */
export function createLoginCommand(deps?: Partial<AuthCommandDeps>): Command {
  const resolved = { ...defaultDeps, ...deps };
  const cmd = new Command('login');
  cmd
    .description('Authenticate with mcpd')
    .option('--mcpd-url <url>', 'mcpd URL to authenticate against')
    .action(async (opts: { mcpdUrl?: string }) => {
      const config = loadConfig(resolved.configDeps);
      const mcpdUrl = opts.mcpdUrl ?? config.mcpdUrl;
      const email = await resolved.prompt.input('Email:');
      const password = await resolved.prompt.password('Password:');
      try {
        const { token, user } = await resolved.loginRequest(mcpdUrl, email, password);
        resolved.log(`Logged in as ${user.email}`);
        saveCredentials({ token, mcpdUrl, user: user.email }, resolved.credentialsDeps);
      } catch (err) {
        resolved.log(`Login failed: ${(err as Error).message}`);
        process.exitCode = 1;
      }
    });
  return cmd;
}
/**
 * Build the `mcpctl logout` command: best-effort server-side logout,
 * then removal of the locally stored credentials. A no-op (with a
 * message) when no credentials are stored.
 */
export function createLogoutCommand(deps?: Partial<AuthCommandDeps>): Command {
  const resolved = { ...defaultDeps, ...deps };
  return new Command('logout')
    .description('Log out and remove stored credentials')
    .action(async () => {
      const stored = loadCredentials(resolved.credentialsDeps);
      if (stored === null) {
        resolved.log('Not logged in');
        return;
      }
      await resolved.logoutRequest(stored.mcpdUrl, stored.token);
      deleteCredentials(resolved.credentialsDeps);
      resolved.log('Logged out successfully');
    });
}

View File

@@ -41,6 +41,9 @@ export function createConfigCommand(deps?: Partial<ConfigCommandDeps>): Command
updates[key] = parseInt(value, 10);
} else if (key === 'registries') {
updates[key] = value.split(',').map((s) => s.trim());
} else if (key === 'daemonUrl') {
// Backward compat: map daemonUrl to mcplocalUrl
updates['mcplocalUrl'] = value;
} else {
updates[key] = value;
}

View File

@@ -2,16 +2,19 @@ import { Command } from 'commander';
import http from 'node:http';
import { loadConfig } from '../config/index.js';
import type { ConfigLoaderDeps } from '../config/index.js';
import { loadCredentials } from '../auth/index.js';
import type { CredentialsDeps } from '../auth/index.js';
import { formatJson, formatYaml } from '../formatters/index.js';
import { APP_VERSION } from '@mcpctl/shared';
export interface StatusCommandDeps {
configDeps: Partial<ConfigLoaderDeps>;
credentialsDeps: Partial<CredentialsDeps>;
log: (...args: string[]) => void;
checkDaemon: (url: string) => Promise<boolean>;
checkHealth: (url: string) => Promise<boolean>;
}
function defaultCheckDaemon(url: string): Promise<boolean> {
function defaultCheckHealth(url: string): Promise<boolean> {
return new Promise((resolve) => {
const req = http.get(`${url}/health`, { timeout: 3000 }, (res) => {
resolve(res.statusCode !== undefined && res.statusCode >= 200 && res.statusCode < 400);
@@ -27,24 +30,33 @@ function defaultCheckDaemon(url: string): Promise<boolean> {
const defaultDeps: StatusCommandDeps = {
configDeps: {},
credentialsDeps: {},
log: (...args) => console.log(...args),
checkDaemon: defaultCheckDaemon,
checkHealth: defaultCheckHealth,
};
export function createStatusCommand(deps?: Partial<StatusCommandDeps>): Command {
const { configDeps, log, checkDaemon } = { ...defaultDeps, ...deps };
const { configDeps, credentialsDeps, log, checkHealth } = { ...defaultDeps, ...deps };
return new Command('status')
.description('Show mcpctl status and connectivity')
.option('-o, --output <format>', 'output format (table, json, yaml)', 'table')
.action(async (opts: { output: string }) => {
const config = loadConfig(configDeps);
const daemonReachable = await checkDaemon(config.daemonUrl);
const creds = loadCredentials(credentialsDeps);
const [mcplocalReachable, mcpdReachable] = await Promise.all([
checkHealth(config.mcplocalUrl),
checkHealth(config.mcpdUrl),
]);
const status = {
version: APP_VERSION,
daemonUrl: config.daemonUrl,
daemonReachable,
mcplocalUrl: config.mcplocalUrl,
mcplocalReachable,
mcpdUrl: config.mcpdUrl,
mcpdReachable,
auth: creds ? { user: creds.user } : null,
registries: config.registries,
outputFormat: config.outputFormat,
};
@@ -55,7 +67,9 @@ export function createStatusCommand(deps?: Partial<StatusCommandDeps>): Command
log(formatYaml(status));
} else {
log(`mcpctl v${status.version}`);
log(`Daemon: ${status.daemonUrl} (${daemonReachable ? 'connected' : 'unreachable'})`);
log(`mcplocal: ${status.mcplocalUrl} (${mcplocalReachable ? 'connected' : 'unreachable'})`);
log(`mcpd: ${status.mcpdUrl} (${mcpdReachable ? 'connected' : 'unreachable'})`);
log(`Auth: ${creds ? `logged in as ${creds.user}` : 'not logged in'}`);
log(`Registries: ${status.registries.join(', ')}`);
log(`Output: ${status.outputFormat}`);
}

View File

@@ -1,8 +1,12 @@
import { z } from 'zod';
export const McpctlConfigSchema = z.object({
/** mcpd daemon endpoint */
daemonUrl: z.string().default('http://localhost:3000'),
/** mcplocal daemon endpoint (local LLM pre-processing proxy) */
mcplocalUrl: z.string().default('http://localhost:3200'),
/** mcpd daemon endpoint (remote instance manager) */
mcpdUrl: z.string().default('http://localhost:3100'),
/** @deprecated Use mcplocalUrl instead. Kept for backward compatibility. */
daemonUrl: z.string().optional(),
/** Active registries for search */
registries: z.array(z.enum(['official', 'glama', 'smithery'])).default(['official', 'glama', 'smithery']),
/** Cache TTL in milliseconds */
@@ -15,6 +19,13 @@ export const McpctlConfigSchema = z.object({
outputFormat: z.enum(['table', 'json', 'yaml']).default('table'),
/** Smithery API key */
smitheryApiKey: z.string().optional(),
}).transform((cfg) => {
// Backward compatibility: if old daemonUrl is set but mcplocalUrl wasn't explicitly changed,
// use daemonUrl as mcplocalUrl
if (cfg.daemonUrl && cfg.mcplocalUrl === 'http://localhost:3200') {
return { ...cfg, mcplocalUrl: cfg.daemonUrl };
}
return cfg;
});
export type McpctlConfig = z.infer<typeof McpctlConfigSchema>;

View File

@@ -11,8 +11,10 @@ import { createSetupCommand } from './commands/setup.js';
import { createClaudeCommand } from './commands/claude.js';
import { createProjectCommand } from './commands/project.js';
import { createBackupCommand, createRestoreCommand } from './commands/backup.js';
import { createLoginCommand, createLogoutCommand } from './commands/auth.js';
import { ApiClient } from './api-client.js';
import { loadConfig } from './config/index.js';
import { loadCredentials } from './auth/index.js';
export function createProgram(): Command {
const program = new Command()
@@ -20,15 +22,28 @@ export function createProgram(): Command {
.description('Manage MCP servers like kubectl manages containers')
.version(APP_VERSION, '-v, --version')
.option('-o, --output <format>', 'output format (table, json, yaml)', 'table')
.option('--daemon-url <url>', 'mcpd daemon URL');
.option('--daemon-url <url>', 'mcplocal daemon URL')
.option('--direct', 'bypass mcplocal and connect directly to mcpd');
program.addCommand(createConfigCommand());
program.addCommand(createStatusCommand());
program.addCommand(createLoginCommand());
program.addCommand(createLogoutCommand());
// Create API-backed commands
// Resolve target URL: --direct goes to mcpd, default goes to mcplocal
const config = loadConfig();
const daemonUrl = program.opts().daemonUrl ?? config.daemonUrl;
const client = new ApiClient({ baseUrl: daemonUrl });
const creds = loadCredentials();
const opts = program.opts();
let baseUrl: string;
if (opts.daemonUrl) {
baseUrl = opts.daemonUrl as string;
} else if (opts.direct) {
baseUrl = config.mcpdUrl;
} else {
baseUrl = config.mcplocalUrl;
}
const client = new ApiClient({ baseUrl, token: creds?.token ?? undefined });
const fetchResource = async (resource: string, id?: string): Promise<unknown[]> => {
if (id) {

View File

@@ -74,4 +74,27 @@ describe('ApiClient', () => {
const client = new ApiClient({ baseUrl: 'http://localhost:1' });
await expect(client.get('/anything')).rejects.toThrow();
});
// Verifies the client attaches `Authorization: Bearer <token>` when a
// token is configured, by capturing the header on a throwaway server.
it('sends Authorization header when token provided', async () => {
// We need a separate server to check the header
let receivedAuth = '';
const authServer = http.createServer((req, res) => {
receivedAuth = req.headers['authorization'] ?? '';
res.writeHead(200, { 'Content-Type': 'application/json' });
res.end(JSON.stringify({ ok: true }));
});
// Listen on an ephemeral port (0) and recover the assigned port.
const authPort = await new Promise<number>((resolve) => {
authServer.listen(0, () => {
const addr = authServer.address();
if (addr && typeof addr === 'object') resolve(addr.port);
});
});
try {
const client = new ApiClient({ baseUrl: `http://localhost:${authPort}`, token: 'my-token' });
await client.get('/test');
expect(receivedAuth).toBe('Bearer my-token');
} finally {
authServer.close();
}
});
});

View File

@@ -0,0 +1,59 @@
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import { mkdtempSync, rmSync, statSync, existsSync } from 'node:fs';
import { join } from 'node:path';
import { tmpdir } from 'node:os';
import { saveCredentials, loadCredentials, deleteCredentials } from '../../src/auth/index.js';
let tempDir: string;
// Each test gets an isolated config dir, removed afterwards.
beforeEach(() => {
tempDir = mkdtempSync(join(tmpdir(), 'mcpctl-auth-test-'));
});
afterEach(() => {
rmSync(tempDir, { recursive: true, force: true });
});
describe('saveCredentials', () => {
it('saves credentials file', () => {
saveCredentials({ token: 'tok123', mcpdUrl: 'http://x:3100', user: 'alice@test.com' }, { configDir: tempDir });
expect(existsSync(join(tempDir, 'credentials'))).toBe(true);
});
// The credentials file holds a bearer token, so it must be owner-only.
it('sets 0600 permissions', () => {
saveCredentials({ token: 'tok123', mcpdUrl: 'http://x:3100', user: 'alice@test.com' }, { configDir: tempDir });
const stat = statSync(join(tempDir, 'credentials'));
expect(stat.mode & 0o777).toBe(0o600);
});
it('creates config dir if missing', () => {
const nested = join(tempDir, 'sub', 'dir');
saveCredentials({ token: 'tok', mcpdUrl: 'http://x:3100', user: 'bob' }, { configDir: nested });
expect(existsSync(join(nested, 'credentials'))).toBe(true);
});
});
describe('loadCredentials', () => {
it('returns null when no credentials file', () => {
expect(loadCredentials({ configDir: tempDir })).toBeNull();
});
// Save then load must yield a deep-equal object, including expiresAt.
it('round-trips credentials', () => {
const creds = { token: 'tok456', mcpdUrl: 'http://remote:3100', user: 'charlie@test.com', expiresAt: '2099-01-01' };
saveCredentials(creds, { configDir: tempDir });
const loaded = loadCredentials({ configDir: tempDir });
expect(loaded).toEqual(creds);
});
});
describe('deleteCredentials', () => {
it('returns false when no credentials file', () => {
expect(deleteCredentials({ configDir: tempDir })).toBe(false);
});
it('deletes credentials file', () => {
saveCredentials({ token: 'tok', mcpdUrl: 'http://x:3100', user: 'u' }, { configDir: tempDir });
expect(deleteCredentials({ configDir: tempDir })).toBe(true);
expect(existsSync(join(tempDir, 'credentials'))).toBe(false);
});
});

View File

@@ -0,0 +1,144 @@
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import { mkdtempSync, rmSync } from 'node:fs';
import { join } from 'node:path';
import { tmpdir } from 'node:os';
import { createLoginCommand, createLogoutCommand } from '../../src/commands/auth.js';
import { saveCredentials, loadCredentials } from '../../src/auth/index.js';
import { saveConfig, DEFAULT_CONFIG } from '../../src/config/index.js';
let tempDir: string;
let output: string[];
// Collects command output lines for assertions instead of printing.
function log(...args: string[]) {
output.push(args.join(' '));
}
// Fresh temp config dir and output buffer per test.
beforeEach(() => {
tempDir = mkdtempSync(join(tmpdir(), 'mcpctl-auth-cmd-test-'));
output = [];
});
afterEach(() => {
rmSync(tempDir, { recursive: true, force: true });
});
describe('login command', () => {
it('stores credentials on successful login', async () => {
const cmd = createLoginCommand({
configDeps: { configDir: tempDir },
credentialsDeps: { configDir: tempDir },
prompt: {
input: async () => 'alice@test.com',
password: async () => 'secret123',
},
log,
loginRequest: async (_url, email, _password) => ({
token: 'session-token-123',
user: { email },
}),
logoutRequest: async () => {},
});
await cmd.parseAsync([], { from: 'user' });
expect(output[0]).toContain('Logged in as alice@test.com');
const creds = loadCredentials({ configDir: tempDir });
expect(creds).not.toBeNull();
expect(creds!.token).toBe('session-token-123');
expect(creds!.user).toBe('alice@test.com');
});
// Failed login must report the error and leave no credentials behind.
it('shows error on failed login', async () => {
const cmd = createLoginCommand({
configDeps: { configDir: tempDir },
credentialsDeps: { configDir: tempDir },
prompt: {
input: async () => 'alice@test.com',
password: async () => 'wrong',
},
log,
loginRequest: async () => { throw new Error('Invalid credentials'); },
logoutRequest: async () => {},
});
await cmd.parseAsync([], { from: 'user' });
expect(output[0]).toContain('Login failed');
expect(output[0]).toContain('Invalid credentials');
const creds = loadCredentials({ configDir: tempDir });
expect(creds).toBeNull();
});
it('uses mcpdUrl from config', async () => {
saveConfig({ ...DEFAULT_CONFIG, mcpdUrl: 'http://custom:3100' }, { configDir: tempDir });
let capturedUrl = '';
const cmd = createLoginCommand({
configDeps: { configDir: tempDir },
credentialsDeps: { configDir: tempDir },
prompt: {
input: async () => 'user@test.com',
password: async () => 'pass',
},
log,
loginRequest: async (url, email) => {
capturedUrl = url;
return { token: 'tok', user: { email } };
},
logoutRequest: async () => {},
});
await cmd.parseAsync([], { from: 'user' });
expect(capturedUrl).toBe('http://custom:3100');
});
// The --mcpd-url flag takes precedence over the configured mcpdUrl.
it('allows --mcpd-url flag override', async () => {
let capturedUrl = '';
const cmd = createLoginCommand({
configDeps: { configDir: tempDir },
credentialsDeps: { configDir: tempDir },
prompt: {
input: async () => 'user@test.com',
password: async () => 'pass',
},
log,
loginRequest: async (url, email) => {
capturedUrl = url;
return { token: 'tok', user: { email } };
},
logoutRequest: async () => {},
});
await cmd.parseAsync(['--mcpd-url', 'http://override:3100'], { from: 'user' });
expect(capturedUrl).toBe('http://override:3100');
});
});
describe('logout command', () => {
it('removes credentials on logout', async () => {
saveCredentials({ token: 'tok', mcpdUrl: 'http://x:3100', user: 'alice' }, { configDir: tempDir });
let logoutCalled = false;
const cmd = createLogoutCommand({
configDeps: { configDir: tempDir },
credentialsDeps: { configDir: tempDir },
prompt: { input: async () => '', password: async () => '' },
log,
loginRequest: async () => ({ token: '', user: { email: '' } }),
logoutRequest: async () => { logoutCalled = true; },
});
await cmd.parseAsync([], { from: 'user' });
expect(output[0]).toContain('Logged out successfully');
expect(logoutCalled).toBe(true);
const creds = loadCredentials({ configDir: tempDir });
expect(creds).toBeNull();
});
it('shows not logged in when no credentials', async () => {
const cmd = createLogoutCommand({
configDeps: { configDir: tempDir },
credentialsDeps: { configDir: tempDir },
prompt: { input: async () => '', password: async () => '' },
log,
loginRequest: async () => ({ token: '', user: { email: '' } }),
logoutRequest: async () => {},
});
await cmd.parseAsync([], { from: 'user' });
expect(output[0]).toContain('Not logged in');
});
});

View File

@@ -34,23 +34,38 @@ describe('config view', () => {
await cmd.parseAsync(['view'], { from: 'user' });
expect(output).toHaveLength(1);
const parsed = JSON.parse(output[0]) as Record<string, unknown>;
expect(parsed['daemonUrl']).toBe('http://localhost:3000');
expect(parsed['mcplocalUrl']).toBe('http://localhost:3200');
expect(parsed['mcpdUrl']).toBe('http://localhost:3100');
});
it('outputs config as YAML with --output yaml', async () => {
const cmd = makeCommand();
await cmd.parseAsync(['view', '-o', 'yaml'], { from: 'user' });
expect(output[0]).toContain('daemonUrl:');
expect(output[0]).toContain('mcplocalUrl:');
});
});
describe('config set', () => {
it('sets a string value', async () => {
it('sets mcplocalUrl', async () => {
const cmd = makeCommand();
await cmd.parseAsync(['set', 'daemonUrl', 'http://new:9000'], { from: 'user' });
expect(output[0]).toContain('daemonUrl');
await cmd.parseAsync(['set', 'mcplocalUrl', 'http://new:9000'], { from: 'user' });
expect(output[0]).toContain('mcplocalUrl');
const config = loadConfig({ configDir: tempDir });
expect(config.daemonUrl).toBe('http://new:9000');
expect(config.mcplocalUrl).toBe('http://new:9000');
});
it('sets mcpdUrl', async () => {
const cmd = makeCommand();
await cmd.parseAsync(['set', 'mcpdUrl', 'http://remote:3100'], { from: 'user' });
const config = loadConfig({ configDir: tempDir });
expect(config.mcpdUrl).toBe('http://remote:3100');
});
it('maps daemonUrl to mcplocalUrl for backward compat', async () => {
const cmd = makeCommand();
await cmd.parseAsync(['set', 'daemonUrl', 'http://legacy:3000'], { from: 'user' });
const config = loadConfig({ configDir: tempDir });
expect(config.mcplocalUrl).toBe('http://legacy:3000');
});
it('sets cacheTTLMs as integer', async () => {
@@ -87,13 +102,13 @@ describe('config path', () => {
describe('config reset', () => {
it('resets to defaults', async () => {
// First set a custom value
saveConfig({ ...DEFAULT_CONFIG, daemonUrl: 'http://custom' }, { configDir: tempDir });
saveConfig({ ...DEFAULT_CONFIG, mcplocalUrl: 'http://custom' }, { configDir: tempDir });
const cmd = makeCommand();
await cmd.parseAsync(['reset'], { from: 'user' });
expect(output[0]).toContain('reset');
const config = loadConfig({ configDir: tempDir });
expect(config.daemonUrl).toBe(DEFAULT_CONFIG.daemonUrl);
expect(config.mcplocalUrl).toBe(DEFAULT_CONFIG.mcplocalUrl);
});
});

View File

@@ -4,6 +4,7 @@ import { join } from 'node:path';
import { tmpdir } from 'node:os';
import { createStatusCommand } from '../../src/commands/status.js';
import { saveConfig, DEFAULT_CONFIG } from '../../src/config/index.js';
import { saveCredentials } from '../../src/auth/index.js';
let tempDir: string;
let output: string[];
@@ -25,67 +26,101 @@ describe('status command', () => {
it('shows status in table format', async () => {
const cmd = createStatusCommand({
configDeps: { configDir: tempDir },
credentialsDeps: { configDir: tempDir },
log,
checkDaemon: async () => true,
checkHealth: async () => true,
});
await cmd.parseAsync([], { from: 'user' });
expect(output.join('\n')).toContain('mcpctl v');
expect(output.join('\n')).toContain('connected');
const out = output.join('\n');
expect(out).toContain('mcpctl v');
expect(out).toContain('mcplocal:');
expect(out).toContain('mcpd:');
expect(out).toContain('connected');
});
it('shows unreachable when daemon is down', async () => {
it('shows unreachable when daemons are down', async () => {
const cmd = createStatusCommand({
configDeps: { configDir: tempDir },
credentialsDeps: { configDir: tempDir },
log,
checkDaemon: async () => false,
checkHealth: async () => false,
});
await cmd.parseAsync([], { from: 'user' });
expect(output.join('\n')).toContain('unreachable');
});
it('shows not logged in when no credentials', async () => {
const cmd = createStatusCommand({
configDeps: { configDir: tempDir },
credentialsDeps: { configDir: tempDir },
log,
checkHealth: async () => true,
});
await cmd.parseAsync([], { from: 'user' });
expect(output.join('\n')).toContain('not logged in');
});
it('shows logged in user when credentials exist', async () => {
saveCredentials({ token: 'tok', mcpdUrl: 'http://x:3100', user: 'alice@example.com' }, { configDir: tempDir });
const cmd = createStatusCommand({
configDeps: { configDir: tempDir },
credentialsDeps: { configDir: tempDir },
log,
checkHealth: async () => true,
});
await cmd.parseAsync([], { from: 'user' });
expect(output.join('\n')).toContain('logged in as alice@example.com');
});
it('shows status in JSON format', async () => {
const cmd = createStatusCommand({
configDeps: { configDir: tempDir },
credentialsDeps: { configDir: tempDir },
log,
checkDaemon: async () => true,
checkHealth: async () => true,
});
await cmd.parseAsync(['-o', 'json'], { from: 'user' });
const parsed = JSON.parse(output[0]) as Record<string, unknown>;
expect(parsed['version']).toBe('0.1.0');
expect(parsed['daemonReachable']).toBe(true);
expect(parsed['mcplocalReachable']).toBe(true);
expect(parsed['mcpdReachable']).toBe(true);
});
it('shows status in YAML format', async () => {
const cmd = createStatusCommand({
configDeps: { configDir: tempDir },
credentialsDeps: { configDir: tempDir },
log,
checkDaemon: async () => false,
checkHealth: async () => false,
});
await cmd.parseAsync(['-o', 'yaml'], { from: 'user' });
expect(output[0]).toContain('daemonReachable: false');
expect(output[0]).toContain('mcplocalReachable: false');
});
it('uses custom daemon URL from config', async () => {
saveConfig({ ...DEFAULT_CONFIG, daemonUrl: 'http://custom:5555' }, { configDir: tempDir });
let checkedUrl = '';
it('checks correct URLs from config', async () => {
saveConfig({ ...DEFAULT_CONFIG, mcplocalUrl: 'http://local:3200', mcpdUrl: 'http://remote:3100' }, { configDir: tempDir });
const checkedUrls: string[] = [];
const cmd = createStatusCommand({
configDeps: { configDir: tempDir },
credentialsDeps: { configDir: tempDir },
log,
checkDaemon: async (url) => {
checkedUrl = url;
checkHealth: async (url) => {
checkedUrls.push(url);
return false;
},
});
await cmd.parseAsync([], { from: 'user' });
expect(checkedUrl).toBe('http://custom:5555');
expect(checkedUrls).toContain('http://local:3200');
expect(checkedUrls).toContain('http://remote:3100');
});
it('shows registries from config', async () => {
saveConfig({ ...DEFAULT_CONFIG, registries: ['official'] }, { configDir: tempDir });
const cmd = createStatusCommand({
configDeps: { configDir: tempDir },
credentialsDeps: { configDir: tempDir },
log,
checkDaemon: async () => true,
checkHealth: async () => true,
});
await cmd.parseAsync([], { from: 'user' });
expect(output.join('\n')).toContain('official');

View File

@@ -28,18 +28,25 @@ describe('loadConfig', () => {
});
it('loads config from file', () => {
saveConfig({ ...DEFAULT_CONFIG, daemonUrl: 'http://custom:5000' }, { configDir: tempDir });
saveConfig({ ...DEFAULT_CONFIG, mcplocalUrl: 'http://custom:5000' }, { configDir: tempDir });
const config = loadConfig({ configDir: tempDir });
expect(config.daemonUrl).toBe('http://custom:5000');
expect(config.mcplocalUrl).toBe('http://custom:5000');
});
it('applies defaults for missing fields', () => {
const { writeFileSync } = require('node:fs') as typeof import('node:fs');
writeFileSync(join(tempDir, 'config.json'), '{"daemonUrl":"http://x:1"}');
writeFileSync(join(tempDir, 'config.json'), '{"mcplocalUrl":"http://x:1"}');
const config = loadConfig({ configDir: tempDir });
expect(config.daemonUrl).toBe('http://x:1');
expect(config.mcplocalUrl).toBe('http://x:1');
expect(config.registries).toEqual(['official', 'glama', 'smithery']);
});
it('backward compat: daemonUrl maps to mcplocalUrl', () => {
const { writeFileSync } = require('node:fs') as typeof import('node:fs');
writeFileSync(join(tempDir, 'config.json'), '{"daemonUrl":"http://old:3000"}');
const config = loadConfig({ configDir: tempDir });
expect(config.mcplocalUrl).toBe('http://old:3000');
});
});
describe('saveConfig', () => {
@@ -57,7 +64,7 @@ describe('saveConfig', () => {
it('round-trips configuration', () => {
const custom = {
...DEFAULT_CONFIG,
daemonUrl: 'http://custom:9000',
mcplocalUrl: 'http://custom:9000',
registries: ['official' as const],
outputFormat: 'json' as const,
};
@@ -70,14 +77,14 @@ describe('saveConfig', () => {
describe('mergeConfig', () => {
it('merges overrides into existing config', () => {
saveConfig(DEFAULT_CONFIG, { configDir: tempDir });
const merged = mergeConfig({ daemonUrl: 'http://new:1234' }, { configDir: tempDir });
expect(merged.daemonUrl).toBe('http://new:1234');
const merged = mergeConfig({ mcplocalUrl: 'http://new:1234' }, { configDir: tempDir });
expect(merged.mcplocalUrl).toBe('http://new:1234');
expect(merged.registries).toEqual(DEFAULT_CONFIG.registries);
});
it('works when no config file exists', () => {
const merged = mergeConfig({ outputFormat: 'yaml' }, { configDir: tempDir });
expect(merged.outputFormat).toBe('yaml');
expect(merged.daemonUrl).toBe('http://localhost:3000');
expect(merged.mcplocalUrl).toBe('http://localhost:3200');
});
});

View File

@@ -4,7 +4,8 @@ import { McpctlConfigSchema, DEFAULT_CONFIG } from '../../src/config/schema.js';
describe('McpctlConfigSchema', () => {
it('provides sensible defaults from empty object', () => {
const config = McpctlConfigSchema.parse({});
expect(config.daemonUrl).toBe('http://localhost:3000');
expect(config.mcplocalUrl).toBe('http://localhost:3200');
expect(config.mcpdUrl).toBe('http://localhost:3100');
expect(config.registries).toEqual(['official', 'glama', 'smithery']);
expect(config.cacheTTLMs).toBe(3_600_000);
expect(config.outputFormat).toBe('table');
@@ -15,7 +16,8 @@ describe('McpctlConfigSchema', () => {
it('validates a full config', () => {
const config = McpctlConfigSchema.parse({
daemonUrl: 'http://custom:4000',
mcplocalUrl: 'http://local:3200',
mcpdUrl: 'http://custom:4000',
registries: ['official'],
cacheTTLMs: 60_000,
httpProxy: 'http://proxy:8080',
@@ -23,11 +25,26 @@ describe('McpctlConfigSchema', () => {
outputFormat: 'json',
smitheryApiKey: 'sk-test',
});
expect(config.daemonUrl).toBe('http://custom:4000');
expect(config.mcplocalUrl).toBe('http://local:3200');
expect(config.mcpdUrl).toBe('http://custom:4000');
expect(config.registries).toEqual(['official']);
expect(config.outputFormat).toBe('json');
});
it('backward compat: maps daemonUrl to mcplocalUrl', () => {
const config = McpctlConfigSchema.parse({ daemonUrl: 'http://legacy:3000' });
expect(config.mcplocalUrl).toBe('http://legacy:3000');
expect(config.mcpdUrl).toBe('http://localhost:3100');
});
it('mcplocalUrl takes precedence over daemonUrl', () => {
const config = McpctlConfigSchema.parse({
daemonUrl: 'http://legacy:3000',
mcplocalUrl: 'http://explicit:3200',
});
expect(config.mcplocalUrl).toBe('http://explicit:3200');
});
it('rejects invalid registry names', () => {
expect(() => McpctlConfigSchema.parse({ registries: ['invalid'] })).toThrow();
});

View File

@@ -12,6 +12,8 @@ describe('CLI command registration (e2e)', () => {
expect(commandNames).toContain('config');
expect(commandNames).toContain('status');
expect(commandNames).toContain('login');
expect(commandNames).toContain('logout');
expect(commandNames).toContain('get');
expect(commandNames).toContain('describe');
expect(commandNames).toContain('instance');

View File

@@ -10,13 +10,14 @@ datasource db {
// ── Users ──
model User {
id String @id @default(cuid())
email String @unique
name String?
role Role @default(USER)
version Int @default(1)
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
id String @id @default(cuid())
email String @unique
name String?
passwordHash String
role Role @default(USER)
version Int @default(1)
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
sessions Session[]
auditLogs AuditLog[]

View File

@@ -20,11 +20,13 @@
"@mcpctl/db": "workspace:*",
"@mcpctl/shared": "workspace:*",
"@prisma/client": "^6.0.0",
"bcrypt": "^5.1.1",
"dockerode": "^4.0.9",
"fastify": "^5.0.0",
"zod": "^3.24.0"
},
"devDependencies": {
"@types/bcrypt": "^5.0.2",
"@types/dockerode": "^4.0.1",
"@types/node": "^25.3.0"
}

View File

@@ -21,6 +21,8 @@ import {
HealthAggregator,
BackupService,
RestoreService,
AuthService,
McpProxyService,
} from './services/index.js';
import {
registerMcpServerRoutes,
@@ -30,6 +32,8 @@ import {
registerAuditLogRoutes,
registerHealthMonitoringRoutes,
registerBackupRoutes,
registerAuthRoutes,
registerMcpProxyRoutes,
} from './routes/index.js';
async function main(): Promise<void> {
@@ -64,6 +68,8 @@ async function main(): Promise<void> {
const healthAggregator = new HealthAggregator(metricsCollector, orchestrator);
const backupService = new BackupService(serverRepo, profileRepo, projectRepo);
const restoreService = new RestoreService(serverRepo, profileRepo, projectRepo);
const authService = new AuthService(prisma);
const mcpProxyService = new McpProxyService(instanceRepo);
// Server
const app = await createServer(config, {
@@ -87,6 +93,12 @@ async function main(): Promise<void> {
registerAuditLogRoutes(app, auditLogService);
registerHealthMonitoringRoutes(app, { healthAggregator, metricsCollector });
registerBackupRoutes(app, { backupService, restoreService });
registerAuthRoutes(app, { authService });
registerMcpProxyRoutes(app, {
mcpProxyService,
auditLogService,
authDeps: { findSession: (token) => authService.findSession(token) },
});
// Start
await app.listen({ port: config.port, host: config.host });

View File

@@ -0,0 +1,31 @@
import type { FastifyInstance } from 'fastify';
import type { AuthService } from '../services/auth.service.js';
import { createAuthMiddleware } from '../middleware/auth.js';
export interface AuthRouteDeps {
authService: AuthService;
}
/**
 * Registers session authentication routes.
 *
 * - POST /api/v1/auth/login  — exchange credentials for a session token.
 * - POST /api/v1/auth/logout — invalidate the caller's session token.
 */
export function registerAuthRoutes(app: FastifyInstance, deps: AuthRouteDeps): void {
  const authMiddleware = createAuthMiddleware({
    findSession: (token) => deps.authService.findSession(token),
  });

  // POST /api/v1/auth/login — unauthenticated by design: this is how a client
  // obtains its first session token. AuthService throws AuthenticationError
  // (statusCode 401) on bad credentials, which Fastify's error handling relays.
  app.post<{
    Body: { email: string; password: string };
  }>('/api/v1/auth/login', async (request) => {
    const { email, password } = request.body;
    return deps.authService.login(email, password);
  });

  // POST /api/v1/auth/logout — requires a valid session.
  app.post('/api/v1/auth/logout', { preHandler: [authMiddleware] }, async (request) => {
    // The auth middleware validated the `Authorization: Bearer <token>` header,
    // but extract defensively instead of relying on a non-null assertion.
    const header = request.headers.authorization ?? '';
    const token = header.startsWith('Bearer ') ? header.slice(7) : header;
    await deps.authService.logout(token);
    return { success: true };
  });
}

View File

@@ -9,3 +9,7 @@ export { registerHealthMonitoringRoutes } from './health-monitoring.js';
export type { HealthMonitoringDeps } from './health-monitoring.js';
export { registerBackupRoutes } from './backup.js';
export type { BackupDeps } from './backup.js';
export { registerAuthRoutes } from './auth.js';
export type { AuthRouteDeps } from './auth.js';
export { registerMcpProxyRoutes } from './mcp-proxy.js';
export type { McpProxyRouteDeps } from './mcp-proxy.js';

View File

@@ -0,0 +1,37 @@
import type { FastifyInstance } from 'fastify';
import type { McpProxyService } from '../services/mcp-proxy-service.js';
import type { AuditLogService } from '../services/audit-log.service.js';
import { createAuthMiddleware, type AuthDeps } from '../middleware/auth.js';
export interface McpProxyRouteDeps {
mcpProxyService: McpProxyService;
auditLogService: AuditLogService;
authDeps: AuthDeps;
}
export function registerMcpProxyRoutes(app: FastifyInstance, deps: McpProxyRouteDeps): void {
const authMiddleware = createAuthMiddleware(deps.authDeps);
app.post<{
Body: {
serverId: string;
method: string;
params?: Record<string, unknown>;
};
}>('/api/v1/mcp/proxy', { preHandler: [authMiddleware] }, async (request) => {
const { serverId, method, params } = request.body;
const result = await deps.mcpProxyService.execute({ serverId, method, params });
// Log to audit with userId (set by auth middleware)
await deps.auditLogService.create({
userId: request.userId!,
action: 'MCP_PROXY',
resource: 'mcp-server',
resourceId: serverId,
details: { method, hasParams: params !== undefined },
});
return result;
});
}

View File

@@ -0,0 +1,66 @@
import { randomUUID } from 'node:crypto';
import type { PrismaClient } from '@prisma/client';
import bcrypt from 'bcrypt';
/** Session lifetime: 30 days, expressed in milliseconds. */
const SESSION_TTL_MS = 30 * 24 * 60 * 60 * 1000;

/** Payload returned to a client after a successful login. */
export interface LoginResult {
  token: string;
  expiresAt: Date;
  user: { id: string; email: string; role: string };
}

/** Raised for failed logins or invalid sessions; maps to HTTP 401. */
export class AuthenticationError extends Error {
  /** HTTP status the API layer should translate this error into. */
  readonly statusCode = 401;

  constructor(message: string) {
    super(message);
    this.name = 'AuthenticationError';
  }
}
/**
 * Session-token authentication backed by the Prisma `user` and `session`
 * tables. Tokens are opaque UUIDs stored server-side (no JWT signing).
 */
export class AuthService {
  /**
   * Well-formed bcrypt hash used to equalize login timing when the email is
   * unknown, so response time does not reveal whether an account exists.
   * The compared value is discarded; only the constant-cost compare matters.
   */
  private static readonly DUMMY_HASH =
    '$2b$10$N9qo8uLOickgx2ZMRZoMyeIjZAgcfl7p92ldGxad68LJZdL17lhWy';

  constructor(private readonly prisma: PrismaClient) {}

  /**
   * Validate credentials and create a new session.
   *
   * @throws AuthenticationError on unknown email or wrong password. The
   *   message is deliberately identical in both cases to avoid leaking
   *   which accounts exist.
   */
  async login(email: string, password: string): Promise<LoginResult> {
    const user = await this.prisma.user.findUnique({ where: { email } });
    if (user === null) {
      // Burn a bcrypt comparison anyway so "no such user" and "bad password"
      // take comparable time (mitigates user enumeration via timing).
      await bcrypt.compare(password, AuthService.DUMMY_HASH);
      throw new AuthenticationError('Invalid email or password');
    }

    const valid = await bcrypt.compare(password, user.passwordHash);
    if (!valid) {
      throw new AuthenticationError('Invalid email or password');
    }

    const token = randomUUID();
    const expiresAt = new Date(Date.now() + SESSION_TTL_MS);
    await this.prisma.session.create({
      data: { token, userId: user.id, expiresAt },
    });

    return {
      token,
      expiresAt,
      user: { id: user.id, email: user.email, role: user.role },
    };
  }

  /** Invalidate a session token. Idempotent: a missing token is a no-op. */
  async logout(token: string): Promise<void> {
    await this.prisma.session.deleteMany({ where: { token } });
  }

  /**
   * Look up a session by token; returns null when the token is unknown.
   * Expiry is NOT checked here — callers (auth middleware) compare expiresAt.
   */
  async findSession(token: string): Promise<{ userId: string; expiresAt: Date } | null> {
    const session = await this.prisma.session.findUnique({ where: { token } });
    if (session === null) {
      return null;
    }
    return { userId: session.userId, expiresAt: session.expiresAt };
  }
}

View File

@@ -19,3 +19,7 @@ export { BackupService } from './backup/index.js';
export type { BackupBundle, BackupOptions } from './backup/index.js';
export { RestoreService } from './backup/index.js';
export type { RestoreOptions, RestoreResult, ConflictStrategy } from './backup/index.js';
export { AuthService, AuthenticationError } from './auth.service.js';
export type { LoginResult } from './auth.service.js';
export { McpProxyService } from './mcp-proxy-service.js';
export type { McpProxyRequest, McpProxyResponse } from './mcp-proxy-service.js';

View File

@@ -0,0 +1,76 @@
import type { McpInstance } from '@prisma/client';
import type { IMcpInstanceRepository } from '../repositories/interfaces.js';
import { NotFoundError } from './mcp-server.service.js';
import { InvalidStateError } from './instance.service.js';
/** A request to forward a JSON-RPC call to a running MCP server instance. */
export interface McpProxyRequest {
  serverId: string;
  method: string;
  params?: Record<string, unknown> | undefined;
}

/** JSON-RPC 2.0 response shape relayed back to the caller. */
export interface McpProxyResponse {
  jsonrpc: '2.0';
  id: number;
  result?: unknown;
  error?: { code: number; message: string; data?: unknown };
}

/**
 * Forwards JSON-RPC calls to a RUNNING instance of a managed MCP server.
 *
 * HTTP-level failures from the instance are mapped into JSON-RPC error
 * responses rather than thrown, so callers always get a relay-able payload.
 */
export class McpProxyService {
  constructor(private readonly instanceRepo: IMcpInstanceRepository) {}

  /**
   * Execute a JSON-RPC call against the first RUNNING instance of a server.
   *
   * @throws NotFoundError when no RUNNING instance exists for the server.
   * @throws InvalidStateError when the running instance has no port assigned.
   */
  async execute(request: McpProxyRequest): Promise<McpProxyResponse> {
    const instances = await this.instanceRepo.findAll(request.serverId);
    const running = instances.find((i) => i.status === 'RUNNING');
    if (!running) {
      throw new NotFoundError(`No running instance found for server '${request.serverId}'`);
    }
    if (running.port === null || running.port === undefined) {
      throw new InvalidStateError(
        `Running instance '${running.id}' for server '${request.serverId}' has no port assigned`,
      );
    }
    return this.sendJsonRpc(running, request.method, request.params);
  }

  // POSTs a single JSON-RPC request to the instance's local HTTP endpoint.
  // A fixed id of 1 is used because each proxy call is its own HTTP exchange.
  private async sendJsonRpc(
    instance: McpInstance,
    method: string,
    params?: Record<string, unknown>,
  ): Promise<McpProxyResponse> {
    const url = `http://localhost:${instance.port}`;
    const body: Record<string, unknown> = {
      jsonrpc: '2.0',
      id: 1,
      method,
    };
    if (params !== undefined) {
      body.params = params;
    }

    const response = await fetch(url, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify(body),
    });

    if (!response.ok) {
      return {
        jsonrpc: '2.0',
        id: 1,
        error: {
          code: -32000,
          message: `MCP server returned HTTP ${response.status}: ${response.statusText}`,
        },
      };
    }

    // Guard against a misbehaving server returning non-JSON: surface a
    // JSON-RPC parse error (-32700) instead of letting SyntaxError escape.
    try {
      return (await response.json()) as McpProxyResponse;
    } catch {
      return {
        jsonrpc: '2.0',
        id: 1,
        error: {
          code: -32700,
          message: 'MCP server returned a non-JSON response body',
        },
      };
    }
  }
}

View File

@@ -1,5 +1,5 @@
{
"name": "@mcpctl/local-proxy",
"name": "@mcpctl/mcplocal",
"version": "0.1.0",
"private": true,
"type": "module",
@@ -14,8 +14,10 @@
"test:run": "vitest run"
},
"dependencies": {
"@fastify/cors": "^10.0.0",
"@mcpctl/shared": "workspace:*",
"@modelcontextprotocol/sdk": "^1.0.0"
"@modelcontextprotocol/sdk": "^1.0.0",
"fastify": "^5.0.0"
},
"devDependencies": {
"@types/node": "^25.3.0"

View File

@@ -0,0 +1,39 @@
import type { McpdClient } from './http/mcpd-client.js';
import type { McpRouter } from './router.js';
import { McpdUpstream } from './upstream/mcpd.js';
interface McpdServer {
id: string;
name: string;
transport: string;
status?: string;
}
/**
* Discovers MCP servers from mcpd and registers them as upstreams in the router.
* Called periodically or on demand to keep the router in sync with mcpd.
*/
export async function refreshUpstreams(router: McpRouter, mcpdClient: McpdClient): Promise<string[]> {
const servers = await mcpdClient.get<McpdServer[]>('/api/v1/servers');
const registered: string[] = [];
// Remove stale upstreams
const currentNames = new Set(router.getUpstreamNames());
const serverNames = new Set(servers.map((s) => s.name));
for (const name of currentNames) {
if (!serverNames.has(name)) {
router.removeUpstream(name);
}
}
// Add/update upstreams for each server
for (const server of servers) {
if (!currentNames.has(server.name)) {
const upstream = new McpdUpstream(server.id, server.name, mcpdClient);
router.addUpstream(upstream);
}
registered.push(server.name);
}
return registered;
}

View File

@@ -0,0 +1,8 @@
export { TieredHealthMonitor } from './tiered.js';
export type {
TieredHealthStatus,
TieredHealthMonitorDeps,
McplocalHealth,
McpdHealth,
InstanceHealth,
} from './tiered.js';

View File

@@ -0,0 +1,98 @@
import type { McpdClient } from '../http/mcpd-client.js';
import type { ProviderRegistry } from '../providers/registry.js';
/** Health of the mcplocal process itself. */
export interface McplocalHealth {
  status: 'healthy' | 'degraded';
  uptime: number;
  llmProvider: string | null;
}

/** Reachability of the mcpd daemon. */
export interface McpdHealth {
  status: 'connected' | 'disconnected';
  url: string;
}

/** Status of a single MCP server instance as reported by mcpd. */
export interface InstanceHealth {
  name: string;
  status: string;
}

/** Aggregated health across all three tiers. */
export interface TieredHealthStatus {
  mcplocal: McplocalHealth;
  mcpd: McpdHealth;
  instances: InstanceHealth[];
}

/** Dependencies injected into the monitor. */
export interface TieredHealthMonitorDeps {
  mcpdClient: McpdClient | null;
  providerRegistry: ProviderRegistry;
  mcpdUrl: string;
}

/**
 * Aggregates health across the three tiers of the architecture: the mcplocal
 * process itself, the mcpd daemon, and the MCP server instances mcpd manages.
 */
export class TieredHealthMonitor {
  private readonly mcpdClient: McpdClient | null;
  private readonly providerRegistry: ProviderRegistry;
  private readonly mcpdUrl: string;
  /** Epoch ms when this monitor was created; used to report uptime. */
  private readonly startTime: number;

  constructor(deps: TieredHealthMonitorDeps) {
    this.mcpdClient = deps.mcpdClient;
    this.providerRegistry = deps.providerRegistry;
    this.mcpdUrl = deps.mcpdUrl;
    this.startTime = Date.now();
  }

  /** Probe every tier; the mcpd and instance checks run concurrently. */
  async checkHealth(): Promise<TieredHealthStatus> {
    const [mcpd, instances] = await Promise.all([this.checkMcpd(), this.fetchInstances()]);
    return {
      mcplocal: this.checkMcplocal(),
      mcpd,
      instances,
    };
  }

  // Local process health. Always reports 'healthy' — no degradation signal
  // is wired up here yet; 'degraded' exists in the type for future use.
  private checkMcplocal(): McplocalHealth {
    return {
      status: 'healthy',
      uptime: (Date.now() - this.startTime) / 1000,
      llmProvider: this.providerRegistry.getActive()?.name ?? null,
    };
  }

  // A single GET /health decides connectivity; any failure counts as
  // disconnected (no client configured counts the same way).
  private async checkMcpd(): Promise<McpdHealth> {
    if (this.mcpdClient === null) {
      return { status: 'disconnected', url: this.mcpdUrl };
    }
    try {
      await this.mcpdClient.get<unknown>('/health');
      return { status: 'connected', url: this.mcpdUrl };
    } catch {
      return { status: 'disconnected', url: this.mcpdUrl };
    }
  }

  // The instance list is best-effort: an unreachable mcpd yields [].
  private async fetchInstances(): Promise<InstanceHealth[]> {
    if (this.mcpdClient === null) {
      return [];
    }
    try {
      const res = await this.mcpdClient.get<{ instances: InstanceHealth[] }>('/instances');
      return res.instances;
    } catch {
      return [];
    }
  }
}

View File

@@ -0,0 +1,32 @@
/** Configuration for the mcplocal HTTP server. */
export interface HttpConfig {
  /** Port for the HTTP server (default: 3200) */
  httpPort: number;
  /** Host to bind to (default: 127.0.0.1) */
  httpHost: string;
  /** URL of the mcpd daemon (default: http://localhost:3100) */
  mcpdUrl: string;
  /** Bearer token for authenticating with mcpd */
  mcpdToken: string;
  /** Log level (default: info) */
  logLevel: 'fatal' | 'error' | 'warn' | 'info' | 'debug' | 'trace';
}

const DEFAULT_HTTP_PORT = 3200;
const DEFAULT_HTTP_HOST = '127.0.0.1';
const DEFAULT_MCPD_URL = 'http://localhost:3100';
const DEFAULT_MCPD_TOKEN = '';
const DEFAULT_LOG_LEVEL: HttpConfig['logLevel'] = 'info';

/** Levels accepted by MCPLOCAL_LOG_LEVEL; anything else falls back to the default. */
const VALID_LOG_LEVELS: ReadonlySet<string> = new Set([
  'fatal', 'error', 'warn', 'info', 'debug', 'trace',
]);

/**
 * Load HTTP server configuration from environment variables.
 *
 * Invalid values never throw: a malformed or out-of-range port and an
 * unrecognized log level each fall back to their defaults.
 *
 * @param env - Environment map (defaults to process.env); injectable for tests.
 */
export function loadHttpConfig(env: Record<string, string | undefined> = process.env): HttpConfig {
  return {
    httpPort: parsePort(env['MCPLOCAL_HTTP_PORT']),
    httpHost: env['MCPLOCAL_HTTP_HOST'] ?? DEFAULT_HTTP_HOST,
    mcpdUrl: env['MCPLOCAL_MCPD_URL'] ?? DEFAULT_MCPD_URL,
    mcpdToken: env['MCPLOCAL_MCPD_TOKEN'] ?? DEFAULT_MCPD_TOKEN,
    logLevel: parseLogLevel(env['MCPLOCAL_LOG_LEVEL']),
  };
}

/** Parse a TCP port, rejecting non-integers and values outside 0-65535. */
function parsePort(raw: string | undefined): number {
  if (raw === undefined) {
    return DEFAULT_HTTP_PORT;
  }
  const port = Number.parseInt(raw, 10);
  if (!Number.isInteger(port) || port < 0 || port > 65535) {
    return DEFAULT_HTTP_PORT;
  }
  return port;
}

/** Validate the log level instead of blindly casting the env string. */
function parseLogLevel(raw: string | undefined): HttpConfig['logLevel'] {
  if (raw !== undefined && VALID_LOG_LEVELS.has(raw)) {
    return raw as HttpConfig['logLevel'];
  }
  return DEFAULT_LOG_LEVEL;
}

View File

@@ -0,0 +1,6 @@
export { createHttpServer } from './server.js';
export type { HttpServerDeps } from './server.js';
export { loadHttpConfig } from './config.js';
export type { HttpConfig } from './config.js';
export { McpdClient, AuthenticationError, ConnectionError } from './mcpd-client.js';
export { registerProxyRoutes } from './routes/proxy.js';

View File

@@ -0,0 +1,105 @@
/**
* HTTP client for communicating with the mcpd daemon.
* Wraps fetch calls with auth headers and error handling.
*/
/** Thrown when mcpd rejects the request with a 401 Unauthorized response. */
export class AuthenticationError extends Error {
  constructor(message = 'Authentication failed: invalid or expired token') {
    super(message);
    this.name = 'AuthenticationError';
  }
}
/** Thrown when mcpd is unreachable (connection refused, DNS failure, etc.). */
export class ConnectionError extends Error {
  constructor(url: string, cause?: unknown) {
    // Append the underlying failure reason when one is available.
    let message = `Cannot connect to mcpd at ${url}`;
    if (cause instanceof Error) {
      message += `: ${cause.message}`;
    }
    super(message);
    this.name = 'ConnectionError';
  }
}
/**
 * HTTP client for the mcpd daemon's REST API.
 *
 * Every request carries the configured bearer token. Connection failures are
 * surfaced as ConnectionError, and 401 responses as AuthenticationError.
 */
export class McpdClient {
  private readonly baseUrl: string;
  private readonly token: string;

  constructor(baseUrl: string, token: string) {
    // Normalize away trailing slashes so path joining stays predictable.
    this.baseUrl = baseUrl.replace(/\/+$/, '');
    this.token = token;
  }

  async get<T>(path: string): Promise<T> {
    return this.request<T>('GET', path);
  }

  async post<T>(path: string, body?: unknown): Promise<T> {
    return this.request<T>('POST', path, body);
  }

  async put<T>(path: string, body?: unknown): Promise<T> {
    return this.request<T>('PUT', path, body);
  }

  async delete(path: string): Promise<void> {
    await this.request<unknown>('DELETE', path);
  }

  /**
   * Forward a raw request to mcpd, returning status and parsed body so a
   * proxy route can relay both verbatim.
   *
   * @throws ConnectionError when mcpd cannot be reached.
   * @throws AuthenticationError when mcpd answers 401.
   */
  async forward(
    method: string,
    path: string,
    query: string,
    body: unknown | undefined,
  ): Promise<{ status: number; body: unknown }> {
    const target = query ? `${this.baseUrl}${path}?${query}` : `${this.baseUrl}${path}`;
    const headers: Record<string, string> = {
      'Authorization': `Bearer ${this.token}`,
      'Accept': 'application/json',
    };
    const init: RequestInit = { method, headers };

    const canCarryBody = method !== 'GET' && method !== 'HEAD';
    if (canCarryBody && body !== undefined && body !== null) {
      headers['Content-Type'] = 'application/json';
      init.body = JSON.stringify(body);
    }

    let res: Response;
    try {
      res = await fetch(target, init);
    } catch (err: unknown) {
      throw new ConnectionError(this.baseUrl, err);
    }

    if (res.status === 401) {
      throw new AuthenticationError();
    }

    // mcpd should answer JSON; fall back to the raw text when it doesn't.
    const text = await res.text();
    let parsed: unknown;
    try {
      parsed = JSON.parse(text);
    } catch {
      parsed = text;
    }
    return { status: res.status, body: parsed };
  }

  // Shared implementation behind the typed convenience methods; raises a
  // plain Error on any 4xx/5xx status with the response body as detail.
  private async request<T>(method: string, path: string, body?: unknown): Promise<T> {
    const { status, body: payload } = await this.forward(method, path, '', body);
    if (status >= 400) {
      const detail =
        typeof payload === 'object' && payload !== null ? JSON.stringify(payload) : String(payload);
      throw new Error(`mcpd returned ${String(status)}: ${detail}`);
    }
    return payload as T;
  }
}

View File

@@ -0,0 +1,38 @@
/**
* Catch-all proxy route that forwards /api/v1/* requests to mcpd.
*/
import type { FastifyInstance } from 'fastify';
import { AuthenticationError, ConnectionError } from '../mcpd-client.js';
import type { McpdClient } from '../mcpd-client.js';
/**
 * Catch-all proxy: relays every /api/v1/* request to the mcpd daemon and
 * mirrors mcpd's status code and body back to the caller. Auth and connection
 * failures are translated into friendly 401/503 responses.
 */
export function registerProxyRoutes(app: FastifyInstance, client: McpdClient): void {
  app.all('/api/v1/*', async (request, reply) => {
    const qIndex = request.url.indexOf('?');
    const path = qIndex === -1 ? request.url : request.url.slice(0, qIndex);
    const querystring = qIndex === -1 ? '' : request.url.slice(qIndex + 1);

    // GET/HEAD requests carry no body; everything else forwards it as-is.
    const hasBody = request.method !== 'GET' && request.method !== 'HEAD';
    const body = hasBody ? (request.body as unknown) : undefined;

    try {
      const result = await client.forward(request.method, path, querystring, body);
      return reply.code(result.status).send(result.body);
    } catch (err: unknown) {
      if (err instanceof AuthenticationError) {
        return reply.code(401).send({
          error: 'unauthorized',
          message: 'Authentication with mcpd failed. Run `mcpctl login` to refresh your token.',
        });
      }
      if (err instanceof ConnectionError) {
        return reply.code(503).send({
          error: 'service_unavailable',
          message: 'Cannot reach mcpd daemon. Is it running?',
        });
      }
      throw err;
    }
  });
}

View File

@@ -0,0 +1,85 @@
import Fastify from 'fastify';
import type { FastifyInstance } from 'fastify';
import cors from '@fastify/cors';
import { APP_VERSION } from '@mcpctl/shared';
import type { HttpConfig } from './config.js';
import { McpdClient } from './mcpd-client.js';
import { registerProxyRoutes } from './routes/proxy.js';
import type { McpRouter } from '../router.js';
import type { HealthMonitor } from '../health.js';
import type { TieredHealthMonitor } from '../health/tiered.js';
/** Collaborators the HTTP server needs; the health monitors are optional. */
export interface HttpServerDeps {
  router: McpRouter;
  healthMonitor?: HealthMonitor | undefined;
  tieredHealthMonitor?: TieredHealthMonitor | undefined;
}

/**
 * Build the mcplocal Fastify app: health endpoints plus a catch-all proxy
 * that forwards management API calls to mcpd. The returned app is configured
 * but not yet listening.
 */
export async function createHttpServer(
  config: HttpConfig,
  deps: HttpServerDeps,
): Promise<FastifyInstance> {
  const app = Fastify({
    logger: { level: config.logLevel },
  });

  await app.register(cors, {
    origin: true,
    methods: ['GET', 'POST', 'PUT', 'DELETE', 'PATCH'],
  });

  // Summary health endpoint: local status plus an optional tiered digest.
  app.get('/health', async (_request, reply) => {
    const upstreams = deps.router.getUpstreamNames();
    const healthStatuses = deps.healthMonitor
      ? deps.healthMonitor.getAllStatuses()
      : undefined;

    let tieredSummary: { mcpd: string; llmProvider: string | null } | undefined;
    if (deps.tieredHealthMonitor) {
      const tiered = await deps.tieredHealthMonitor.checkHealth();
      tieredSummary = {
        mcpd: tiered.mcpd.status,
        llmProvider: tiered.mcplocal.llmProvider,
      };
    }

    reply.code(200).send({
      status: 'healthy',
      version: APP_VERSION,
      uptime: process.uptime(),
      timestamp: new Date().toISOString(),
      upstreams: upstreams.length,
      mcpdUrl: config.mcpdUrl,
      ...(healthStatuses !== undefined ? { health: healthStatuses } : {}),
      ...(tieredSummary !== undefined ? { tiered: tieredSummary } : {}),
    });
  });

  // Full tiered health report; 503 when no tiered monitor was wired in.
  app.get('/health/detailed', async (_request, reply) => {
    if (!deps.tieredHealthMonitor) {
      reply.code(503).send({
        error: 'Tiered health monitor not configured',
      });
      return;
    }
    const status = await deps.tieredHealthMonitor.checkHealth();
    reply.code(200).send(status);
  });

  // Minimal liveness probe for orchestrators.
  app.get('/healthz', async (_request, reply) => {
    reply.code(200).send({ status: 'ok' });
  });

  // Everything under /api/v1/* is forwarded to mcpd.
  const mcpdClient = new McpdClient(config.mcpdUrl, config.mcpdToken);
  registerProxyRoutes(app, mcpdClient);

  return app;
}

View File

@@ -4,10 +4,15 @@ export { StdioProxyServer } from './server.js';
export { StdioUpstream, HttpUpstream } from './upstream/index.js';
export { HealthMonitor } from './health.js';
export type { HealthState, HealthStatus, HealthMonitorOptions } from './health.js';
export { TieredHealthMonitor } from './health/index.js';
export type { TieredHealthStatus, TieredHealthMonitorDeps, McplocalHealth, McpdHealth, InstanceHealth } from './health/index.js';
export { main } from './main.js';
export type { MainResult } from './main.js';
export { ProviderRegistry } from './providers/index.js';
export type { LlmProvider, CompletionOptions, CompletionResult, ChatMessage } from './providers/index.js';
export { OpenAiProvider, AnthropicProvider, OllamaProvider } from './providers/index.js';
export { OpenAiProvider, AnthropicProvider, OllamaProvider, GeminiCliProvider, DeepSeekProvider } from './providers/index.js';
export { createHttpServer, loadHttpConfig, McpdClient, AuthenticationError, ConnectionError, registerProxyRoutes } from './http/index.js';
export type { HttpConfig, HttpServerDeps } from './http/index.js';
export type {
JsonRpcRequest,
JsonRpcResponse,

View File

@@ -0,0 +1,96 @@
/**
* LRU cache for filter decisions.
*
* Caches whether a given tool name + response size combination should be
* filtered by the LLM pipeline. Avoids redundant LLM calls for repeated
* queries that produce similar-sized responses.
*/
/** Tuning knobs for the filter-decision cache. */
export interface FilterCacheConfig {
  /** Maximum number of entries in the cache (default 256) */
  maxEntries: number;
  /** TTL in milliseconds for cache entries (default 3_600_000 = 1 hour) */
  ttlMs: number;
}

export const DEFAULT_FILTER_CACHE_CONFIG: FilterCacheConfig = {
  maxEntries: 256,
  ttlMs: 3_600_000,
};

/** A cached decision plus the timestamp used for TTL expiry. */
interface CacheEntry {
  shouldFilter: boolean;
  createdAt: number;
}

/**
 * LRU + TTL cache of "should this tool's responses be LLM-filtered?"
 * decisions, keyed by tool name. Avoids redundant LLM calls for tools that
 * were classified recently.
 *
 * Relies on Map's insertion-order iteration for LRU bookkeeping — the first
 * key is always the least recently used. No external dependencies.
 */
export class FilterCache {
  private entries = new Map<string, CacheEntry>();
  private readonly config: FilterCacheConfig;

  constructor(config: Partial<FilterCacheConfig> = {}) {
    this.config = { ...DEFAULT_FILTER_CACHE_CONFIG, ...config };
  }

  /**
   * Return the cached decision for a tool, or null when absent or expired.
   * A hit refreshes the entry's LRU position.
   */
  shouldFilter(toolName: string): boolean | null {
    const entry = this.entries.get(toolName);
    if (entry === undefined) {
      return null;
    }

    const expired = Date.now() - entry.createdAt > this.config.ttlMs;
    if (expired) {
      this.entries.delete(toolName);
      return null;
    }

    // Re-insert so the key moves to the most-recently-used position.
    this.entries.delete(toolName);
    this.entries.set(toolName, entry);
    return entry.shouldFilter;
  }

  /** Store a decision, evicting the least-recently-used entry when full. */
  recordDecision(toolName: string, shouldFilter: boolean): void {
    // Remove any existing entry first so re-recording refreshes position
    // and the capacity check below counts correctly.
    this.entries.delete(toolName);

    if (this.entries.size >= this.config.maxEntries) {
      const lru = this.entries.keys().next();
      if (!lru.done) {
        this.entries.delete(lru.value);
      }
    }

    this.entries.set(toolName, { shouldFilter, createdAt: Date.now() });
  }

  /** Clear all cached entries. */
  clear(): void {
    this.entries.clear();
  }

  /** Number of entries currently in the cache. */
  get size(): number {
    return this.entries.size;
  }
}

View File

@@ -0,0 +1,8 @@
export { LlmProcessor, DEFAULT_PROCESSOR_CONFIG } from './processor.js';
export type { LlmProcessorConfig, ProcessedRequest, FilteredResponse } from './processor.js';
export { RESPONSE_FILTER_SYSTEM_PROMPT, REQUEST_OPTIMIZATION_SYSTEM_PROMPT } from './prompts.js';
export { estimateTokens } from './token-counter.js';
export { FilterCache, DEFAULT_FILTER_CACHE_CONFIG } from './filter-cache.js';
export type { FilterCacheConfig } from './filter-cache.js';
export { FilterMetrics } from './metrics.js';
export type { FilterMetricsSnapshot } from './metrics.js';

View File

@@ -0,0 +1,83 @@
/**
* Metrics tracking for the LLM filter pipeline.
*
* Records token savings, cache efficiency, and filter latency to enable
* observability of the smart context optimization layer.
*/
/** Point-in-time snapshot of filter-pipeline metrics. */
export interface FilterMetricsSnapshot {
  /** Total estimated tokens that entered the filter pipeline */
  totalTokensProcessed: number;
  /** Estimated tokens saved by filtering */
  tokensSaved: number;
  /** Number of cache hits (filter decision reused) */
  cacheHits: number;
  /** Number of cache misses (required fresh decision) */
  cacheMisses: number;
  /** Number of filter operations performed */
  filterCount: number;
  /** Average filter latency in milliseconds (0 if no operations) */
  averageFilterLatencyMs: number;
}

/**
 * Accumulates counters for the LLM filter pipeline: token savings, cache
 * efficiency, and latency. Plain fields suffice for single-threaded Node.js;
 * call getStats() for a snapshot.
 */
export class FilterMetrics {
  private tokensIn = 0;
  private tokensTrimmed = 0;
  private hits = 0;
  private misses = 0;
  private operations = 0;
  private latencySumMs = 0;

  /**
   * Record one filter operation.
   *
   * Negative savings (a response that grew after filtering) are clamped to
   * zero so tokensSaved never decreases.
   *
   * @param originalTokens - Estimated tokens before filtering.
   * @param filteredTokens - Estimated tokens after filtering.
   * @param latencyMs - Time the filter operation took, in ms.
   */
  recordFilter(originalTokens: number, filteredTokens: number, latencyMs: number): void {
    this.tokensIn += originalTokens;
    this.tokensTrimmed += Math.max(0, originalTokens - filteredTokens);
    this.operations += 1;
    this.latencySumMs += latencyMs;
  }

  /** Record a reused cached decision. */
  recordCacheHit(): void {
    this.hits += 1;
  }

  /** Record a decision that had to be computed fresh. */
  recordCacheMiss(): void {
    this.misses += 1;
  }

  /** Snapshot of all accumulated metrics; the average is 0 before any operation. */
  getStats(): FilterMetricsSnapshot {
    const average = this.operations === 0 ? 0 : this.latencySumMs / this.operations;
    return {
      totalTokensProcessed: this.tokensIn,
      tokensSaved: this.tokensTrimmed,
      cacheHits: this.hits,
      cacheMisses: this.misses,
      filterCount: this.operations,
      averageFilterLatencyMs: average,
    };
  }

  /** Zero every counter. */
  reset(): void {
    this.tokensIn = 0;
    this.tokensTrimmed = 0;
    this.hits = 0;
    this.misses = 0;
    this.operations = 0;
    this.latencySumMs = 0;
  }
}

View File

@@ -0,0 +1,231 @@
import type { ProviderRegistry } from '../providers/registry.js';
import type { JsonRpcResponse } from '../types.js';
import { RESPONSE_FILTER_SYSTEM_PROMPT, REQUEST_OPTIMIZATION_SYSTEM_PROMPT } from './prompts.js';
import { estimateTokens } from './token-counter.js';
import { FilterCache } from './filter-cache.js';
import type { FilterCacheConfig } from './filter-cache.js';
import { FilterMetrics } from './metrics.js';
export interface LlmProcessorConfig {
/** Enable request preprocessing */
enablePreprocessing: boolean;
/** Enable response filtering */
enableFiltering: boolean;
/** Tool name patterns to skip (matched against namespaced name) */
excludeTools: string[];
/** Max tokens for LLM calls */
maxTokens: number;
/** Token threshold below which responses skip LLM filtering (default 250 tokens ~ 1000 chars) */
tokenThreshold: number;
/** Filter cache configuration (optional; omit to use defaults) */
filterCache?: FilterCacheConfig | undefined;
}
export const DEFAULT_PROCESSOR_CONFIG: LlmProcessorConfig = {
enablePreprocessing: false,
enableFiltering: true,
excludeTools: [],
maxTokens: 1024,
tokenThreshold: 250,
};
export interface ProcessedRequest {
optimized: boolean;
params: Record<string, unknown>;
}
export interface FilteredResponse {
filtered: boolean;
result: unknown;
originalSize: number;
filteredSize: number;
}
/**
 * LLM pre-processing pipeline. Intercepts MCP tool calls and uses a local
 * LLM to optimize requests and filter responses, reducing token usage for
 * the upstream Claude model.
 *
 * Includes smart context optimization:
 * - Token-based thresholds to skip filtering small responses
 * - LRU cache for filter decisions on repeated tool calls
 * - Metrics tracking for observability
 */
export class LlmProcessor {
  /** Per-tool filter/no-filter decisions (LRU with TTL, see FilterCache). */
  private readonly filterCache: FilterCache;
  /** Aggregated pipeline metrics (cache hits/misses, token savings, latency). */
  private readonly metrics: FilterMetrics;

  constructor(
    private providers: ProviderRegistry,
    private config: LlmProcessorConfig = DEFAULT_PROCESSOR_CONFIG,
  ) {
    this.filterCache = new FilterCache(config.filterCache);
    this.metrics = new FilterMetrics();
  }

  /** Methods that should never be preprocessed (protocol-level or simple CRUD) */
  private static readonly BYPASS_METHODS = new Set([
    'initialize',
    'tools/list',
    'resources/list',
    'prompts/list',
    'prompts/get',
    'resources/subscribe',
    'resources/unsubscribe',
  ]);

  /** Simple operations that don't benefit from preprocessing */
  private static readonly SIMPLE_OPERATIONS = new Set([
    'create', 'delete', 'remove', 'subscribe', 'unsubscribe',
  ]);

  /**
   * Decide whether a method/tool pair should go through the LLM pipeline.
   *
   * @param method - JSON-RPC method name (e.g. 'tools/call').
   * @param toolName - Namespaced tool name, when the method targets a tool.
   * @returns true when the call is worth pre/post-processing.
   */
  shouldProcess(method: string, toolName?: string): boolean {
    if (LlmProcessor.BYPASS_METHODS.has(method)) return false;
    if (!toolName) return false;
    // Exclusion patterns match by substring against the namespaced name.
    if (this.config.excludeTools.some((pattern) => toolName.includes(pattern))) {
      return false;
    }
    // Skip simple CRUD operations — compare against the segment after the
    // last '/' so 'slack/create_channel' is treated like 'create_channel'.
    const baseName = toolName.includes('/') ? toolName.split('/').pop()! : toolName;
    for (const op of LlmProcessor.SIMPLE_OPERATIONS) {
      if (baseName.startsWith(op)) return false;
    }
    return true;
  }

  /**
   * Optimize request parameters using the active LLM provider.
   * Falls back to the original params if the LLM is unavailable, fails, or
   * returns anything that is not a JSON object.
   */
  async preprocessRequest(toolName: string, params: Record<string, unknown>): Promise<ProcessedRequest> {
    if (!this.config.enablePreprocessing) {
      return { optimized: false, params };
    }
    const provider = this.providers.getActive();
    if (!provider) {
      return { optimized: false, params };
    }
    try {
      const result = await provider.complete({
        messages: [
          { role: 'system', content: REQUEST_OPTIMIZATION_SYSTEM_PROMPT },
          { role: 'user', content: `Tool: ${toolName}\nParameters: ${JSON.stringify(params)}` },
        ],
        maxTokens: this.config.maxTokens,
        temperature: 0,
      });
      const parsed: unknown = JSON.parse(result.content);
      // Guard: the LLM must return a JSON *object* to be usable as tool
      // arguments. Arrays, primitives, or null would corrupt the call.
      if (parsed === null || typeof parsed !== 'object' || Array.isArray(parsed)) {
        return { optimized: false, params };
      }
      return { optimized: true, params: parsed as Record<string, unknown> };
    } catch {
      // LLM failed or emitted invalid JSON — fall through to original params
      return { optimized: false, params };
    }
  }

  /**
   * Filter/summarize a tool response using the active LLM provider.
   * Falls back to the original response if the LLM is unavailable or fails.
   *
   * Uses token-based thresholds and an LRU filter cache to skip unnecessary
   * LLM calls. Records metrics for every filter operation.
   *
   * If the LLM's "filtered" output is not actually smaller than the
   * original, the original result is returned unchanged (and the tool is
   * cached as not-worth-filtering) so filtering can never inflate usage.
   */
  async filterResponse(toolName: string, response: JsonRpcResponse): Promise<FilteredResponse> {
    if (!this.config.enableFiltering) {
      const raw = JSON.stringify(response.result);
      return { filtered: false, result: response.result, originalSize: raw.length, filteredSize: raw.length };
    }
    const provider = this.providers.getActive();
    if (!provider) {
      const raw = JSON.stringify(response.result);
      return { filtered: false, result: response.result, originalSize: raw.length, filteredSize: raw.length };
    }
    // Don't filter error responses
    if (response.error) {
      return { filtered: false, result: response.result, originalSize: 0, filteredSize: 0 };
    }
    const raw = JSON.stringify(response.result);
    const tokens = estimateTokens(raw);
    // Skip small responses below the token threshold
    if (tokens < this.config.tokenThreshold) {
      return { filtered: false, result: response.result, originalSize: raw.length, filteredSize: raw.length };
    }
    // Check filter cache for a prior decision on this tool
    const cachedDecision = this.filterCache.shouldFilter(toolName);
    if (cachedDecision !== null) {
      this.metrics.recordCacheHit();
      if (!cachedDecision) {
        // Previously decided not to filter this tool's responses
        return { filtered: false, result: response.result, originalSize: raw.length, filteredSize: raw.length };
      }
    } else {
      this.metrics.recordCacheMiss();
    }
    const startTime = performance.now();
    try {
      const result = await provider.complete({
        messages: [
          { role: 'system', content: RESPONSE_FILTER_SYSTEM_PROMPT },
          { role: 'user', content: `Tool: ${toolName}\nResponse (${raw.length} chars):\n${raw}` },
        ],
        maxTokens: this.config.maxTokens,
        temperature: 0,
      });
      const filtered = JSON.parse(result.content) as unknown;
      const filteredStr = JSON.stringify(filtered);
      const latencyMs = performance.now() - startTime;
      // Cache the decision: only remember to filter when it actually shrank
      const didSave = filteredStr.length < raw.length;
      this.filterCache.recordDecision(toolName, didSave);
      if (!didSave) {
        // The LLM output was no smaller than the original — keep the
        // original so the pipeline can never increase token usage.
        this.metrics.recordFilter(tokens, tokens, latencyMs);
        return { filtered: false, result: response.result, originalSize: raw.length, filteredSize: raw.length };
      }
      this.metrics.recordFilter(tokens, estimateTokens(filteredStr), latencyMs);
      return {
        filtered: true,
        result: filtered,
        originalSize: raw.length,
        filteredSize: filteredStr.length,
      };
    } catch {
      const latencyMs = performance.now() - startTime;
      // Record a no-savings filter attempt so latency is still observable
      this.metrics.recordFilter(tokens, tokens, latencyMs);
      // LLM failed — cache as "don't filter" to avoid repeated failures
      this.filterCache.recordDecision(toolName, false);
      return { filtered: false, result: response.result, originalSize: raw.length, filteredSize: raw.length };
    }
  }

  /** Return a snapshot of filter pipeline metrics. */
  getMetrics(): ReturnType<FilterMetrics['getStats']> {
    return this.metrics.getStats();
  }

  /** Reset all metrics. */
  resetMetrics(): void {
    this.metrics.reset();
  }

  /** Clear the filter decision cache. */
  clearFilterCache(): void {
    this.filterCache.clear();
  }
}

View File

@@ -0,0 +1,21 @@
/**
 * System prompts for the LLM pre-processing pipeline.
 */

// Used by LlmProcessor.filterResponse to shrink large tool responses.
export const RESPONSE_FILTER_SYSTEM_PROMPT = `You are a data filtering assistant. Your job is to extract only the relevant information from MCP tool responses.
Rules:
- Remove redundant or verbose fields that aren't useful to the user's query
- Keep essential identifiers, names, statuses, and key metrics
- Preserve error messages and warnings in full
- If the response is already concise, return it unchanged
- Output valid JSON only, no markdown or explanations
- If you cannot parse the input, return it unchanged`;

// Used by LlmProcessor.preprocessRequest to tighten tool-call parameters.
export const REQUEST_OPTIMIZATION_SYSTEM_PROMPT = `You are a query optimization assistant. Your job is to optimize MCP tool call parameters.
Rules:
- Add appropriate filters or limits if the query is too broad
- Keep the original intent of the request
- Output valid JSON with the optimized parameters only, no markdown or explanations
- If no optimization is needed, return the original parameters unchanged`;

View File

@@ -0,0 +1,18 @@
/**
* Simple token estimation utility.
*
* Uses a heuristic of ~4 characters per token, which is a reasonable
* approximation for English text and JSON payloads. For more accurate
* counting, a tokenizer like tiktoken could be used instead.
*/
/**
 * Estimate the number of tokens in a text string using the ~4 chars/token
 * heuristic.
 *
 * @param text - The input text to estimate tokens for.
 * @returns Estimated token count (minimum 0).
 */
export function estimateTokens(text: string): number {
  const CHARS_PER_TOKEN = 4;
  return text.length === 0 ? 0 : Math.ceil(text.length / CHARS_PER_TOKEN);
}

View File

@@ -1,14 +1,26 @@
#!/usr/bin/env node
import { readFileSync } from 'node:fs';
import type { FastifyInstance } from 'fastify';
import type { ProxyConfig, UpstreamConfig } from './types.js';
import { McpRouter } from './router.js';
import { StdioProxyServer } from './server.js';
import { StdioUpstream } from './upstream/stdio.js';
import { HttpUpstream } from './upstream/http.js';
import { createHttpServer } from './http/server.js';
import { loadHttpConfig } from './http/config.js';
import type { HttpConfig } from './http/config.js';
function parseArgs(argv: string[]): { configPath: string | undefined; upstreams: string[] } {
interface ParsedArgs {
configPath: string | undefined;
upstreams: string[];
noHttp: boolean;
}
function parseArgs(argv: string[]): ParsedArgs {
let configPath: string | undefined;
const upstreams: string[] = [];
let noHttp = false;
for (let i = 2; i < argv.length; i++) {
const arg = argv[i];
if (arg === '--config' && i + 1 < argv.length) {
@@ -19,9 +31,11 @@ function parseArgs(argv: string[]): { configPath: string | undefined; upstreams:
upstreams.push(argv[++i]!);
} else if (arg?.startsWith('--upstream=')) {
upstreams.push(arg.slice('--upstream='.length));
} else if (arg === '--no-http') {
noHttp = true;
}
}
return { configPath, upstreams };
return { configPath, upstreams, noHttp };
}
function loadConfig(configPath: string): ProxyConfig {
@@ -36,8 +50,16 @@ function createUpstream(config: UpstreamConfig) {
return new HttpUpstream(config);
}
export async function main(argv: string[] = process.argv): Promise<{ router: McpRouter; server: StdioProxyServer }> {
export interface MainResult {
router: McpRouter;
server: StdioProxyServer;
httpServer: FastifyInstance | undefined;
httpConfig: HttpConfig;
}
export async function main(argv: string[] = process.argv): Promise<MainResult> {
const args = parseArgs(argv);
const httpConfig = loadHttpConfig();
let upstreamConfigs: UpstreamConfig[] = [];
@@ -85,10 +107,29 @@ export async function main(argv: string[] = process.argv): Promise<{ router: Mcp
router.addUpstream(upstream);
}
// Start stdio proxy server
const server = new StdioProxyServer(router);
server.start();
process.stderr.write(`mcpctl-proxy started with ${upstreamConfigs.length} upstream(s)\n`);
// Start HTTP server unless disabled
let httpServer: FastifyInstance | undefined;
if (!args.noHttp) {
httpServer = await createHttpServer(httpConfig, { router });
await httpServer.listen({ port: httpConfig.httpPort, host: httpConfig.httpHost });
process.stderr.write(`mcpctl-proxy HTTP server listening on ${httpConfig.httpHost}:${httpConfig.httpPort}\n`);
}
// Graceful shutdown
let shuttingDown = false;
const shutdown = async () => {
if (shuttingDown) return;
shuttingDown = true;
server.stop();
if (httpServer) {
await httpServer.close();
}
await router.closeAll();
process.exit(0);
};
@@ -96,10 +137,7 @@ export async function main(argv: string[] = process.argv): Promise<{ router: Mcp
process.on('SIGTERM', () => void shutdown());
process.on('SIGINT', () => void shutdown());
server.start();
process.stderr.write(`mcpctl-proxy started with ${upstreamConfigs.length} upstream(s)\n`);
return { router, server };
return { router, server, httpServer, httpConfig };
}
// Run when executed directly

View File

@@ -0,0 +1,191 @@
import https from 'node:https';
import type { LlmProvider, CompletionOptions, CompletionResult, ChatMessage, ToolCall } from './types.js';
/** Constructor options for DeepSeekProvider. */
export interface DeepSeekConfig {
  /** API key, sent as a Bearer token on every request. */
  apiKey: string;
  /** API origin; defaults to https://api.deepseek.com (trailing slash is stripped). */
  baseUrl?: string;
  /** Model used when a completion request does not name one (default 'deepseek-chat'). */
  defaultModel?: string;
}

/** Wire format of a chat message in the OpenAI-compatible completions API. */
interface DeepSeekMessage {
  role: string;
  content: string | null;
  tool_calls?: Array<{
    id: string;
    type: 'function';
    function: { name: string; arguments: string };
  }>;
  /** Populated from ChatMessage.toolCallId when defined (see toDeepSeekMessage). */
  tool_call_id?: string;
  name?: string;
}
/**
 * DeepSeek provider using the OpenAI-compatible chat completions API.
 * Endpoint: https://api.deepseek.com/v1/chat/completions
 */
export class DeepSeekProvider implements LlmProvider {
  readonly name = 'deepseek';
  private apiKey: string;
  private baseUrl: string;
  private defaultModel: string;

  constructor(config: DeepSeekConfig) {
    this.apiKey = config.apiKey;
    // Strip any trailing slash so URL resolution below is consistent.
    this.baseUrl = (config.baseUrl ?? 'https://api.deepseek.com').replace(/\/$/, '');
    this.defaultModel = config.defaultModel ?? 'deepseek-chat';
  }

  /**
   * Run a chat completion: map messages/tools to the OpenAI wire format,
   * POST to /v1/chat/completions, and normalize the reply via parseResponse.
   */
  async complete(options: CompletionOptions): Promise<CompletionResult> {
    const model = options.model ?? this.defaultModel;
    const body: Record<string, unknown> = {
      model,
      messages: options.messages.map(toDeepSeekMessage),
    };
    // Only include optional fields the caller actually set.
    if (options.temperature !== undefined) body.temperature = options.temperature;
    if (options.maxTokens !== undefined) body.max_tokens = options.maxTokens;
    if (options.tools && options.tools.length > 0) {
      body.tools = options.tools.map((t) => ({
        type: 'function',
        function: {
          name: t.name,
          description: t.description,
          parameters: t.inputSchema,
        },
      }));
    }
    const response = await this.request('/v1/chat/completions', body);
    return parseResponse(response);
  }

  async listModels(): Promise<string[]> {
    // DeepSeek doesn't have a public models listing endpoint;
    // return well-known models.
    return [
      'deepseek-chat',
      'deepseek-reasoner',
    ];
  }

  /**
   * Probe availability by issuing a minimal 1-token completion.
   * NOTE(review): each probe consumes a (tiny) amount of API quota.
   */
  async isAvailable(): Promise<boolean> {
    if (!this.apiKey) return false;
    try {
      // Send a minimal request to verify the API key
      await this.complete({
        messages: [{ role: 'user', content: 'hi' }],
        maxTokens: 1,
      });
      return true;
    } catch {
      return false;
    }
  }

  /**
   * Minimal JSON-over-HTTPS helper (avoids a fetch/axios dependency).
   * Rejects on socket error, 120s timeout, HTTP 429, or a non-JSON body.
   * NOTE(review): other non-2xx statuses resolve with the parsed error
   * payload rather than rejecting — parseResponse then yields empty
   * content; confirm this is the intended degradation. Also, only
   * url.pathname is sent, so a query string in `path` would be dropped.
   */
  private request(path: string, body: unknown, method = 'POST'): Promise<unknown> {
    return new Promise((resolve, reject) => {
      const url = new URL(path, this.baseUrl);
      const payload = body !== undefined ? JSON.stringify(body) : undefined;
      const opts = {
        hostname: url.hostname,
        port: url.port || 443,
        path: url.pathname,
        method,
        timeout: 120000,
        headers: {
          'Authorization': `Bearer ${this.apiKey}`,
          'Content-Type': 'application/json',
          // Content-Length must reflect byte length, not string length.
          ...(payload ? { 'Content-Length': Buffer.byteLength(payload) } : {}),
        },
      };
      const req = https.request(opts, (res) => {
        const chunks: Buffer[] = [];
        res.on('data', (chunk: Buffer) => chunks.push(chunk));
        res.on('end', () => {
          const raw = Buffer.concat(chunks).toString('utf-8');
          // Handle rate limiting
          if (res.statusCode === 429) {
            const retryAfter = res.headers['retry-after'];
            reject(new Error(`DeepSeek rate limit exceeded${retryAfter ? `. Retry after ${retryAfter}s` : ''}`));
            return;
          }
          try {
            resolve(JSON.parse(raw));
          } catch {
            reject(new Error(`Invalid JSON response from DeepSeek ${path}: ${raw.slice(0, 200)}`));
          }
        });
      });
      req.on('error', reject);
      req.on('timeout', () => {
        // Destroy the socket so the request can't complete after rejection.
        req.destroy();
        reject(new Error('DeepSeek request timed out'));
      });
      if (payload) req.write(payload);
      req.end();
    });
  }
}
/**
 * Convert a provider-neutral ChatMessage to the DeepSeek wire format,
 * including tool_call_id/name only when the source fields are defined.
 */
function toDeepSeekMessage(msg: ChatMessage): DeepSeekMessage {
  return {
    role: msg.role,
    content: msg.content,
    ...(msg.toolCallId !== undefined ? { tool_call_id: msg.toolCallId } : {}),
    ...(msg.name !== undefined ? { name: msg.name } : {}),
  };
}
/**
 * Normalize an OpenAI-style chat completion payload into the
 * provider-neutral CompletionResult shape. Missing fields default to
 * empty strings / zeros; unrecognized finish reasons map to 'stop'.
 */
function parseResponse(raw: unknown): CompletionResult {
  const data = raw as {
    choices?: Array<{
      message?: {
        content?: string | null;
        tool_calls?: Array<{
          id: string;
          function: { name: string; arguments: string };
        }>;
      };
      finish_reason?: string;
    }>;
    usage?: {
      prompt_tokens?: number;
      completion_tokens?: number;
      total_tokens?: number;
    };
  };
  const choice = data.choices?.[0];
  const message = choice?.message;

  const toolCalls: ToolCall[] = [];
  for (const call of message?.tool_calls ?? []) {
    toolCalls.push({
      id: call.id,
      name: call.function.name,
      arguments: safeParse(call.function.arguments),
    });
  }

  let finishReason: 'stop' | 'length' | 'tool_calls';
  switch (choice?.finish_reason) {
    case 'tool_calls':
      finishReason = 'tool_calls';
      break;
    case 'length':
      finishReason = 'length';
      break;
    default:
      finishReason = 'stop';
  }

  return {
    content: message?.content ?? '',
    toolCalls,
    usage: {
      promptTokens: data.usage?.prompt_tokens ?? 0,
      completionTokens: data.usage?.completion_tokens ?? 0,
      totalTokens: data.usage?.total_tokens ?? 0,
    },
    finishReason,
  };
}

/** Parse tool-call arguments; malformed JSON yields an empty object. */
function safeParse(json: string): Record<string, unknown> {
  let parsed: Record<string, unknown>;
  try {
    parsed = JSON.parse(json) as Record<string, unknown>;
  } catch {
    parsed = {};
  }
  return parsed;
}

View File

@@ -0,0 +1,113 @@
import { spawn } from 'node:child_process';
import { execFile } from 'node:child_process';
import { promisify } from 'node:util';
import type { LlmProvider, CompletionOptions, CompletionResult } from './types.js';
const execFileAsync = promisify(execFile);
/** Constructor options for GeminiCliProvider. */
export interface GeminiCliConfig {
  /** Path to the `gemini` executable; defaults to 'gemini' on $PATH. */
  binaryPath?: string;
  /** Model passed via -m when a request does not name one (default 'gemini-2.5-flash'). */
  defaultModel?: string;
  /** Per-invocation timeout for completions, in milliseconds (default 30000). */
  timeoutMs?: number;
}
/**
 * Gemini CLI provider. Spawns the `gemini` binary in non-interactive mode
 * using the -p (--prompt) flag and captures stdout.
 *
 * Note: This provider does not support tool calls since the CLI interface
 * only returns text output. toolCalls will always be an empty array.
 */
export class GeminiCliProvider implements LlmProvider {
  readonly name = 'gemini-cli';
  private binaryPath: string;
  private defaultModel: string;
  private timeoutMs: number;

  constructor(config?: GeminiCliConfig) {
    this.binaryPath = config?.binaryPath ?? 'gemini';
    this.defaultModel = config?.defaultModel ?? 'gemini-2.5-flash';
    this.timeoutMs = config?.timeoutMs ?? 30000;
  }

  /**
   * Run a completion by flattening the chat history into one prompt string
   * and invoking the CLI once.
   *
   * NOTE(review): options.maxTokens/temperature are ignored (only -p/-m/-o
   * are passed), token usage is always reported as zeros, and the prompt
   * travels via argv — very large histories could hit OS argument-length
   * limits; confirm acceptable for expected inputs.
   */
  async complete(options: CompletionOptions): Promise<CompletionResult> {
    const model = options.model ?? this.defaultModel;
    // Build prompt from messages
    const prompt = options.messages
      .map((m) => {
        if (m.role === 'system') return `System: ${m.content}`;
        if (m.role === 'user') return m.content;
        if (m.role === 'assistant') return `Assistant: ${m.content}`;
        return m.content;
      })
      .join('\n\n');
    // -p prompt, -m model, -o text => plain text on stdout
    const args = ['-p', prompt, '-m', model, '-o', 'text'];
    const content = await this.spawn(args);
    return {
      content: content.trim(),
      toolCalls: [],
      usage: {
        promptTokens: 0,
        completionTokens: 0,
        totalTokens: 0,
      },
      finishReason: 'stop',
    };
  }

  async listModels(): Promise<string[]> {
    // The Gemini CLI does not expose a model listing command;
    // return well-known models.
    return [
      'gemini-2.5-flash',
      'gemini-2.5-pro',
      'gemini-2.0-flash',
    ];
  }

  /** Availability check: `--version` must exit cleanly within 5 seconds. */
  async isAvailable(): Promise<boolean> {
    try {
      await execFileAsync(this.binaryPath, ['--version'], { timeout: 5000 });
      return true;
    } catch {
      return false;
    }
  }

  /**
   * Spawn the CLI and collect stdout. Resolves with stdout on exit code 0.
   * Rejects with an install hint on ENOENT (binary missing) or with the
   * first 500 chars of stderr otherwise. The spawn `timeout` option kills
   * the child after timeoutMs; the kill surfaces through the close handler
   * (code will not be 0), so timeouts reject as well.
   */
  private spawn(args: string[]): Promise<string> {
    return new Promise((resolve, reject) => {
      const child = spawn(this.binaryPath, args, {
        stdio: ['ignore', 'pipe', 'pipe'],
        timeout: this.timeoutMs,
      });
      const stdoutChunks: Buffer[] = [];
      const stderrChunks: Buffer[] = [];
      child.stdout.on('data', (chunk: Buffer) => stdoutChunks.push(chunk));
      child.stderr.on('data', (chunk: Buffer) => stderrChunks.push(chunk));
      child.on('error', (err) => {
        if ((err as NodeJS.ErrnoException).code === 'ENOENT') {
          reject(new Error(`Gemini CLI binary not found at '${this.binaryPath}'. Install with: npm install -g @google/gemini-cli`));
        } else {
          reject(err);
        }
      });
      child.on('close', (code) => {
        const stdout = Buffer.concat(stdoutChunks).toString('utf-8');
        if (code === 0) {
          resolve(stdout);
        } else {
          const stderr = Buffer.concat(stderrChunks).toString('utf-8');
          reject(new Error(`Gemini CLI exited with code ${code}: ${stderr.slice(0, 500)}`));
        }
      });
    });
  }
}

View File

@@ -5,4 +5,8 @@ export { AnthropicProvider } from './anthropic.js';
export type { AnthropicConfig } from './anthropic.js';
export { OllamaProvider } from './ollama.js';
export type { OllamaConfig } from './ollama.js';
export { GeminiCliProvider } from './gemini-cli.js';
export type { GeminiCliConfig } from './gemini-cli.js';
export { DeepSeekProvider } from './deepseek.js';
export type { DeepSeekConfig } from './deepseek.js';
export { ProviderRegistry } from './registry.js';

View File

@@ -1,4 +1,5 @@
import type { UpstreamConnection, JsonRpcRequest, JsonRpcResponse, JsonRpcNotification } from './types.js';
import type { LlmProcessor } from './llm/processor.js';
/**
* Routes MCP requests to the appropriate upstream server.
@@ -15,6 +16,11 @@ export class McpRouter {
private resourceToServer = new Map<string, string>();
private promptToServer = new Map<string, string>();
private notificationHandler: ((notification: JsonRpcNotification) => void) | null = null;
/** Optional LLM pipeline applied to tools/call routing (see routeToolCall). */
private llmProcessor: LlmProcessor | null = null;

/** Install the LLM processor used to optimize/filter tool calls. */
setLlmProcessor(processor: LlmProcessor): void {
  this.llmProcessor = processor;
}
addUpstream(connection: UpstreamConnection): void {
this.upstreams.set(connection.name, connection);
@@ -247,7 +253,7 @@ export class McpRouter {
}
case 'tools/call':
return this.routeNamespacedCall(request, 'name', this.toolToServer);
return this.routeToolCall(request);
case 'resources/list': {
const resources = await this.discoverResources();
@@ -286,6 +292,37 @@ export class McpRouter {
}
}
/**
 * Route a tools/call request, optionally applying LLM pre/post-processing.
 *
 * Flow: (1) preprocess arguments via LlmProcessor.preprocessRequest,
 * (2) forward to the owning upstream, (3) filter a successful response via
 * LlmProcessor.filterResponse. When any stage declines (no processor,
 * excluded tool, unoptimized params, error response, or no filtering),
 * the request/response passes through unchanged.
 */
private async routeToolCall(request: JsonRpcRequest): Promise<JsonRpcResponse> {
  const params = request.params as Record<string, unknown> | undefined;
  const toolName = params?.['name'] as string | undefined;

  // If no processor or tool shouldn't be processed, route directly
  if (!this.llmProcessor || !toolName || !this.llmProcessor.shouldProcess('tools/call', toolName)) {
    return this.routeNamespacedCall(request, 'name', this.toolToServer);
  }

  // Preprocess request params; only rebuild the request when they changed
  const toolParams = (params?.['arguments'] ?? {}) as Record<string, unknown>;
  const processed = await this.llmProcessor.preprocessRequest(toolName, toolParams);
  const processedRequest: JsonRpcRequest = processed.optimized
    ? { ...request, params: { ...params, arguments: processed.params } }
    : request;

  // Route to upstream
  const response = await this.routeNamespacedCall(processedRequest, 'name', this.toolToServer);

  // Filter response; error responses are passed through unfiltered
  if (response.error) return response;
  const filtered = await this.llmProcessor.filterResponse(toolName, response);
  if (filtered.filtered) {
    return { ...response, result: filtered.result };
  }
  return response;
}
getUpstreamNames(): string[] {
return [...this.upstreams.keys()];
}

View File

@@ -1,2 +1,3 @@
export { StdioUpstream } from './stdio.js';
export { HttpUpstream } from './http.js';
export { McpdUpstream } from './mcpd.js';

View File

@@ -0,0 +1,68 @@
import type { UpstreamConnection, JsonRpcRequest, JsonRpcResponse } from '../types.js';
import type { McpdClient } from '../http/mcpd-client.js';
/** Body POSTed to mcpd's /api/v1/mcp/proxy endpoint. */
interface McpdProxyRequest {
  /** mcpd-side identifier of the target MCP server. */
  serverId: string;
  /** JSON-RPC method to invoke on that server. */
  method: string;
  params?: Record<string, unknown> | undefined;
}

/** Proxy reply: `error` is set on failure, otherwise `result` is used. */
interface McpdProxyResponse {
  result?: unknown;
  error?: { code: number; message: string; data?: unknown };
}
/**
 * An upstream that routes MCP requests through mcpd's /api/v1/mcp/proxy
 * endpoint. mcpd holds the credentials and manages the actual MCP server
 * connections; this class only adapts JSON-RPC to the proxy HTTP API.
 */
export class McpdUpstream implements UpstreamConnection {
  readonly name: string;
  // Flips to false after close(); subsequent sends fail fast.
  private open = true;

  constructor(
    private serverId: string,
    serverName: string,
    private mcpdClient: McpdClient,
  ) {
    this.name = serverName;
  }

  /** Forward a JSON-RPC request to mcpd's proxy endpoint and adapt the reply. */
  async send(request: JsonRpcRequest): Promise<JsonRpcResponse> {
    if (!this.open) {
      return this.internalError(request.id, `Upstream '${this.name}' is closed`);
    }
    try {
      const payload: McpdProxyRequest = {
        serverId: this.serverId,
        method: request.method,
        params: request.params,
      };
      const reply = await this.mcpdClient.post<McpdProxyResponse>('/api/v1/mcp/proxy', payload);
      if (reply.error) {
        return { jsonrpc: '2.0', id: request.id, error: reply.error };
      }
      return { jsonrpc: '2.0', id: request.id, result: reply.result };
    } catch (err) {
      // Transport-level failure: surface as a JSON-RPC internal error.
      return this.internalError(request.id, `mcpd proxy error: ${(err as Error).message}`);
    }
  }

  /** Build a JSON-RPC internal-error (-32603) response. */
  private internalError(id: JsonRpcRequest['id'], message: string): JsonRpcResponse {
    return { jsonrpc: '2.0', id, error: { code: -32603, message } };
  }

  async close(): Promise<void> {
    this.open = false;
  }

  isAlive(): boolean {
    return this.open;
  }
}

View File

@@ -0,0 +1,68 @@
import { describe, it, expect, vi } from 'vitest';
import { refreshUpstreams } from '../src/discovery.js';
import { McpRouter } from '../src/router.js';
/**
 * Build a minimal McpdClient stand-in: GET resolves with the supplied
 * server list; the remaining verbs are inert vi.fn() spies.
 */
function mockMcpdClient(servers: Array<{ id: string; name: string; transport: string }>) {
  return {
    baseUrl: 'http://test:3100',
    token: 'test-token',
    get: vi.fn(async () => servers),
    post: vi.fn(async () => ({ result: {} })),
    put: vi.fn(),
    delete: vi.fn(),
    forward: vi.fn(),
  };
}
// refreshUpstreams should mirror mcpd's server list into the router:
// add new servers, drop stale ones, and never duplicate existing entries.
describe('refreshUpstreams', () => {
  it('registers mcpd servers as upstreams', async () => {
    const router = new McpRouter();
    const client = mockMcpdClient([
      { id: 'srv-1', name: 'slack', transport: 'stdio' },
      { id: 'srv-2', name: 'github', transport: 'stdio' },
    ]);
    const registered = await refreshUpstreams(router, client as any);
    expect(registered).toEqual(['slack', 'github']);
    expect(router.getUpstreamNames()).toContain('slack');
    expect(router.getUpstreamNames()).toContain('github');
  });

  it('removes stale upstreams', async () => {
    const router = new McpRouter();
    // First refresh: 2 servers
    const client1 = mockMcpdClient([
      { id: 'srv-1', name: 'slack', transport: 'stdio' },
      { id: 'srv-2', name: 'github', transport: 'stdio' },
    ]);
    await refreshUpstreams(router, client1 as any);
    expect(router.getUpstreamNames()).toHaveLength(2);
    // Second refresh: only 1 server — 'github' must be dropped
    const client2 = mockMcpdClient([
      { id: 'srv-1', name: 'slack', transport: 'stdio' },
    ]);
    await refreshUpstreams(router, client2 as any);
    expect(router.getUpstreamNames()).toEqual(['slack']);
  });

  it('does not duplicate existing upstreams', async () => {
    const router = new McpRouter();
    const client = mockMcpdClient([
      { id: 'srv-1', name: 'slack', transport: 'stdio' },
    ]);
    // Refreshing twice with the same list must be idempotent
    await refreshUpstreams(router, client as any);
    await refreshUpstreams(router, client as any);
    expect(router.getUpstreamNames()).toEqual(['slack']);
  });

  it('handles empty server list', async () => {
    const router = new McpRouter();
    const client = mockMcpdClient([]);
    const registered = await refreshUpstreams(router, client as any);
    expect(registered).toEqual([]);
    expect(router.getUpstreamNames()).toHaveLength(0);
  });
});

View File

@@ -0,0 +1,112 @@
import { describe, it, expect, vi, afterEach } from 'vitest';
import { FilterCache, DEFAULT_FILTER_CACHE_CONFIG } from '../src/llm/filter-cache.js';
// FilterCache contract under test: per-tool boolean decisions in an LRU map
// with capacity-based eviction and Date.now-driven TTL expiry.
describe('FilterCache', () => {
  afterEach(() => {
    // Undo the Date.now spies installed by the TTL tests
    vi.restoreAllMocks();
  });

  it('returns null for unknown tool names', () => {
    const cache = new FilterCache();
    expect(cache.shouldFilter('unknown/tool')).toBeNull();
  });

  it('stores and retrieves filter decisions', () => {
    const cache = new FilterCache();
    cache.recordDecision('slack/search', true);
    expect(cache.shouldFilter('slack/search')).toBe(true);
    cache.recordDecision('github/list_repos', false);
    expect(cache.shouldFilter('github/list_repos')).toBe(false);
  });

  it('updates existing entries on re-record', () => {
    const cache = new FilterCache();
    cache.recordDecision('slack/search', true);
    expect(cache.shouldFilter('slack/search')).toBe(true);
    cache.recordDecision('slack/search', false);
    expect(cache.shouldFilter('slack/search')).toBe(false);
  });

  it('evicts oldest entry when at capacity', () => {
    const cache = new FilterCache({ maxEntries: 3 });
    cache.recordDecision('tool-a', true);
    cache.recordDecision('tool-b', false);
    cache.recordDecision('tool-c', true);
    expect(cache.size).toBe(3);
    // Adding a 4th should evict 'tool-a' (oldest)
    cache.recordDecision('tool-d', false);
    expect(cache.size).toBe(3);
    expect(cache.shouldFilter('tool-a')).toBeNull();
    expect(cache.shouldFilter('tool-b')).toBe(false);
    expect(cache.shouldFilter('tool-d')).toBe(false);
  });

  it('refreshes LRU position on access', () => {
    const cache = new FilterCache({ maxEntries: 3 });
    cache.recordDecision('tool-a', true);
    cache.recordDecision('tool-b', false);
    cache.recordDecision('tool-c', true);
    // Access tool-a to refresh it
    cache.shouldFilter('tool-a');
    // Now add tool-d — tool-b should be evicted (oldest unreferenced)
    cache.recordDecision('tool-d', false);
    expect(cache.shouldFilter('tool-a')).toBe(true);
    expect(cache.shouldFilter('tool-b')).toBeNull();
  });

  it('expires entries after TTL', () => {
    const now = Date.now();
    vi.spyOn(Date, 'now').mockReturnValue(now);
    const cache = new FilterCache({ ttlMs: 1000 });
    cache.recordDecision('slack/search', true);
    expect(cache.shouldFilter('slack/search')).toBe(true);
    // Advance time past TTL
    vi.spyOn(Date, 'now').mockReturnValue(now + 1001);
    expect(cache.shouldFilter('slack/search')).toBeNull();
    // Entry should be removed (expiry is eager on lookup, not just masked)
    expect(cache.size).toBe(0);
  });

  it('does not expire entries within TTL', () => {
    const now = Date.now();
    vi.spyOn(Date, 'now').mockReturnValue(now);
    const cache = new FilterCache({ ttlMs: 1000 });
    cache.recordDecision('slack/search', true);
    // Advance time within TTL (boundary: 999 < 1000)
    vi.spyOn(Date, 'now').mockReturnValue(now + 999);
    expect(cache.shouldFilter('slack/search')).toBe(true);
  });

  it('clears all entries', () => {
    const cache = new FilterCache();
    cache.recordDecision('tool-a', true);
    cache.recordDecision('tool-b', false);
    expect(cache.size).toBe(2);
    cache.clear();
    expect(cache.size).toBe(0);
    expect(cache.shouldFilter('tool-a')).toBeNull();
  });

  it('uses default config values', () => {
    const cache = new FilterCache();
    // Should support the default number of entries without issue
    for (let i = 0; i < DEFAULT_FILTER_CACHE_CONFIG.maxEntries; i++) {
      cache.recordDecision(`tool-${i}`, true);
    }
    expect(cache.size).toBe(DEFAULT_FILTER_CACHE_CONFIG.maxEntries);
    // One more should trigger eviction
    cache.recordDecision('extra-tool', true);
    expect(cache.size).toBe(DEFAULT_FILTER_CACHE_CONFIG.maxEntries);
  });
});

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,283 @@
import { describe, it, expect, vi } from 'vitest';
import { LlmProcessor, DEFAULT_PROCESSOR_CONFIG } from '../src/llm/processor.js';
import { ProviderRegistry } from '../src/providers/registry.js';
import type { LlmProvider, CompletionResult } from '../src/providers/types.js';
/**
 * Build a scripted LlmProvider: each complete() call yields the next entry
 * from `responses`, falling back to '{}' once the script is exhausted.
 * Usage numbers are fixed so tests can assert on them.
 */
function mockProvider(responses: string[]): LlmProvider {
  let cursor = 0;
  return {
    name: 'mock',
    async complete(): Promise<CompletionResult> {
      const content = responses[cursor++] ?? '{}';
      return {
        content,
        toolCalls: [],
        usage: { promptTokens: 10, completionTokens: 5, totalTokens: 15 },
        finishReason: 'stop',
      };
    },
    async listModels() {
      return ['mock-1'];
    },
    async isAvailable() {
      return true;
    },
  };
}
/** Create a ProviderRegistry, optionally pre-registering one provider. */
function makeRegistry(provider?: LlmProvider): ProviderRegistry {
  const registry = new ProviderRegistry();
  if (provider !== undefined) {
    registry.register(provider);
  }
  return registry;
}
// Gating logic only — shouldProcess never touches the provider registry,
// so an empty registry is fine here.
describe('LlmProcessor.shouldProcess', () => {
  it('bypasses protocol-level methods', () => {
    const proc = new LlmProcessor(makeRegistry());
    expect(proc.shouldProcess('initialize')).toBe(false);
    expect(proc.shouldProcess('tools/list')).toBe(false);
    expect(proc.shouldProcess('resources/list')).toBe(false);
    expect(proc.shouldProcess('prompts/list')).toBe(false);
  });

  it('returns false when no tool name', () => {
    const proc = new LlmProcessor(makeRegistry());
    expect(proc.shouldProcess('tools/call')).toBe(false);
  });

  it('returns true for normal tool calls', () => {
    const proc = new LlmProcessor(makeRegistry());
    expect(proc.shouldProcess('tools/call', 'slack/search_messages')).toBe(true);
  });

  it('skips excluded tools', () => {
    // excludeTools patterns match by substring on the namespaced name
    const proc = new LlmProcessor(makeRegistry(), {
      ...DEFAULT_PROCESSOR_CONFIG,
      excludeTools: ['slack'],
    });
    expect(proc.shouldProcess('tools/call', 'slack/search_messages')).toBe(false);
    expect(proc.shouldProcess('tools/call', 'github/search')).toBe(true);
  });

  it('skips simple CRUD operations', () => {
    // Matching is on the base name after '/' via startsWith
    const proc = new LlmProcessor(makeRegistry());
    expect(proc.shouldProcess('tools/call', 'slack/create_channel')).toBe(false);
    expect(proc.shouldProcess('tools/call', 'slack/delete_message')).toBe(false);
    expect(proc.shouldProcess('tools/call', 'slack/remove_user')).toBe(false);
  });
});
// preprocessRequest must be fail-open: any disabled/unavailable/erroring
// path returns the caller's original params with optimized=false.
describe('LlmProcessor.preprocessRequest', () => {
  it('returns original params when preprocessing disabled', async () => {
    const proc = new LlmProcessor(makeRegistry(mockProvider(['{}'])), {
      ...DEFAULT_PROCESSOR_CONFIG,
      enablePreprocessing: false,
    });
    const result = await proc.preprocessRequest('slack/search', { query: 'test' });
    expect(result.optimized).toBe(false);
    expect(result.params).toEqual({ query: 'test' });
  });

  it('returns original params when no provider', async () => {
    const proc = new LlmProcessor(makeRegistry(), {
      ...DEFAULT_PROCESSOR_CONFIG,
      enablePreprocessing: true,
    });
    const result = await proc.preprocessRequest('slack/search', { query: 'test' });
    expect(result.optimized).toBe(false);
  });

  it('optimizes params with LLM', async () => {
    // Provider scripted to return a rewritten params object
    const provider = mockProvider([JSON.stringify({ query: 'test', limit: 10 })]);
    const proc = new LlmProcessor(makeRegistry(provider), {
      ...DEFAULT_PROCESSOR_CONFIG,
      enablePreprocessing: true,
    });
    const result = await proc.preprocessRequest('slack/search', { query: 'test' });
    expect(result.optimized).toBe(true);
    expect(result.params).toEqual({ query: 'test', limit: 10 });
  });

  it('falls back on LLM error', async () => {
    const badProvider: LlmProvider = {
      name: 'bad',
      async complete() { throw new Error('LLM down'); },
      async listModels() { return []; },
      async isAvailable() { return false; },
    };
    const proc = new LlmProcessor(makeRegistry(badProvider), {
      ...DEFAULT_PROCESSOR_CONFIG,
      enablePreprocessing: true,
    });
    const result = await proc.preprocessRequest('slack/search', { query: 'test' });
    expect(result.optimized).toBe(false);
    expect(result.params).toEqual({ query: 'test' });
  });
});
// Response filtering: large successful payloads are condensed by the LLM;
// small payloads, errors, and LLM failures all fall back to the original.
describe('LlmProcessor.filterResponse', () => {
  it('returns original when filtering disabled', async () => {
    const proc = new LlmProcessor(makeRegistry(mockProvider([])), {
      ...DEFAULT_PROCESSOR_CONFIG,
      enableFiltering: false,
    });
    const response = { jsonrpc: '2.0' as const, id: '1', result: { data: 'big' } };
    const result = await proc.filterResponse('slack/search', response);
    expect(result.filtered).toBe(false);
  });

  it('returns original when no provider', async () => {
    const proc = new LlmProcessor(makeRegistry());
    const response = { jsonrpc: '2.0' as const, id: '1', result: { data: 'x'.repeat(600) } };
    const result = await proc.filterResponse('slack/search', response);
    expect(result.filtered).toBe(false);
  });

  it('skips small responses below token threshold', async () => {
    const proc = new LlmProcessor(makeRegistry(mockProvider([])));
    // With default tokenThreshold=250, any response < 1000 chars (~250 tokens) is skipped
    const response = { jsonrpc: '2.0' as const, id: '1', result: { data: 'small' } };
    const result = await proc.filterResponse('slack/search', response);
    expect(result.filtered).toBe(false);
  });

  it('skips error responses', async () => {
    const proc = new LlmProcessor(makeRegistry(mockProvider([])));
    const response = { jsonrpc: '2.0' as const, id: '1', error: { code: -1, message: 'fail' } };
    const result = await proc.filterResponse('slack/search', response);
    expect(result.filtered).toBe(false);
  });

  it('filters large responses with LLM', async () => {
    const largeData = { items: Array.from({ length: 50 }, (_, i) => ({ id: i, name: `item-${i}`, extra: 'x'.repeat(20) })) };
    const filteredData = { items: [{ id: 0, name: 'item-0' }, { id: 1, name: 'item-1' }] };
    const provider = mockProvider([JSON.stringify(filteredData)]);
    const proc = new LlmProcessor(makeRegistry(provider));
    const response = { jsonrpc: '2.0' as const, id: '1', result: largeData };
    const result = await proc.filterResponse('slack/search', response);
    expect(result.filtered).toBe(true);
    expect(result.filteredSize).toBeLessThan(result.originalSize);
  });

  it('falls back on LLM error', async () => {
    const badProvider: LlmProvider = {
      name: 'bad',
      async complete() { throw new Error('LLM down'); },
      async listModels() { return []; },
      async isAvailable() { return false; },
    };
    const largeData = { items: Array.from({ length: 50 }, (_, i) => ({ id: i, extra: 'x'.repeat(20) })) };
    const proc = new LlmProcessor(makeRegistry(badProvider));
    const response = { jsonrpc: '2.0' as const, id: '1', result: largeData };
    const result = await proc.filterResponse('slack/search', response);
    expect(result.filtered).toBe(false);
    expect(result.result).toEqual(largeData);
  });

  it('respects custom tokenThreshold', async () => {
    // Set a very high threshold so that even "big" responses are skipped
    const proc = new LlmProcessor(makeRegistry(mockProvider([])), {
      ...DEFAULT_PROCESSOR_CONFIG,
      tokenThreshold: 10_000,
    });
    const largeData = { items: Array.from({ length: 50 }, (_, i) => ({ id: i, name: `item-${i}` })) };
    const response = { jsonrpc: '2.0' as const, id: '1', result: largeData };
    const result = await proc.filterResponse('slack/search', response);
    expect(result.filtered).toBe(false);
  });

  it('uses filter cache to skip repeated filtering', async () => {
    const largeData = { items: Array.from({ length: 50 }, (_, i) => ({ id: i, extra: 'x'.repeat(20) })) };
    // The LLM echoes the payload back unchanged: the "filtered" output is
    // not smaller, so the cache records shouldFilter=false for this tool.
    const provider = mockProvider([JSON.stringify(largeData)]);
    const proc = new LlmProcessor(makeRegistry(provider));
    const response = { jsonrpc: '2.0' as const, id: '1', result: largeData };
    // First call goes to LLM
    await proc.filterResponse('slack/search', response);
    // Second call should hit cache (shouldFilter=false) and skip LLM
    const result2 = await proc.filterResponse('slack/search', response);
    expect(result2.filtered).toBe(false);
    const metrics = proc.getMetrics();
    expect(metrics.cacheHits).toBeGreaterThanOrEqual(1);
  });

  it('records metrics on filter operations', async () => {
    const largeData = { items: Array.from({ length: 50 }, (_, i) => ({ id: i, name: `item-${i}`, extra: 'x'.repeat(20) })) };
    const filteredData = { items: [{ id: 0, name: 'item-0' }] };
    const provider = mockProvider([JSON.stringify(filteredData)]);
    const proc = new LlmProcessor(makeRegistry(provider));
    const response = { jsonrpc: '2.0' as const, id: '1', result: largeData };
    await proc.filterResponse('slack/search', response);
    const metrics = proc.getMetrics();
    expect(metrics.filterCount).toBe(1);
    expect(metrics.totalTokensProcessed).toBeGreaterThan(0);
    expect(metrics.tokensSaved).toBeGreaterThan(0);
    expect(metrics.cacheMisses).toBe(1);
  });

  it('records metrics even on LLM failure', async () => {
    const badProvider: LlmProvider = {
      name: 'bad',
      async complete() { throw new Error('LLM down'); },
      async listModels() { return []; },
      async isAvailable() { return false; },
    };
    const largeData = { items: Array.from({ length: 50 }, (_, i) => ({ id: i, extra: 'x'.repeat(20) })) };
    const proc = new LlmProcessor(makeRegistry(badProvider));
    const response = { jsonrpc: '2.0' as const, id: '1', result: largeData };
    await proc.filterResponse('slack/search', response);
    const metrics = proc.getMetrics();
    expect(metrics.filterCount).toBe(1);
    expect(metrics.totalTokensProcessed).toBeGreaterThan(0);
    // No tokens saved because filter failed
    expect(metrics.tokensSaved).toBe(0);
  });
});
describe('LlmProcessor metrics and cache management', () => {
it('exposes metrics via getMetrics()', () => {
const proc = new LlmProcessor(makeRegistry());
const metrics = proc.getMetrics();
expect(metrics.totalTokensProcessed).toBe(0);
expect(metrics.filterCount).toBe(0);
});
it('resets metrics', async () => {
const largeData = { items: Array.from({ length: 50 }, (_, i) => ({ id: i, extra: 'x'.repeat(20) })) };
const provider = mockProvider([JSON.stringify({ summary: 'ok' })]);
const proc = new LlmProcessor(makeRegistry(provider));
const response = { jsonrpc: '2.0' as const, id: '1', result: largeData };
await proc.filterResponse('slack/search', response);
expect(proc.getMetrics().filterCount).toBe(1);
proc.resetMetrics();
expect(proc.getMetrics().filterCount).toBe(0);
});
it('clears filter cache', async () => {
const largeData = { items: Array.from({ length: 50 }, (_, i) => ({ id: i, extra: 'x'.repeat(20) })) };
const filteredData = { items: [{ id: 0 }] };
// Two responses needed: first call filters, second call after cache clear also filters
const provider = mockProvider([JSON.stringify(filteredData), JSON.stringify(filteredData)]);
const proc = new LlmProcessor(makeRegistry(provider));
const response = { jsonrpc: '2.0' as const, id: '1', result: largeData };
await proc.filterResponse('slack/search', response);
proc.clearFilterCache();
// After clearing cache, should get a cache miss again
proc.resetMetrics();
await proc.filterResponse('slack/search', response);
expect(proc.getMetrics().cacheMisses).toBe(1);
});
});

View File

@@ -0,0 +1,110 @@
import { describe, it, expect, vi } from 'vitest';
import { McpdUpstream } from '../src/upstream/mcpd.js';
import type { JsonRpcRequest } from '../src/types.js';
// Fake mcpd HTTP client whose post() resolves a canned response keyed by
// "<serverId>:<method>", falling back to a generic success payload.
function mockMcpdClient(responses: Map<string, unknown> = new Map()) {
  const post = vi.fn(async (_path: string, body: unknown) => {
    const { serverId, method } = body as { serverId: string; method: string };
    const key = `${serverId}:${method}`;
    return responses.has(key) ? responses.get(key) : { result: { ok: true } };
  });
  return {
    baseUrl: 'http://test:3100',
    token: 'test-token',
    get: vi.fn(),
    post,
    put: vi.fn(),
    delete: vi.fn(),
    forward: vi.fn(),
  };
}
describe('McpdUpstream', () => {
  // Convenience builder for the JSON-RPC requests used throughout the suite.
  const rpc = (id: string, method: string, params?: Record<string, unknown>): JsonRpcRequest => ({
    jsonrpc: '2.0',
    id,
    method,
    ...(params !== undefined ? { params } : {}),
  });

  it('sends tool calls via mcpd proxy', async () => {
    const responses = new Map<string, unknown>([
      ['srv-1:tools/call', { result: { content: [{ type: 'text', text: 'hello' }] } }],
    ]);
    const client = mockMcpdClient(responses);
    const upstream = new McpdUpstream('srv-1', 'slack', client as any);
    const callParams = { name: 'search', arguments: { query: 'test' } };
    const reply = await upstream.send(rpc('1', 'tools/call', callParams));
    expect(reply.result).toEqual({ content: [{ type: 'text', text: 'hello' }] });
    // The upstream must route through mcpd's proxy endpoint with the
    // original method and params attached.
    expect(client.post).toHaveBeenCalledWith('/api/v1/mcp/proxy', {
      serverId: 'srv-1',
      method: 'tools/call',
      params: { name: 'search', arguments: { query: 'test' } },
    });
  });

  it('sends tools/list via mcpd proxy', async () => {
    const client = mockMcpdClient(new Map([
      ['srv-1:tools/list', { result: { tools: [{ name: 'search', description: 'Search' }] } }],
    ]));
    const upstream = new McpdUpstream('srv-1', 'slack', client as any);
    const reply = await upstream.send(rpc('2', 'tools/list'));
    expect(reply.result).toEqual({ tools: [{ name: 'search', description: 'Search' }] });
  });

  it('returns error when mcpd fails', async () => {
    const client = mockMcpdClient();
    client.post.mockRejectedValue(new Error('connection refused'));
    const upstream = new McpdUpstream('srv-1', 'slack', client as any);
    const reply = await upstream.send(rpc('3', 'tools/list'));
    expect(reply.error).toBeDefined();
    expect(reply.error!.message).toContain('mcpd proxy error');
  });

  it('returns error when upstream is closed', async () => {
    const upstream = new McpdUpstream('srv-1', 'slack', mockMcpdClient() as any);
    await upstream.close();
    const reply = await upstream.send(rpc('4', 'tools/list'));
    expect(reply.error).toBeDefined();
    expect(reply.error!.message).toContain('closed');
  });

  it('reports alive status correctly', async () => {
    const upstream = new McpdUpstream('srv-1', 'slack', mockMcpdClient() as any);
    expect(upstream.isAlive()).toBe(true);
    await upstream.close();
    expect(upstream.isAlive()).toBe(false);
  });

  it('relays error responses from mcpd', async () => {
    const client = mockMcpdClient(new Map([
      ['srv-1:tools/call', { error: { code: -32601, message: 'Tool not found' } }],
    ]));
    const upstream = new McpdUpstream('srv-1', 'slack', client as any);
    const reply = await upstream.send(rpc('5', 'tools/call', { name: 'nonexistent' }));
    expect(reply.error).toEqual({ code: -32601, message: 'Tool not found' });
  });
});

View File

@@ -0,0 +1,93 @@
import { describe, it, expect } from 'vitest';
import { FilterMetrics } from '../src/llm/metrics.js';
describe('FilterMetrics', () => {
  it('starts with zeroed stats', () => {
    const stats = new FilterMetrics().getStats();
    expect(stats.totalTokensProcessed).toBe(0);
    expect(stats.tokensSaved).toBe(0);
    expect(stats.cacheHits).toBe(0);
    expect(stats.cacheMisses).toBe(0);
    expect(stats.filterCount).toBe(0);
    expect(stats.averageFilterLatencyMs).toBe(0);
  });

  it('records filter operations and accumulates tokens', () => {
    const metrics = new FilterMetrics();
    metrics.recordFilter(500, 200, 50);
    metrics.recordFilter(300, 100, 30);
    const stats = metrics.getStats();
    expect(stats.totalTokensProcessed).toBe(800);
    // Savings accumulate per call: (500 - 200) + (300 - 100).
    expect(stats.tokensSaved).toBe(500);
    expect(stats.filterCount).toBe(2);
    // Mean latency across both calls: (50 + 30) / 2.
    expect(stats.averageFilterLatencyMs).toBe(40);
  });

  it('does not allow negative token savings', () => {
    const metrics = new FilterMetrics();
    // Edge case: the "filtered" output grew instead of shrinking.
    metrics.recordFilter(100, 200, 10);
    const stats = metrics.getStats();
    expect(stats.totalTokensProcessed).toBe(100);
    expect(stats.tokensSaved).toBe(0); // clamped to 0
  });

  it('records cache hits and misses independently', () => {
    const metrics = new FilterMetrics();
    metrics.recordCacheHit();
    metrics.recordCacheHit();
    metrics.recordCacheMiss();
    const stats = metrics.getStats();
    expect(stats.cacheHits).toBe(2);
    expect(stats.cacheMisses).toBe(1);
  });

  it('computes average latency correctly', () => {
    const metrics = new FilterMetrics();
    for (const latency of [10, 20, 30]) {
      metrics.recordFilter(100, 50, latency);
    }
    expect(metrics.getStats().averageFilterLatencyMs).toBe(20);
  });

  it('returns 0 average latency when no filter operations', () => {
    const metrics = new FilterMetrics();
    // Cache activity alone must not affect the latency average.
    metrics.recordCacheHit();
    expect(metrics.getStats().averageFilterLatencyMs).toBe(0);
  });

  it('resets all metrics to zero', () => {
    const metrics = new FilterMetrics();
    metrics.recordFilter(500, 200, 50);
    metrics.recordCacheHit();
    metrics.recordCacheMiss();
    metrics.reset();
    const stats = metrics.getStats();
    expect(stats.totalTokensProcessed).toBe(0);
    expect(stats.tokensSaved).toBe(0);
    expect(stats.cacheHits).toBe(0);
    expect(stats.cacheMisses).toBe(0);
    expect(stats.filterCount).toBe(0);
    expect(stats.averageFilterLatencyMs).toBe(0);
  });

  it('returns independent snapshots', () => {
    const metrics = new FilterMetrics();
    metrics.recordFilter(100, 50, 10);
    const first = metrics.getStats();
    metrics.recordFilter(200, 100, 20);
    const second = metrics.getStats();
    // The first snapshot must not be mutated by later activity.
    expect(first.totalTokensProcessed).toBe(100);
    expect(second.totalTokensProcessed).toBe(300);
  });
});

View File

@@ -0,0 +1,304 @@
import { describe, it, expect, vi, beforeEach } from 'vitest';
import { TieredHealthMonitor } from '../src/health/tiered.js';
import type { TieredHealthMonitorDeps } from '../src/health/tiered.js';
import type { McpdClient } from '../src/http/mcpd-client.js';
import { ProviderRegistry } from '../src/providers/registry.js';
import type { LlmProvider } from '../src/providers/types.js';
// Fake mcpd client for the health monitor: /health and /instances can each
// be made to succeed with a canned payload or to fail with a thrown error.
function mockMcpdClient(overrides?: {
  getResult?: unknown;
  getFails?: boolean;
  instancesResult?: { instances: Array<{ name: string; status: string }> };
  instancesFails?: boolean;
}): McpdClient {
  const handleGet = async (path: string) => {
    switch (path) {
      case '/health':
        if (overrides?.getFails) {
          throw new Error('Connection refused');
        }
        return overrides?.getResult ?? { status: 'ok' };
      case '/instances':
        if (overrides?.instancesFails) {
          throw new Error('Connection refused');
        }
        return overrides?.instancesResult ?? { instances: [] };
      default:
        return {};
    }
  };
  return {
    get: vi.fn(handleGet),
    post: vi.fn(),
    put: vi.fn(),
    delete: vi.fn(),
    forward: vi.fn(),
  } as unknown as McpdClient;
}
// Always-available LLM provider whose methods are spies with benign defaults.
function mockLlmProvider(name: string): LlmProvider {
  const listModels = vi.fn(async () => []);
  const isAvailable = vi.fn(async () => true);
  return { name, complete: vi.fn(), listModels, isAvailable };
}
describe('TieredHealthMonitor', () => {
let providerRegistry: ProviderRegistry;
beforeEach(() => {
providerRegistry = new ProviderRegistry();
});
describe('mcplocal health', () => {
it('reports healthy status with uptime', async () => {
const monitor = new TieredHealthMonitor({
mcpdClient: null,
providerRegistry,
mcpdUrl: 'http://localhost:3100',
});
const result = await monitor.checkHealth();
expect(result.mcplocal.status).toBe('healthy');
expect(result.mcplocal.uptime).toBeGreaterThanOrEqual(0);
});
it('reports null llmProvider when none registered', async () => {
const monitor = new TieredHealthMonitor({
mcpdClient: null,
providerRegistry,
mcpdUrl: 'http://localhost:3100',
});
const result = await monitor.checkHealth();
expect(result.mcplocal.llmProvider).toBeNull();
});
it('reports active llmProvider name when one is registered', async () => {
const provider = mockLlmProvider('openai');
providerRegistry.register(provider);
const monitor = new TieredHealthMonitor({
mcpdClient: null,
providerRegistry,
mcpdUrl: 'http://localhost:3100',
});
const result = await monitor.checkHealth();
expect(result.mcplocal.llmProvider).toBe('openai');
});
it('reports the currently active provider when multiple registered', async () => {
providerRegistry.register(mockLlmProvider('openai'));
providerRegistry.register(mockLlmProvider('anthropic'));
providerRegistry.setActive('anthropic');
const monitor = new TieredHealthMonitor({
mcpdClient: null,
providerRegistry,
mcpdUrl: 'http://localhost:3100',
});
const result = await monitor.checkHealth();
expect(result.mcplocal.llmProvider).toBe('anthropic');
});
});
describe('mcpd health', () => {
it('reports connected when mcpd /health responds successfully', async () => {
const client = mockMcpdClient();
const monitor = new TieredHealthMonitor({
mcpdClient: client,
providerRegistry,
mcpdUrl: 'http://localhost:3100',
});
const result = await monitor.checkHealth();
expect(result.mcpd.status).toBe('connected');
expect(result.mcpd.url).toBe('http://localhost:3100');
});
it('reports disconnected when mcpd /health throws', async () => {
const client = mockMcpdClient({ getFails: true });
const monitor = new TieredHealthMonitor({
mcpdClient: client,
providerRegistry,
mcpdUrl: 'http://localhost:3100',
});
const result = await monitor.checkHealth();
expect(result.mcpd.status).toBe('disconnected');
expect(result.mcpd.url).toBe('http://localhost:3100');
});
it('reports disconnected when mcpdClient is null', async () => {
const monitor = new TieredHealthMonitor({
mcpdClient: null,
providerRegistry,
mcpdUrl: 'http://localhost:3100',
});
const result = await monitor.checkHealth();
expect(result.mcpd.status).toBe('disconnected');
expect(result.mcpd.url).toBe('http://localhost:3100');
});
it('includes the configured mcpd URL in the response', async () => {
const monitor = new TieredHealthMonitor({
mcpdClient: null,
providerRegistry,
mcpdUrl: 'http://custom-host:9999',
});
const result = await monitor.checkHealth();
expect(result.mcpd.url).toBe('http://custom-host:9999');
});
});
describe('instances', () => {
it('returns instances from mcpd /instances endpoint', async () => {
const client = mockMcpdClient({
instancesResult: {
instances: [
{ name: 'slack', status: 'running' },
{ name: 'github', status: 'stopped' },
],
},
});
const monitor = new TieredHealthMonitor({
mcpdClient: client,
providerRegistry,
mcpdUrl: 'http://localhost:3100',
});
const result = await monitor.checkHealth();
expect(result.instances).toHaveLength(2);
expect(result.instances[0]).toEqual({ name: 'slack', status: 'running' });
expect(result.instances[1]).toEqual({ name: 'github', status: 'stopped' });
});
it('returns empty array when mcpdClient is null', async () => {
const monitor = new TieredHealthMonitor({
mcpdClient: null,
providerRegistry,
mcpdUrl: 'http://localhost:3100',
});
const result = await monitor.checkHealth();
expect(result.instances).toEqual([]);
});
it('returns empty array when /instances request fails', async () => {
const client = mockMcpdClient({ instancesFails: true });
const monitor = new TieredHealthMonitor({
mcpdClient: client,
providerRegistry,
mcpdUrl: 'http://localhost:3100',
});
const result = await monitor.checkHealth();
expect(result.instances).toEqual([]);
});
it('returns empty array when mcpd has no instances', async () => {
const client = mockMcpdClient({
instancesResult: { instances: [] },
});
const monitor = new TieredHealthMonitor({
mcpdClient: client,
providerRegistry,
mcpdUrl: 'http://localhost:3100',
});
const result = await monitor.checkHealth();
expect(result.instances).toEqual([]);
});
});
describe('full integration', () => {
it('returns complete tiered status with all sections', async () => {
providerRegistry.register(mockLlmProvider('openai'));
const client = mockMcpdClient({
instancesResult: {
instances: [
{ name: 'slack', status: 'running' },
],
},
});
const monitor = new TieredHealthMonitor({
mcpdClient: client,
providerRegistry,
mcpdUrl: 'http://localhost:3100',
});
const result = await monitor.checkHealth();
// Verify structure
expect(result).toHaveProperty('mcplocal');
expect(result).toHaveProperty('mcpd');
expect(result).toHaveProperty('instances');
// mcplocal
expect(result.mcplocal.status).toBe('healthy');
expect(typeof result.mcplocal.uptime).toBe('number');
expect(result.mcplocal.llmProvider).toBe('openai');
// mcpd
expect(result.mcpd.status).toBe('connected');
// instances
expect(result.instances).toHaveLength(1);
expect(result.instances[0]?.name).toBe('slack');
});
it('handles degraded scenario: no mcpd, no provider', async () => {
const monitor = new TieredHealthMonitor({
mcpdClient: null,
providerRegistry,
mcpdUrl: 'http://localhost:3100',
});
const result = await monitor.checkHealth();
expect(result.mcplocal.status).toBe('healthy');
expect(result.mcplocal.llmProvider).toBeNull();
expect(result.mcpd.status).toBe('disconnected');
expect(result.instances).toEqual([]);
});
it('handles mcpd connected but instances endpoint failing', async () => {
const client = mockMcpdClient({ instancesFails: true });
const monitor = new TieredHealthMonitor({
mcpdClient: client,
providerRegistry,
mcpdUrl: 'http://localhost:3100',
});
const result = await monitor.checkHealth();
expect(result.mcpd.status).toBe('connected');
expect(result.instances).toEqual([]);
});
});
});

View File

@@ -0,0 +1,45 @@
import { describe, it, expect } from 'vitest';
import { estimateTokens } from '../src/llm/token-counter.js';
describe('estimateTokens', () => {
  it('returns 0 for empty string', () => {
    expect(estimateTokens('')).toBe(0);
  });

  it('returns 1 for strings of 1-4 characters', () => {
    for (const sample of ['a', 'ab', 'abc', 'abcd']) {
      expect(estimateTokens(sample)).toBe(1);
    }
  });

  it('returns 2 for strings of 5-8 characters', () => {
    expect(estimateTokens('abcde')).toBe(2);
    expect(estimateTokens('abcdefgh')).toBe(2);
  });

  it('estimates roughly 4 chars per token for longer text', () => {
    expect(estimateTokens('a'.repeat(1000))).toBe(250);
  });

  it('rounds up partial tokens', () => {
    expect(estimateTokens('abcdefg')).toBe(2); // 7 / 4 = 1.75 -> ceil = 2
    expect(estimateTokens('abcdefghi')).toBe(3); // 9 / 4 = 2.25 -> ceil = 3
  });

  it('handles JSON payloads', () => {
    const payload = JSON.stringify({ key: 'value', nested: { a: 1, b: [1, 2, 3] } });
    expect(estimateTokens(payload)).toBe(Math.ceil(payload.length / 4));
  });

  it('handles unicode text', () => {
    // Estimation is by string length (UTF-16 code units), not bytes, so
    // each astral-plane emoji contributes two units.
    const emoji = '\u{1F600}'.repeat(10);
    expect(estimateTokens(emoji)).toBe(Math.ceil(emoji.length / 4));
  });
});

View File

@@ -2,7 +2,7 @@ import { defineProject } from 'vitest/config';
export default defineProject({
  test: {
    // Project renamed from 'local-proxy' to 'mcplocal' in the v2 rework;
    // only one `name` entry may remain (duplicate keys are invalid).
    name: 'mcplocal',
    include: ['tests/**/*.test.ts'],
  },
});