/**
 * End-to-end integration tests for the mcpctl 3-tier architecture.
 *
 * These tests wire together the real McpRouter, McpdUpstream, LlmProcessor,
 * TieredHealthMonitor, and discovery logic against a mock mcpd HTTP server
 * (node:http) and a mock LLM provider. No Docker or external services needed.
 */
|
|
|
import { createServer, type Server, type IncomingMessage, type ServerResponse } from 'node:http';

import { describe, it, expect, vi, beforeEach, afterEach, afterAll } from 'vitest';

import { refreshUpstreams } from '../../src/discovery.js';
import { TieredHealthMonitor } from '../../src/health/tiered.js';
import { McpdClient } from '../../src/http/mcpd-client.js';
import { ResponsePaginator } from '../../src/llm/pagination.js';
import { LlmProcessor, DEFAULT_PROCESSOR_CONFIG } from '../../src/llm/processor.js';
import { ProviderRegistry } from '../../src/providers/registry.js';
import type { LlmProvider, CompletionResult, CompletionOptions } from '../../src/providers/types.js';
import { McpRouter } from '../../src/router.js';
import type { JsonRpcRequest } from '../../src/types.js';
import { McpdUpstream } from '../../src/upstream/mcpd.js';
|
|
|
|
|
|
|
|
|
|
// ---------------------------------------------------------------------------
|
|
|
|
|
// Mock mcpd HTTP server
|
|
|
|
|
// ---------------------------------------------------------------------------
|
|
|
|
|
|
|
|
|
|
interface MockMcpdServerConfig {
|
|
|
|
|
/** MCP servers that mcpd reports */
|
|
|
|
|
servers: Array<{ id: string; name: string; transport: string; status?: string | undefined }>;
|
|
|
|
|
/** Map of "serverId:method" -> response payload */
|
|
|
|
|
proxyResponses: Map<string, { result?: unknown; error?: { code: number; message: string } }>;
|
|
|
|
|
/** Instances returned by /api/v1/instances */
|
|
|
|
|
instances: Array<{ name: string; status: string }>;
|
|
|
|
|
/** The expected auth token (all requests must carry this) */
|
|
|
|
|
expectedToken: string;
|
|
|
|
|
/** Track requests for assertion purposes */
|
|
|
|
|
requestLog: Array<{
|
|
|
|
|
method: string;
|
|
|
|
|
url: string;
|
|
|
|
|
authHeader: string | undefined;
|
|
|
|
|
body: unknown;
|
|
|
|
|
}>;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
function defaultMockConfig(): MockMcpdServerConfig {
|
|
|
|
|
return {
|
|
|
|
|
servers: [],
|
|
|
|
|
proxyResponses: new Map(),
|
|
|
|
|
instances: [],
|
|
|
|
|
expectedToken: 'test-token-12345',
|
|
|
|
|
requestLog: [],
|
|
|
|
|
};
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Starts a real HTTP server on an ephemeral port that mimics mcpd's API.
|
|
|
|
|
* Returns the server, its base URL, and the config (mutable for per-test tweaks).
|
|
|
|
|
*/
|
|
|
|
|
async function startMockMcpd(
|
|
|
|
|
configOverrides?: Partial<MockMcpdServerConfig>,
|
|
|
|
|
): Promise<{ server: Server; baseUrl: string; config: MockMcpdServerConfig }> {
|
|
|
|
|
const config: MockMcpdServerConfig = { ...defaultMockConfig(), ...configOverrides };
|
|
|
|
|
|
|
|
|
|
const server = createServer(async (req: IncomingMessage, res: ServerResponse) => {
|
|
|
|
|
// Collect body
|
|
|
|
|
const chunks: Buffer[] = [];
|
|
|
|
|
for await (const chunk of req) {
|
|
|
|
|
chunks.push(chunk as Buffer);
|
|
|
|
|
}
|
|
|
|
|
const bodyStr = Buffer.concat(chunks).toString('utf-8');
|
|
|
|
|
let body: unknown;
|
|
|
|
|
try {
|
|
|
|
|
body = bodyStr ? JSON.parse(bodyStr) : undefined;
|
|
|
|
|
} catch {
|
|
|
|
|
body = bodyStr;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Log the request
|
|
|
|
|
config.requestLog.push({
|
|
|
|
|
method: req.method ?? 'GET',
|
|
|
|
|
url: req.url ?? '/',
|
|
|
|
|
authHeader: req.headers['authorization'],
|
|
|
|
|
body,
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
// Auth check
|
|
|
|
|
const auth = req.headers['authorization'];
|
|
|
|
|
if (auth !== `Bearer ${config.expectedToken}`) {
|
|
|
|
|
res.writeHead(401, { 'Content-Type': 'application/json' });
|
|
|
|
|
res.end(JSON.stringify({ error: 'Unauthorized' }));
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
const url = req.url ?? '/';
|
|
|
|
|
|
|
|
|
|
// Health endpoint
|
|
|
|
|
if (url === '/health' && req.method === 'GET') {
|
|
|
|
|
res.writeHead(200, { 'Content-Type': 'application/json' });
|
|
|
|
|
res.end(JSON.stringify({ status: 'ok' }));
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Servers list
|
|
|
|
|
if (url === '/api/v1/servers' && req.method === 'GET') {
|
|
|
|
|
res.writeHead(200, { 'Content-Type': 'application/json' });
|
|
|
|
|
res.end(JSON.stringify(config.servers));
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Instances (TieredHealthMonitor uses /instances, other code may use /api/v1/instances)
|
|
|
|
|
if ((url === '/instances' || url === '/api/v1/instances') && req.method === 'GET') {
|
|
|
|
|
res.writeHead(200, { 'Content-Type': 'application/json' });
|
|
|
|
|
res.end(JSON.stringify({ instances: config.instances }));
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// MCP proxy endpoint
|
|
|
|
|
if (url === '/api/v1/mcp/proxy' && req.method === 'POST') {
|
|
|
|
|
const proxyReq = body as { serverId: string; method: string; params?: Record<string, unknown> };
|
|
|
|
|
const key = `${proxyReq.serverId}:${proxyReq.method}`;
|
|
|
|
|
const resp = config.proxyResponses.get(key);
|
|
|
|
|
if (resp) {
|
|
|
|
|
res.writeHead(200, { 'Content-Type': 'application/json' });
|
|
|
|
|
res.end(JSON.stringify(resp));
|
|
|
|
|
} else {
|
|
|
|
|
// Default: return a generic success
|
|
|
|
|
res.writeHead(200, { 'Content-Type': 'application/json' });
|
|
|
|
|
res.end(JSON.stringify({ result: { ok: true } }));
|
|
|
|
|
}
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Catch-all 404
|
|
|
|
|
res.writeHead(404, { 'Content-Type': 'application/json' });
|
|
|
|
|
res.end(JSON.stringify({ error: 'Not found' }));
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
// Listen on random port
|
|
|
|
|
await new Promise<void>((resolve) => {
|
|
|
|
|
server.listen(0, '127.0.0.1', resolve);
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
const addr = server.address();
|
|
|
|
|
if (!addr || typeof addr === 'string') {
|
|
|
|
|
throw new Error('Failed to get server address');
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
return {
|
|
|
|
|
server,
|
|
|
|
|
baseUrl: `http://127.0.0.1:${String(addr.port)}`,
|
|
|
|
|
config,
|
|
|
|
|
};
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// ---------------------------------------------------------------------------
|
|
|
|
|
// Mock LLM provider
|
|
|
|
|
// ---------------------------------------------------------------------------
|
|
|
|
|
|
|
|
|
|
function createMockLlmProvider(
|
|
|
|
|
name: string,
|
|
|
|
|
handler: (options: CompletionOptions) => string,
|
|
|
|
|
): LlmProvider {
|
|
|
|
|
return {
|
|
|
|
|
name,
|
|
|
|
|
async complete(options: CompletionOptions): Promise<CompletionResult> {
|
|
|
|
|
const content = handler(options);
|
|
|
|
|
return {
|
|
|
|
|
content,
|
|
|
|
|
toolCalls: [],
|
|
|
|
|
usage: { promptTokens: 10, completionTokens: 5, totalTokens: 15 },
|
|
|
|
|
finishReason: 'stop',
|
|
|
|
|
};
|
|
|
|
|
},
|
|
|
|
|
async listModels() {
|
|
|
|
|
return ['mock-model-1'];
|
|
|
|
|
},
|
|
|
|
|
async isAvailable() {
|
|
|
|
|
return true;
|
|
|
|
|
},
|
|
|
|
|
};
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
function createFailingLlmProvider(name: string): LlmProvider {
|
|
|
|
|
return {
|
|
|
|
|
name,
|
|
|
|
|
async complete(): Promise<CompletionResult> {
|
|
|
|
|
throw new Error('LLM provider unavailable');
|
|
|
|
|
},
|
|
|
|
|
async listModels() {
|
|
|
|
|
return [];
|
|
|
|
|
},
|
|
|
|
|
async isAvailable() {
|
|
|
|
|
return false;
|
|
|
|
|
},
|
|
|
|
|
};
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// ---------------------------------------------------------------------------
|
|
|
|
|
// Tests
|
|
|
|
|
// ---------------------------------------------------------------------------
|
|
|
|
|
|
|
|
|
|
describe('End-to-end integration: 3-tier architecture', () => {
|
|
|
|
|
let mockMcpd: { server: Server; baseUrl: string; config: MockMcpdServerConfig };
|
|
|
|
|
let router: McpRouter;
|
|
|
|
|
|
|
|
|
|
afterEach(async () => {
|
|
|
|
|
if (router) {
|
|
|
|
|
await router.closeAll();
|
|
|
|
|
}
|
|
|
|
|
if (mockMcpd?.server) {
|
|
|
|
|
await new Promise<void>((resolve) => {
|
|
|
|
|
mockMcpd.server.close(() => resolve());
|
|
|
|
|
});
|
|
|
|
|
}
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
// -----------------------------------------------------------------------
|
|
|
|
|
// 1. Full tool call flow through McpdUpstream
|
|
|
|
|
// -----------------------------------------------------------------------
|
|
|
|
|
describe('Full tool call flow', () => {
|
|
|
|
|
it('routes a tool call through McpRouter -> McpdUpstream -> mock mcpd and returns the response', async () => {
|
|
|
|
|
mockMcpd = await startMockMcpd({
|
|
|
|
|
servers: [
|
|
|
|
|
{ id: 'srv-slack', name: 'slack', transport: 'stdio' },
|
|
|
|
|
],
|
|
|
|
|
proxyResponses: new Map([
|
|
|
|
|
['srv-slack:tools/list', {
|
|
|
|
|
result: {
|
|
|
|
|
tools: [
|
|
|
|
|
{ name: 'search_messages', description: 'Search Slack messages', inputSchema: {} },
|
|
|
|
|
{ name: 'send_message', description: 'Send a message', inputSchema: {} },
|
|
|
|
|
],
|
|
|
|
|
},
|
|
|
|
|
}],
|
|
|
|
|
['srv-slack:tools/call', {
|
|
|
|
|
result: {
|
|
|
|
|
content: [{ type: 'text', text: 'Found 3 messages matching "deploy"' }],
|
|
|
|
|
},
|
|
|
|
|
}],
|
|
|
|
|
]),
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
const client = new McpdClient(mockMcpd.baseUrl, mockMcpd.config.expectedToken);
|
|
|
|
|
router = new McpRouter();
|
|
|
|
|
|
|
|
|
|
// Discover servers from mcpd and register them
|
|
|
|
|
await refreshUpstreams(router, client);
|
|
|
|
|
expect(router.getUpstreamNames()).toContain('slack');
|
|
|
|
|
|
|
|
|
|
// Discover tools
|
|
|
|
|
const tools = await router.discoverTools();
|
|
|
|
|
expect(tools.map((t) => t.name)).toContain('slack/search_messages');
|
|
|
|
|
expect(tools.map((t) => t.name)).toContain('slack/send_message');
|
|
|
|
|
|
|
|
|
|
// Call a tool
|
|
|
|
|
const response = await router.route({
|
|
|
|
|
jsonrpc: '2.0',
|
|
|
|
|
id: 'call-1',
|
|
|
|
|
method: 'tools/call',
|
|
|
|
|
params: { name: 'slack/search_messages', arguments: { query: 'deploy' } },
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
expect(response.error).toBeUndefined();
|
|
|
|
|
expect(response.result).toEqual({
|
|
|
|
|
content: [{ type: 'text', text: 'Found 3 messages matching "deploy"' }],
|
|
|
|
|
});
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
it('handles tool call that returns an error from the upstream server', async () => {
|
|
|
|
|
mockMcpd = await startMockMcpd({
|
|
|
|
|
servers: [
|
|
|
|
|
{ id: 'srv-db', name: 'database', transport: 'stdio' },
|
|
|
|
|
],
|
|
|
|
|
proxyResponses: new Map([
|
|
|
|
|
['srv-db:tools/list', {
|
|
|
|
|
result: { tools: [{ name: 'query', description: 'Run SQL query' }] },
|
|
|
|
|
}],
|
|
|
|
|
['srv-db:tools/call', {
|
|
|
|
|
error: { code: -32000, message: 'Query timeout: exceeded 30s limit' },
|
|
|
|
|
}],
|
|
|
|
|
]),
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
const client = new McpdClient(mockMcpd.baseUrl, mockMcpd.config.expectedToken);
|
|
|
|
|
router = new McpRouter();
|
|
|
|
|
await refreshUpstreams(router, client);
|
|
|
|
|
await router.discoverTools();
|
|
|
|
|
|
|
|
|
|
const response = await router.route({
|
|
|
|
|
jsonrpc: '2.0',
|
|
|
|
|
id: 'call-err',
|
|
|
|
|
method: 'tools/call',
|
|
|
|
|
params: { name: 'database/query', arguments: { sql: 'SELECT * FROM huge_table' } },
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
expect(response.error).toBeDefined();
|
|
|
|
|
expect(response.error?.code).toBe(-32000);
|
|
|
|
|
expect(response.error?.message).toContain('Query timeout');
|
|
|
|
|
});
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
// -----------------------------------------------------------------------
|
|
|
|
|
// 2. Tool call with LLM filtering
|
|
|
|
|
// -----------------------------------------------------------------------
|
|
|
|
|
describe('Tool call with LLM filtering', () => {
|
|
|
|
|
it('filters a large response through the LLM processor and returns reduced data', async () => {
|
|
|
|
|
// Generate a large response that exceeds the token threshold (~250 tokens = ~1000 chars)
|
|
|
|
|
const largeItems = Array.from({ length: 100 }, (_, i) => ({
|
|
|
|
|
id: i,
|
|
|
|
|
name: `item-${String(i)}`,
|
|
|
|
|
description: `This is a verbose description for item number ${String(i)} that adds bulk`,
|
|
|
|
|
metadata: { created: '2025-01-01', tags: ['a', 'b', 'c'] },
|
|
|
|
|
}));
|
|
|
|
|
|
|
|
|
|
const filteredItems = [
|
|
|
|
|
{ id: 0, name: 'item-0' },
|
|
|
|
|
{ id: 1, name: 'item-1' },
|
|
|
|
|
{ id: 2, name: 'item-2' },
|
|
|
|
|
];
|
|
|
|
|
|
|
|
|
|
mockMcpd = await startMockMcpd({
|
|
|
|
|
servers: [{ id: 'srv-data', name: 'dataserver', transport: 'stdio' }],
|
|
|
|
|
proxyResponses: new Map([
|
|
|
|
|
['srv-data:tools/list', {
|
|
|
|
|
result: { tools: [{ name: 'search_records', description: 'Search records' }] },
|
|
|
|
|
}],
|
|
|
|
|
['srv-data:tools/call', {
|
|
|
|
|
result: { items: largeItems },
|
|
|
|
|
}],
|
|
|
|
|
]),
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
const client = new McpdClient(mockMcpd.baseUrl, mockMcpd.config.expectedToken);
|
|
|
|
|
router = new McpRouter();
|
|
|
|
|
await refreshUpstreams(router, client);
|
|
|
|
|
await router.discoverTools();
|
|
|
|
|
|
|
|
|
|
// Create LLM processor with mock provider that returns filtered data
|
|
|
|
|
const registry = new ProviderRegistry();
|
|
|
|
|
const mockProvider = createMockLlmProvider('test-filter', () => {
|
|
|
|
|
return JSON.stringify({ items: filteredItems });
|
|
|
|
|
});
|
|
|
|
|
registry.register(mockProvider);
|
|
|
|
|
|
|
|
|
|
const processor = new LlmProcessor(registry, {
|
|
|
|
|
...DEFAULT_PROCESSOR_CONFIG,
|
|
|
|
|
enableFiltering: true,
|
|
|
|
|
tokenThreshold: 50, // Low threshold so filtering kicks in
|
|
|
|
|
});
|
|
|
|
|
router.setLlmProcessor(processor);
|
|
|
|
|
|
|
|
|
|
// Call the tool - should get filtered response
|
|
|
|
|
const response = await router.route({
|
|
|
|
|
jsonrpc: '2.0',
|
|
|
|
|
id: 'filter-1',
|
|
|
|
|
method: 'tools/call',
|
|
|
|
|
params: { name: 'dataserver/search_records', arguments: { query: 'test' } },
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
expect(response.error).toBeUndefined();
|
|
|
|
|
// The result should be the filtered (smaller) version
|
|
|
|
|
const result = response.result as { items: Array<{ id: number; name: string }> };
|
|
|
|
|
expect(result.items).toHaveLength(3);
|
|
|
|
|
expect(result.items[0]?.name).toBe('item-0');
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
it('preserves original response size information in metrics', async () => {
|
|
|
|
|
const largePayload = Array.from({ length: 80 }, (_, i) => ({
|
|
|
|
|
id: i,
|
|
|
|
|
value: 'x'.repeat(30),
|
|
|
|
|
}));
|
|
|
|
|
|
|
|
|
|
mockMcpd = await startMockMcpd({
|
|
|
|
|
servers: [{ id: 'srv-big', name: 'bigserver', transport: 'stdio' }],
|
|
|
|
|
proxyResponses: new Map([
|
|
|
|
|
['srv-big:tools/list', {
|
|
|
|
|
result: { tools: [{ name: 'fetch_data', description: 'Fetch data' }] },
|
|
|
|
|
}],
|
|
|
|
|
['srv-big:tools/call', {
|
|
|
|
|
result: { data: largePayload },
|
|
|
|
|
}],
|
|
|
|
|
]),
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
const client = new McpdClient(mockMcpd.baseUrl, mockMcpd.config.expectedToken);
|
|
|
|
|
router = new McpRouter();
|
|
|
|
|
await refreshUpstreams(router, client);
|
|
|
|
|
await router.discoverTools();
|
|
|
|
|
|
|
|
|
|
const registry = new ProviderRegistry();
|
|
|
|
|
const mockProvider = createMockLlmProvider('test-metrics', () => {
|
|
|
|
|
return JSON.stringify({ summary: 'reduced' });
|
|
|
|
|
});
|
|
|
|
|
registry.register(mockProvider);
|
|
|
|
|
|
|
|
|
|
const processor = new LlmProcessor(registry, {
|
|
|
|
|
...DEFAULT_PROCESSOR_CONFIG,
|
|
|
|
|
enableFiltering: true,
|
|
|
|
|
tokenThreshold: 10,
|
|
|
|
|
});
|
|
|
|
|
router.setLlmProcessor(processor);
|
|
|
|
|
|
|
|
|
|
await router.route({
|
|
|
|
|
jsonrpc: '2.0',
|
|
|
|
|
id: 'metrics-1',
|
|
|
|
|
method: 'tools/call',
|
|
|
|
|
params: { name: 'bigserver/fetch_data', arguments: {} },
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
const metrics = processor.getMetrics();
|
|
|
|
|
expect(metrics.filterCount).toBe(1);
|
|
|
|
|
expect(metrics.tokensSaved).toBeGreaterThan(0);
|
|
|
|
|
});
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
// -----------------------------------------------------------------------
|
|
|
|
|
// 3. Tool call without LLM (bypass)
|
|
|
|
|
// -----------------------------------------------------------------------
|
|
|
|
|
describe('Tool call without LLM (bypass)', () => {
|
|
|
|
|
it('bypasses LLM filtering for simple create/delete operations', async () => {
|
|
|
|
|
mockMcpd = await startMockMcpd({
|
|
|
|
|
servers: [{ id: 'srv-crud', name: 'crudserver', transport: 'stdio' }],
|
|
|
|
|
proxyResponses: new Map([
|
|
|
|
|
['srv-crud:tools/list', {
|
|
|
|
|
result: {
|
|
|
|
|
tools: [
|
|
|
|
|
{ name: 'create_record', description: 'Create a record' },
|
|
|
|
|
{ name: 'delete_record', description: 'Delete a record' },
|
|
|
|
|
{ name: 'search_records', description: 'Search records' },
|
|
|
|
|
],
|
|
|
|
|
},
|
|
|
|
|
}],
|
|
|
|
|
['srv-crud:tools/call', {
|
|
|
|
|
result: { success: true, id: 'new-record-123' },
|
|
|
|
|
}],
|
|
|
|
|
]),
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
const client = new McpdClient(mockMcpd.baseUrl, mockMcpd.config.expectedToken);
|
|
|
|
|
router = new McpRouter();
|
|
|
|
|
await refreshUpstreams(router, client);
|
|
|
|
|
await router.discoverTools();
|
|
|
|
|
|
|
|
|
|
// Set up LLM processor - it should bypass create/delete ops
|
|
|
|
|
const registry = new ProviderRegistry();
|
|
|
|
|
let llmCallCount = 0;
|
|
|
|
|
const mockProvider = createMockLlmProvider('test-bypass', () => {
|
|
|
|
|
llmCallCount++;
|
|
|
|
|
return '{}';
|
|
|
|
|
});
|
|
|
|
|
registry.register(mockProvider);
|
|
|
|
|
|
|
|
|
|
const processor = new LlmProcessor(registry, {
|
|
|
|
|
...DEFAULT_PROCESSOR_CONFIG,
|
|
|
|
|
enableFiltering: true,
|
|
|
|
|
tokenThreshold: 0, // No threshold, always attempt filter
|
|
|
|
|
});
|
|
|
|
|
router.setLlmProcessor(processor);
|
|
|
|
|
|
|
|
|
|
// Call create_record - should bypass LLM
|
|
|
|
|
const createResponse = await router.route({
|
|
|
|
|
jsonrpc: '2.0',
|
|
|
|
|
id: 'bypass-1',
|
|
|
|
|
method: 'tools/call',
|
|
|
|
|
params: { name: 'crudserver/create_record', arguments: { name: 'test' } },
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
expect(createResponse.error).toBeUndefined();
|
|
|
|
|
expect(createResponse.result).toEqual({ success: true, id: 'new-record-123' });
|
|
|
|
|
expect(llmCallCount).toBe(0); // LLM was not called
|
|
|
|
|
|
|
|
|
|
// Call delete_record - should also bypass
|
|
|
|
|
const deleteResponse = await router.route({
|
|
|
|
|
jsonrpc: '2.0',
|
|
|
|
|
id: 'bypass-2',
|
|
|
|
|
method: 'tools/call',
|
|
|
|
|
params: { name: 'crudserver/delete_record', arguments: { id: 'record-1' } },
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
expect(deleteResponse.error).toBeUndefined();
|
|
|
|
|
expect(llmCallCount).toBe(0); // Still not called
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
it('passes through directly when no LLM processor is set', async () => {
|
|
|
|
|
mockMcpd = await startMockMcpd({
|
|
|
|
|
servers: [{ id: 'srv-simple', name: 'simpleserver', transport: 'stdio' }],
|
|
|
|
|
proxyResponses: new Map([
|
|
|
|
|
['srv-simple:tools/list', {
|
|
|
|
|
result: { tools: [{ name: 'get_info', description: 'Get info' }] },
|
|
|
|
|
}],
|
|
|
|
|
['srv-simple:tools/call', {
|
|
|
|
|
result: { info: 'direct passthrough data' },
|
|
|
|
|
}],
|
|
|
|
|
]),
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
const client = new McpdClient(mockMcpd.baseUrl, mockMcpd.config.expectedToken);
|
|
|
|
|
router = new McpRouter();
|
|
|
|
|
await refreshUpstreams(router, client);
|
|
|
|
|
await router.discoverTools();
|
|
|
|
|
|
|
|
|
|
// No LLM processor set - direct passthrough
|
|
|
|
|
const response = await router.route({
|
|
|
|
|
jsonrpc: '2.0',
|
|
|
|
|
id: 'direct-1',
|
|
|
|
|
method: 'tools/call',
|
|
|
|
|
params: { name: 'simpleserver/get_info', arguments: {} },
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
expect(response.error).toBeUndefined();
|
|
|
|
|
expect(response.result).toEqual({ info: 'direct passthrough data' });
|
|
|
|
|
});
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
// -----------------------------------------------------------------------
|
|
|
|
|
// 4. Server discovery and routing
|
|
|
|
|
// -----------------------------------------------------------------------
|
|
|
|
|
describe('Server discovery and routing', () => {
|
|
|
|
|
it('discovers tools from multiple servers and routes by name prefix', async () => {
|
|
|
|
|
mockMcpd = await startMockMcpd({
|
|
|
|
|
servers: [
|
|
|
|
|
{ id: 'srv-slack', name: 'slack', transport: 'stdio' },
|
|
|
|
|
{ id: 'srv-github', name: 'github', transport: 'stdio' },
|
|
|
|
|
],
|
|
|
|
|
proxyResponses: new Map([
|
|
|
|
|
['srv-slack:tools/list', {
|
|
|
|
|
result: {
|
|
|
|
|
tools: [
|
|
|
|
|
{ name: 'search_messages', description: 'Search messages' },
|
|
|
|
|
{ name: 'post_message', description: 'Post a message' },
|
|
|
|
|
],
|
|
|
|
|
},
|
|
|
|
|
}],
|
|
|
|
|
['srv-github:tools/list', {
|
|
|
|
|
result: {
|
|
|
|
|
tools: [
|
|
|
|
|
{ name: 'list_issues', description: 'List issues' },
|
|
|
|
|
{ name: 'search_code', description: 'Search code' },
|
|
|
|
|
],
|
|
|
|
|
},
|
|
|
|
|
}],
|
|
|
|
|
['srv-slack:tools/call', {
|
|
|
|
|
result: { source: 'slack', data: 'slack response' },
|
|
|
|
|
}],
|
|
|
|
|
['srv-github:tools/call', {
|
|
|
|
|
result: { source: 'github', data: 'github response' },
|
|
|
|
|
}],
|
|
|
|
|
]),
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
const client = new McpdClient(mockMcpd.baseUrl, mockMcpd.config.expectedToken);
|
|
|
|
|
router = new McpRouter();
|
|
|
|
|
await refreshUpstreams(router, client);
|
|
|
|
|
|
|
|
|
|
// Discover all tools
|
|
|
|
|
const tools = await router.discoverTools();
|
|
|
|
|
expect(tools).toHaveLength(4);
|
|
|
|
|
expect(tools.map((t) => t.name).sort()).toEqual([
|
|
|
|
|
'github/list_issues',
|
|
|
|
|
'github/search_code',
|
|
|
|
|
'slack/post_message',
|
|
|
|
|
'slack/search_messages',
|
|
|
|
|
]);
|
|
|
|
|
|
|
|
|
|
// Route to slack
|
|
|
|
|
const slackResponse = await router.route({
|
|
|
|
|
jsonrpc: '2.0',
|
|
|
|
|
id: 'multi-1',
|
|
|
|
|
method: 'tools/call',
|
|
|
|
|
params: { name: 'slack/search_messages', arguments: { query: 'hello' } },
|
|
|
|
|
});
|
|
|
|
|
expect(slackResponse.error).toBeUndefined();
|
|
|
|
|
expect((slackResponse.result as Record<string, unknown>)['source']).toBe('slack');
|
|
|
|
|
|
|
|
|
|
// Route to github
|
|
|
|
|
const githubResponse = await router.route({
|
|
|
|
|
jsonrpc: '2.0',
|
|
|
|
|
id: 'multi-2',
|
|
|
|
|
method: 'tools/call',
|
|
|
|
|
params: { name: 'github/list_issues', arguments: { repo: 'test' } },
|
|
|
|
|
});
|
|
|
|
|
expect(githubResponse.error).toBeUndefined();
|
|
|
|
|
expect((githubResponse.result as Record<string, unknown>)['source']).toBe('github');
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
it('refreshUpstreams removes stale servers and adds new ones', async () => {
|
|
|
|
|
mockMcpd = await startMockMcpd({
|
|
|
|
|
servers: [
|
|
|
|
|
{ id: 'srv-a', name: 'server-a', transport: 'stdio' },
|
|
|
|
|
{ id: 'srv-b', name: 'server-b', transport: 'stdio' },
|
|
|
|
|
],
|
|
|
|
|
proxyResponses: new Map(),
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
const client = new McpdClient(mockMcpd.baseUrl, mockMcpd.config.expectedToken);
|
|
|
|
|
router = new McpRouter();
|
|
|
|
|
|
|
|
|
|
// First discovery
|
|
|
|
|
const first = await refreshUpstreams(router, client);
|
|
|
|
|
expect(first.sort()).toEqual(['server-a', 'server-b']);
|
|
|
|
|
expect(router.getUpstreamNames().sort()).toEqual(['server-a', 'server-b']);
|
|
|
|
|
|
|
|
|
|
// Reconfigure mock: remove server-a, add server-c
|
|
|
|
|
mockMcpd.config.servers = [
|
|
|
|
|
{ id: 'srv-b', name: 'server-b', transport: 'stdio' },
|
|
|
|
|
{ id: 'srv-c', name: 'server-c', transport: 'stdio' },
|
|
|
|
|
];
|
|
|
|
|
|
|
|
|
|
// Second discovery
|
|
|
|
|
const second = await refreshUpstreams(router, client);
|
|
|
|
|
expect(second.sort()).toEqual(['server-b', 'server-c']);
|
|
|
|
|
expect(router.getUpstreamNames().sort()).toEqual(['server-b', 'server-c']);
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
it('uses the initialize method to return proxy capabilities', async () => {
|
|
|
|
|
router = new McpRouter();
|
|
|
|
|
|
|
|
|
|
const response = await router.route({
|
|
|
|
|
jsonrpc: '2.0',
|
|
|
|
|
id: 'init-1',
|
|
|
|
|
method: 'initialize',
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
expect(response.error).toBeUndefined();
|
|
|
|
|
const result = response.result as Record<string, unknown>;
|
|
|
|
|
expect(result['protocolVersion']).toBe('2024-11-05');
|
|
|
|
|
expect((result['serverInfo'] as Record<string, unknown>)['name']).toBe('mcpctl-proxy');
|
|
|
|
|
const capabilities = result['capabilities'] as Record<string, unknown>;
|
|
|
|
|
expect(capabilities).toHaveProperty('tools');
|
|
|
|
|
expect(capabilities).toHaveProperty('resources');
|
|
|
|
|
expect(capabilities).toHaveProperty('prompts');
|
|
|
|
|
});
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
// -----------------------------------------------------------------------
|
|
|
|
|
// 5. Error handling
|
|
|
|
|
// -----------------------------------------------------------------------
|
|
|
|
|
describe('Error handling', () => {
|
|
|
|
|
it('returns graceful error when mcpd is unreachable', async () => {
|
|
|
|
|
// Create a client pointing to a port that nothing listens on
|
|
|
|
|
const client = new McpdClient('http://127.0.0.1:1', 'some-token');
|
|
|
|
|
router = new McpRouter();
|
|
|
|
|
|
|
|
|
|
// refreshUpstreams will fail because mcpd is unreachable
|
|
|
|
|
await expect(refreshUpstreams(router, client)).rejects.toThrow();
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
it('returns error response when mcpd proxy call fails mid-request', async () => {
|
|
|
|
|
mockMcpd = await startMockMcpd({
|
|
|
|
|
servers: [{ id: 'srv-flaky', name: 'flaky', transport: 'stdio' }],
|
|
|
|
|
proxyResponses: new Map([
|
|
|
|
|
['srv-flaky:tools/list', {
|
|
|
|
|
result: { tools: [{ name: 'do_thing', description: 'Do a thing' }] },
|
|
|
|
|
}],
|
|
|
|
|
// No response configured for tools/call -> will get default { result: { ok: true } }
|
|
|
|
|
]),
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
const client = new McpdClient(mockMcpd.baseUrl, mockMcpd.config.expectedToken);
|
|
|
|
|
router = new McpRouter();
|
|
|
|
|
await refreshUpstreams(router, client);
|
|
|
|
|
await router.discoverTools();
|
|
|
|
|
|
|
|
|
|
// Close the mock server to simulate mcpd going down
|
|
|
|
|
await new Promise<void>((resolve) => {
|
|
|
|
|
mockMcpd.server.close(() => resolve());
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
// Now try to call - mcpd is down
|
|
|
|
|
const response = await router.route({
|
|
|
|
|
jsonrpc: '2.0',
|
|
|
|
|
id: 'err-1',
|
|
|
|
|
method: 'tools/call',
|
|
|
|
|
params: { name: 'flaky/do_thing', arguments: {} },
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
expect(response.error).toBeDefined();
|
|
|
|
|
expect(response.error?.message).toContain('mcpd proxy error');
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
it('falls back to unfiltered response when LLM fails', async () => {
|
|
|
|
|
const largePayload = Array.from({ length: 60 }, (_, i) => ({
|
|
|
|
|
id: i,
|
|
|
|
|
data: 'x'.repeat(30),
|
|
|
|
|
}));
|
|
|
|
|
|
|
|
|
|
mockMcpd = await startMockMcpd({
|
|
|
|
|
servers: [{ id: 'srv-fallback', name: 'fallbackserver', transport: 'stdio' }],
|
|
|
|
|
proxyResponses: new Map([
|
|
|
|
|
['srv-fallback:tools/list', {
|
|
|
|
|
result: { tools: [{ name: 'get_data', description: 'Get data' }] },
|
|
|
|
|
}],
|
|
|
|
|
['srv-fallback:tools/call', {
|
|
|
|
|
result: { items: largePayload },
|
|
|
|
|
}],
|
|
|
|
|
]),
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
const client = new McpdClient(mockMcpd.baseUrl, mockMcpd.config.expectedToken);
|
|
|
|
|
router = new McpRouter();
|
|
|
|
|
await refreshUpstreams(router, client);
|
|
|
|
|
await router.discoverTools();
|
|
|
|
|
|
|
|
|
|
// Use a failing LLM provider
|
|
|
|
|
const registry = new ProviderRegistry();
|
|
|
|
|
registry.register(createFailingLlmProvider('failing-llm'));
|
|
|
|
|
|
|
|
|
|
const processor = new LlmProcessor(registry, {
|
|
|
|
|
...DEFAULT_PROCESSOR_CONFIG,
|
|
|
|
|
enableFiltering: true,
|
|
|
|
|
tokenThreshold: 10, // Low threshold to trigger filtering
|
|
|
|
|
});
|
|
|
|
|
router.setLlmProcessor(processor);
|
|
|
|
|
|
|
|
|
|
// Call - LLM will fail, should fall back to original response
|
|
|
|
|
const response = await router.route({
|
|
|
|
|
jsonrpc: '2.0',
|
|
|
|
|
id: 'fallback-1',
|
|
|
|
|
method: 'tools/call',
|
|
|
|
|
params: { name: 'fallbackserver/get_data', arguments: {} },
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
expect(response.error).toBeUndefined();
|
|
|
|
|
// Should still get the original (unfiltered) response
|
|
|
|
|
const result = response.result as { items: Array<{ id: number; data: string }> };
|
|
|
|
|
expect(result.items).toHaveLength(60);
|
|
|
|
|
expect(result.items[0]?.data).toBe('x'.repeat(30));
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
it('returns proper error for unknown tool name', async () => {
|
|
|
|
|
mockMcpd = await startMockMcpd({
|
|
|
|
|
servers: [{ id: 'srv-known', name: 'knownserver', transport: 'stdio' }],
|
|
|
|
|
proxyResponses: new Map([
|
|
|
|
|
['srv-known:tools/list', {
|
|
|
|
|
result: { tools: [{ name: 'real_tool', description: 'Real' }] },
|
|
|
|
|
}],
|
|
|
|
|
]),
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
const client = new McpdClient(mockMcpd.baseUrl, mockMcpd.config.expectedToken);
|
|
|
|
|
router = new McpRouter();
|
|
|
|
|
await refreshUpstreams(router, client);
|
|
|
|
|
await router.discoverTools();
|
|
|
|
|
|
|
|
|
|
// Try to call a tool that doesn't exist
|
|
|
|
|
const response = await router.route({
|
|
|
|
|
jsonrpc: '2.0',
|
|
|
|
|
id: 'unknown-1',
|
|
|
|
|
method: 'tools/call',
|
|
|
|
|
params: { name: 'knownserver/nonexistent_tool', arguments: {} },
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
expect(response.error).toBeDefined();
|
|
|
|
|
expect(response.error?.code).toBe(-32601);
|
|
|
|
|
expect(response.error?.message).toContain('Unknown');
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
it('returns error for completely unknown server prefix', async () => {
|
|
|
|
|
router = new McpRouter();
|
|
|
|
|
|
|
|
|
|
const response = await router.route({
|
|
|
|
|
jsonrpc: '2.0',
|
|
|
|
|
id: 'no-server-1',
|
|
|
|
|
method: 'tools/call',
|
|
|
|
|
params: { name: 'nonexistent_server/some_tool', arguments: {} },
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
expect(response.error).toBeDefined();
|
|
|
|
|
expect(response.error?.code).toBe(-32601);
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
it('returns method not found for unsupported methods', async () => {
|
|
|
|
|
router = new McpRouter();
|
|
|
|
|
|
|
|
|
|
const response = await router.route({
|
|
|
|
|
jsonrpc: '2.0',
|
|
|
|
|
id: 'bad-method',
|
|
|
|
|
method: 'completions/complete',
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
expect(response.error).toBeDefined();
|
|
|
|
|
expect(response.error?.code).toBe(-32601);
|
|
|
|
|
expect(response.error?.message).toContain('Method not found');
|
|
|
|
|
});
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
// -----------------------------------------------------------------------
|
|
|
|
|
// 6. Health monitoring
|
|
|
|
|
// -----------------------------------------------------------------------
|
|
|
|
|
describe('Health monitoring', () => {
|
|
|
|
|
it('reports connected when mcpd is healthy', async () => {
|
|
|
|
|
mockMcpd = await startMockMcpd({
|
|
|
|
|
instances: [
|
|
|
|
|
{ name: 'slack', status: 'running' },
|
|
|
|
|
{ name: 'github', status: 'running' },
|
|
|
|
|
],
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
const client = new McpdClient(mockMcpd.baseUrl, mockMcpd.config.expectedToken);
|
|
|
|
|
const registry = new ProviderRegistry();
|
|
|
|
|
registry.register(createMockLlmProvider('test-health', () => '{}'));
|
|
|
|
|
|
|
|
|
|
const monitor = new TieredHealthMonitor({
|
|
|
|
|
mcpdClient: client,
|
|
|
|
|
providerRegistry: registry,
|
|
|
|
|
mcpdUrl: mockMcpd.baseUrl,
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
// Need to set router just so afterEach cleanup works
|
|
|
|
|
router = new McpRouter();
|
|
|
|
|
|
|
|
|
|
const health = await monitor.checkHealth();
|
|
|
|
|
|
|
|
|
|
expect(health.mcplocal.status).toBe('healthy');
|
|
|
|
|
expect(health.mcplocal.llmProvider).toBe('test-health');
|
|
|
|
|
expect(health.mcpd.status).toBe('connected');
|
|
|
|
|
expect(health.mcpd.url).toBe(mockMcpd.baseUrl);
|
|
|
|
|
expect(health.instances).toHaveLength(2);
|
|
|
|
|
expect(health.instances[0]).toEqual({ name: 'slack', status: 'running' });
|
|
|
|
|
expect(health.instances[1]).toEqual({ name: 'github', status: 'running' });
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
it('reports disconnected when mcpd is unreachable', async () => {
|
|
|
|
|
// Point at a port nothing is listening on
|
|
|
|
|
const client = new McpdClient('http://127.0.0.1:1', 'token');
|
|
|
|
|
const registry = new ProviderRegistry();
|
|
|
|
|
|
|
|
|
|
const monitor = new TieredHealthMonitor({
|
|
|
|
|
mcpdClient: client,
|
|
|
|
|
providerRegistry: registry,
|
|
|
|
|
mcpdUrl: 'http://127.0.0.1:1',
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
router = new McpRouter();
|
|
|
|
|
|
|
|
|
|
const health = await monitor.checkHealth();
|
|
|
|
|
|
|
|
|
|
expect(health.mcplocal.status).toBe('healthy');
|
|
|
|
|
expect(health.mcpd.status).toBe('disconnected');
|
|
|
|
|
expect(health.instances).toEqual([]);
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
it('reports disconnected when mcpdClient is null', async () => {
|
|
|
|
|
const registry = new ProviderRegistry();
|
|
|
|
|
|
|
|
|
|
const monitor = new TieredHealthMonitor({
|
|
|
|
|
mcpdClient: null,
|
|
|
|
|
providerRegistry: registry,
|
|
|
|
|
mcpdUrl: 'http://localhost:3100',
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
router = new McpRouter();
|
|
|
|
|
|
|
|
|
|
const health = await monitor.checkHealth();
|
|
|
|
|
|
|
|
|
|
expect(health.mcpd.status).toBe('disconnected');
|
|
|
|
|
expect(health.instances).toEqual([]);
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
it('reports null LLM provider when none registered', async () => {
|
|
|
|
|
const registry = new ProviderRegistry();
|
|
|
|
|
|
|
|
|
|
const monitor = new TieredHealthMonitor({
|
|
|
|
|
mcpdClient: null,
|
|
|
|
|
providerRegistry: registry,
|
|
|
|
|
mcpdUrl: 'http://localhost:3100',
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
router = new McpRouter();
|
|
|
|
|
|
|
|
|
|
const health = await monitor.checkHealth();
|
|
|
|
|
expect(health.mcplocal.llmProvider).toBeNull();
|
|
|
|
|
});
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
// -----------------------------------------------------------------------
|
|
|
|
|
// 7. Auth token propagation
|
|
|
|
|
// -----------------------------------------------------------------------
|
|
|
|
|
describe('Auth token propagation', () => {
|
|
|
|
|
it('sends Bearer token in all requests to mcpd', async () => {
|
|
|
|
|
const secretToken = 'super-secret-bearer-token-xyz';
|
|
|
|
|
|
|
|
|
|
mockMcpd = await startMockMcpd({
|
|
|
|
|
expectedToken: secretToken,
|
|
|
|
|
servers: [{ id: 'srv-auth', name: 'authserver', transport: 'stdio' }],
|
|
|
|
|
proxyResponses: new Map([
|
|
|
|
|
['srv-auth:tools/list', {
|
|
|
|
|
result: { tools: [{ name: 'protected_op', description: 'Protected operation' }] },
|
|
|
|
|
}],
|
|
|
|
|
['srv-auth:tools/call', {
|
|
|
|
|
result: { message: 'authorized access granted' },
|
|
|
|
|
}],
|
|
|
|
|
]),
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
const client = new McpdClient(mockMcpd.baseUrl, secretToken);
|
|
|
|
|
router = new McpRouter();
|
|
|
|
|
|
|
|
|
|
// Discovery calls
|
|
|
|
|
await refreshUpstreams(router, client);
|
|
|
|
|
await router.discoverTools();
|
|
|
|
|
|
|
|
|
|
// Tool call
|
|
|
|
|
await router.route({
|
|
|
|
|
jsonrpc: '2.0',
|
|
|
|
|
id: 'auth-1',
|
|
|
|
|
method: 'tools/call',
|
|
|
|
|
params: { name: 'authserver/protected_op', arguments: {} },
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
// Verify all requests carried the correct Bearer token
|
|
|
|
|
const requestLog = mockMcpd.config.requestLog;
|
|
|
|
|
expect(requestLog.length).toBeGreaterThan(0);
|
|
|
|
|
|
|
|
|
|
for (const entry of requestLog) {
|
|
|
|
|
expect(entry.authHeader).toBe(`Bearer ${secretToken}`);
|
|
|
|
|
}
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
it('receives 401 when token is wrong', async () => {
|
|
|
|
|
mockMcpd = await startMockMcpd({
|
|
|
|
|
expectedToken: 'correct-token',
|
|
|
|
|
servers: [{ id: 'srv-sec', name: 'secserver', transport: 'stdio' }],
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
// Client uses the wrong token
|
|
|
|
|
const client = new McpdClient(mockMcpd.baseUrl, 'wrong-token');
|
|
|
|
|
router = new McpRouter();
|
|
|
|
|
|
|
|
|
|
// refreshUpstreams should fail because mcpd rejects the auth
|
|
|
|
|
await expect(refreshUpstreams(router, client)).rejects.toThrow();
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
it('propagates token through McpdUpstream to proxy requests', async () => {
|
|
|
|
|
const token = 'upstream-bearer-token';
|
|
|
|
|
|
|
|
|
|
mockMcpd = await startMockMcpd({
|
|
|
|
|
expectedToken: token,
|
|
|
|
|
servers: [],
|
|
|
|
|
proxyResponses: new Map([
|
|
|
|
|
['srv-direct:tools/call', {
|
|
|
|
|
result: { data: 'success' },
|
|
|
|
|
}],
|
|
|
|
|
]),
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
const client = new McpdClient(mockMcpd.baseUrl, token);
|
|
|
|
|
router = new McpRouter();
|
|
|
|
|
|
|
|
|
|
// Manually create and add an upstream (bypassing discovery)
|
|
|
|
|
const upstream = new McpdUpstream('srv-direct', 'directserver', client);
|
|
|
|
|
router.addUpstream(upstream);
|
|
|
|
|
|
|
|
|
|
// Send a direct request through the upstream
|
|
|
|
|
const request: JsonRpcRequest = {
|
|
|
|
|
jsonrpc: '2.0',
|
|
|
|
|
id: 'direct-auth-1',
|
|
|
|
|
method: 'tools/call',
|
|
|
|
|
params: { name: 'test_tool', arguments: {} },
|
|
|
|
|
};
|
|
|
|
|
const response = await upstream.send(request);
|
|
|
|
|
|
|
|
|
|
expect(response.result).toEqual({ data: 'success' });
|
|
|
|
|
|
|
|
|
|
// Verify the proxy request carried the Bearer token
|
|
|
|
|
const proxyRequests = mockMcpd.config.requestLog.filter(
|
|
|
|
|
(r) => r.url === '/api/v1/mcp/proxy',
|
|
|
|
|
);
|
|
|
|
|
expect(proxyRequests.length).toBeGreaterThan(0);
|
|
|
|
|
for (const req of proxyRequests) {
|
|
|
|
|
expect(req.authHeader).toBe(`Bearer ${token}`);
|
|
|
|
|
}
|
|
|
|
|
});
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
// -----------------------------------------------------------------------
|
|
|
|
|
// Combined multi-tier scenario
|
|
|
|
|
// -----------------------------------------------------------------------
|
|
|
|
|
describe('Full multi-tier scenario', () => {
|
|
|
|
|
it('exercises the complete lifecycle: init -> discover -> call -> filter -> health', async () => {
|
|
|
|
|
const largeResult = {
|
|
|
|
|
records: Array.from({ length: 50 }, (_, i) => ({
|
|
|
|
|
id: i,
|
|
|
|
|
title: `Record ${String(i)}`,
|
|
|
|
|
body: 'x'.repeat(40),
|
|
|
|
|
})),
|
|
|
|
|
};
|
|
|
|
|
const filteredResult = {
|
|
|
|
|
records: [{ id: 0, title: 'Record 0' }, { id: 1, title: 'Record 1' }],
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
mockMcpd = await startMockMcpd({
|
|
|
|
|
servers: [
|
|
|
|
|
{ id: 'srv-wiki', name: 'wiki', transport: 'stdio' },
|
|
|
|
|
{ id: 'srv-jira', name: 'jira', transport: 'stdio' },
|
|
|
|
|
],
|
|
|
|
|
proxyResponses: new Map([
|
|
|
|
|
['srv-wiki:tools/list', {
|
|
|
|
|
result: {
|
|
|
|
|
tools: [
|
|
|
|
|
{ name: 'search_pages', description: 'Search wiki pages' },
|
|
|
|
|
{ name: 'create_page', description: 'Create a wiki page' },
|
|
|
|
|
],
|
|
|
|
|
},
|
|
|
|
|
}],
|
|
|
|
|
['srv-jira:tools/list', {
|
|
|
|
|
result: {
|
|
|
|
|
tools: [
|
|
|
|
|
{ name: 'search_issues', description: 'Search Jira issues' },
|
|
|
|
|
{ name: 'create_issue', description: 'Create a Jira issue' },
|
|
|
|
|
],
|
|
|
|
|
},
|
|
|
|
|
}],
|
|
|
|
|
['srv-wiki:tools/call', {
|
|
|
|
|
result: largeResult,
|
|
|
|
|
}],
|
|
|
|
|
['srv-jira:tools/call', {
|
|
|
|
|
result: { created: true, key: 'PROJ-123' },
|
|
|
|
|
}],
|
|
|
|
|
]),
|
|
|
|
|
instances: [
|
|
|
|
|
{ name: 'wiki', status: 'running' },
|
|
|
|
|
{ name: 'jira', status: 'running' },
|
|
|
|
|
],
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
const token = mockMcpd.config.expectedToken;
|
|
|
|
|
const client = new McpdClient(mockMcpd.baseUrl, token);
|
|
|
|
|
router = new McpRouter();
|
|
|
|
|
|
|
|
|
|
// Step 1: Initialize proxy
|
|
|
|
|
const initResponse = await router.route({
|
|
|
|
|
jsonrpc: '2.0',
|
|
|
|
|
id: 'lifecycle-init',
|
|
|
|
|
method: 'initialize',
|
|
|
|
|
});
|
|
|
|
|
expect(initResponse.error).toBeUndefined();
|
|
|
|
|
|
|
|
|
|
// Step 2: Discover servers
|
|
|
|
|
const registered = await refreshUpstreams(router, client);
|
|
|
|
|
expect(registered.sort()).toEqual(['jira', 'wiki']);
|
|
|
|
|
|
|
|
|
|
// Step 3: Discover tools
|
|
|
|
|
const tools = await router.discoverTools();
|
|
|
|
|
expect(tools).toHaveLength(4);
|
|
|
|
|
|
|
|
|
|
// Step 4: Set up LLM filtering
|
|
|
|
|
const registry = new ProviderRegistry();
|
|
|
|
|
const mockProvider = createMockLlmProvider('lifecycle-llm', () => {
|
|
|
|
|
return JSON.stringify(filteredResult);
|
|
|
|
|
});
|
|
|
|
|
registry.register(mockProvider);
|
|
|
|
|
|
|
|
|
|
const processor = new LlmProcessor(registry, {
|
|
|
|
|
...DEFAULT_PROCESSOR_CONFIG,
|
|
|
|
|
enableFiltering: true,
|
|
|
|
|
tokenThreshold: 10,
|
|
|
|
|
});
|
|
|
|
|
router.setLlmProcessor(processor);
|
|
|
|
|
|
|
|
|
|
// Step 5: Call a search tool -> should get filtered response
|
|
|
|
|
const searchResponse = await router.route({
|
|
|
|
|
jsonrpc: '2.0',
|
|
|
|
|
id: 'lifecycle-search',
|
|
|
|
|
method: 'tools/call',
|
|
|
|
|
params: { name: 'wiki/search_pages', arguments: { query: 'deployment' } },
|
|
|
|
|
});
|
|
|
|
|
expect(searchResponse.error).toBeUndefined();
|
|
|
|
|
const searchResult = searchResponse.result as { records: Array<{ id: number; title: string }> };
|
|
|
|
|
expect(searchResult.records).toHaveLength(2); // Filtered down
|
|
|
|
|
|
|
|
|
|
// Step 6: Call a create tool -> should bypass LLM
|
|
|
|
|
const createResponse = await router.route({
|
|
|
|
|
jsonrpc: '2.0',
|
|
|
|
|
id: 'lifecycle-create',
|
|
|
|
|
method: 'tools/call',
|
|
|
|
|
params: { name: 'jira/create_issue', arguments: { summary: 'New bug' } },
|
|
|
|
|
});
|
|
|
|
|
expect(createResponse.error).toBeUndefined();
|
|
|
|
|
expect((createResponse.result as Record<string, unknown>)['key']).toBe('PROJ-123');
|
|
|
|
|
|
|
|
|
|
// Step 7: Check health
|
|
|
|
|
const monitor = new TieredHealthMonitor({
|
|
|
|
|
mcpdClient: client,
|
|
|
|
|
providerRegistry: registry,
|
|
|
|
|
mcpdUrl: mockMcpd.baseUrl,
|
|
|
|
|
});
|
|
|
|
|
const health = await monitor.checkHealth();
|
|
|
|
|
expect(health.mcplocal.status).toBe('healthy');
|
|
|
|
|
expect(health.mcplocal.llmProvider).toBe('lifecycle-llm');
|
|
|
|
|
expect(health.mcpd.status).toBe('connected');
|
|
|
|
|
expect(health.instances).toHaveLength(2);
|
|
|
|
|
|
|
|
|
|
// Step 8: Verify all requests used auth
|
|
|
|
|
for (const entry of mockMcpd.config.requestLog) {
|
|
|
|
|
expect(entry.authHeader).toBe(`Bearer ${token}`);
|
|
|
|
|
}
|
|
|
|
|
});
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
// -----------------------------------------------------------------------
|
|
|
|
|
// 8. Smart pagination through the full pipeline
|
|
|
|
|
// -----------------------------------------------------------------------
|
|
|
|
|
describe('Smart pagination', () => {
|
|
|
|
|
// Helper: generate a large JSON response (~100KB)
|
|
|
|
|
function makeLargeToolResult(): { flows: Array<{ id: string; type: string; label: string; wires: string[] }> } {
|
|
|
|
|
return {
|
|
|
|
|
flows: Array.from({ length: 200 }, (_, i) => ({
|
|
|
|
|
id: `flow-${String(i).padStart(4, '0')}`,
|
|
|
|
|
type: i % 3 === 0 ? 'function' : i % 3 === 1 ? 'http request' : 'inject',
|
|
|
|
|
label: `Node ${String(i)}: ${i % 3 === 0 ? 'Data transform' : i % 3 === 1 ? 'API call' : 'Timer trigger'}`,
|
|
|
|
|
wires: [`flow-${String(i + 1).padStart(4, '0')}`],
|
|
|
|
|
})),
|
|
|
|
|
};
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
it('paginates large tool response with smart AI summaries through router', async () => {
|
|
|
|
|
const largeResult = makeLargeToolResult();
|
|
|
|
|
|
|
|
|
|
mockMcpd = await startMockMcpd({
|
|
|
|
|
servers: [{ id: 'srv-nodered', name: 'node-red', transport: 'stdio' }],
|
|
|
|
|
proxyResponses: new Map([
|
|
|
|
|
['srv-nodered:tools/list', {
|
|
|
|
|
result: { tools: [{ name: 'get_flows', description: 'Get all flows' }] },
|
|
|
|
|
}],
|
|
|
|
|
['srv-nodered:tools/call', {
|
|
|
|
|
result: largeResult,
|
|
|
|
|
}],
|
|
|
|
|
]),
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
const client = new McpdClient(mockMcpd.baseUrl, mockMcpd.config.expectedToken);
|
|
|
|
|
router = new McpRouter();
|
|
|
|
|
await refreshUpstreams(router, client);
|
|
|
|
|
await router.discoverTools();
|
|
|
|
|
|
|
|
|
|
// Set up paginator with LLM provider for smart summaries
|
|
|
|
|
const registry = new ProviderRegistry();
|
|
|
|
|
const completeFn = vi.fn().mockImplementation(() => ({
|
|
|
|
|
content: JSON.stringify([
|
|
|
|
|
{ page: 1, summary: 'Function nodes and data transforms (flow-0000 through flow-0050)' },
|
|
|
|
|
{ page: 2, summary: 'HTTP request nodes and API integrations (flow-0051 through flow-0100)' },
|
|
|
|
|
{ page: 3, summary: 'Inject/timer nodes and triggers (flow-0101 through flow-0150)' },
|
|
|
|
|
{ page: 4, summary: 'Remaining nodes and wire connections (flow-0151 through flow-0199)' },
|
|
|
|
|
]),
|
|
|
|
|
}));
|
|
|
|
|
const mockProvider: LlmProvider = {
|
|
|
|
|
name: 'test-paginator',
|
|
|
|
|
isAvailable: () => true,
|
|
|
|
|
complete: completeFn,
|
|
|
|
|
};
|
|
|
|
|
registry.register(mockProvider);
|
|
|
|
|
|
|
|
|
|
// Low threshold so our response triggers pagination
|
|
|
|
|
const paginator = new ResponsePaginator(registry, {
|
|
|
|
|
sizeThreshold: 1000,
|
|
|
|
|
pageSize: 8000,
|
|
|
|
|
});
|
|
|
|
|
router.setPaginator(paginator);
|
|
|
|
|
|
|
|
|
|
// Call the tool — should get pagination index, not raw data
|
|
|
|
|
const response = await router.route({
|
|
|
|
|
jsonrpc: '2.0',
|
|
|
|
|
id: 'paginate-1',
|
|
|
|
|
method: 'tools/call',
|
|
|
|
|
params: { name: 'node-red/get_flows', arguments: {} },
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
expect(response.error).toBeUndefined();
|
|
|
|
|
const result = response.result as { content: Array<{ type: string; text: string }> };
|
|
|
|
|
expect(result.content).toHaveLength(1);
|
|
|
|
|
const indexText = result.content[0]!.text;
|
|
|
|
|
|
|
|
|
|
// Verify smart index with AI summaries
|
|
|
|
|
expect(indexText).toContain('AI-generated summaries');
|
|
|
|
|
expect(indexText).toContain('Function nodes and data transforms');
|
|
|
|
|
expect(indexText).toContain('HTTP request nodes');
|
|
|
|
|
expect(indexText).toContain('_resultId');
|
|
|
|
|
expect(indexText).toContain('_page');
|
|
|
|
|
|
|
|
|
|
// LLM was called to generate summaries
|
|
|
|
|
expect(completeFn).toHaveBeenCalledOnce();
|
|
|
|
|
const llmCall = completeFn.mock.calls[0]![0]!;
|
|
|
|
|
expect(llmCall.messages[0].role).toBe('system');
|
|
|
|
|
expect(llmCall.messages[1].content).toContain('node-red/get_flows');
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
it('retrieves specific pages after pagination via _resultId/_page', async () => {
|
|
|
|
|
const largeResult = makeLargeToolResult();
|
|
|
|
|
|
|
|
|
|
mockMcpd = await startMockMcpd({
|
|
|
|
|
servers: [{ id: 'srv-nodered', name: 'node-red', transport: 'stdio' }],
|
|
|
|
|
proxyResponses: new Map([
|
|
|
|
|
['srv-nodered:tools/list', {
|
|
|
|
|
result: { tools: [{ name: 'get_flows', description: 'Get all flows' }] },
|
|
|
|
|
}],
|
|
|
|
|
['srv-nodered:tools/call', {
|
|
|
|
|
result: largeResult,
|
|
|
|
|
}],
|
|
|
|
|
]),
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
const client = new McpdClient(mockMcpd.baseUrl, mockMcpd.config.expectedToken);
|
|
|
|
|
router = new McpRouter();
|
|
|
|
|
await refreshUpstreams(router, client);
|
|
|
|
|
await router.discoverTools();
|
|
|
|
|
|
|
|
|
|
// Simple paginator (no LLM) for predictable behavior
|
|
|
|
|
const paginator = new ResponsePaginator(null, {
|
|
|
|
|
sizeThreshold: 1000,
|
|
|
|
|
pageSize: 8000,
|
|
|
|
|
});
|
|
|
|
|
router.setPaginator(paginator);
|
|
|
|
|
|
|
|
|
|
// First call — get the pagination index
|
|
|
|
|
const indexResponse = await router.route({
|
|
|
|
|
jsonrpc: '2.0',
|
|
|
|
|
id: 'idx-1',
|
|
|
|
|
method: 'tools/call',
|
|
|
|
|
params: { name: 'node-red/get_flows', arguments: {} },
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
expect(indexResponse.error).toBeUndefined();
|
|
|
|
|
const indexResult = indexResponse.result as { content: Array<{ text: string }> };
|
|
|
|
|
const indexText = indexResult.content[0]!.text;
|
|
|
|
|
const resultIdMatch = /"_resultId": "([^"]+)"/.exec(indexText);
|
|
|
|
|
expect(resultIdMatch).not.toBeNull();
|
|
|
|
|
const resultId = resultIdMatch![1]!;
|
|
|
|
|
|
|
|
|
|
// Second call — retrieve page 1 via _resultId/_page
|
|
|
|
|
const page1Response = await router.route({
|
|
|
|
|
jsonrpc: '2.0',
|
|
|
|
|
id: 'page-1',
|
|
|
|
|
method: 'tools/call',
|
|
|
|
|
params: {
|
|
|
|
|
name: 'node-red/get_flows',
|
|
|
|
|
arguments: { _resultId: resultId, _page: 1 },
|
|
|
|
|
},
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
expect(page1Response.error).toBeUndefined();
|
|
|
|
|
const page1Result = page1Response.result as { content: Array<{ text: string }> };
|
|
|
|
|
expect(page1Result.content[0]!.text).toContain('Page 1/');
|
|
|
|
|
// Page content should contain flow data
|
|
|
|
|
expect(page1Result.content[0]!.text).toContain('flow-');
|
|
|
|
|
|
|
|
|
|
// Third call — retrieve page 2
|
|
|
|
|
const page2Response = await router.route({
|
|
|
|
|
jsonrpc: '2.0',
|
|
|
|
|
id: 'page-2',
|
|
|
|
|
method: 'tools/call',
|
|
|
|
|
params: {
|
|
|
|
|
name: 'node-red/get_flows',
|
|
|
|
|
arguments: { _resultId: resultId, _page: 2 },
|
|
|
|
|
},
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
expect(page2Response.error).toBeUndefined();
|
|
|
|
|
const page2Result = page2Response.result as { content: Array<{ text: string }> };
|
|
|
|
|
expect(page2Result.content[0]!.text).toContain('Page 2/');
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
it('retrieves full content with _page=all', async () => {
|
|
|
|
|
const largeResult = makeLargeToolResult();
|
|
|
|
|
|
|
|
|
|
mockMcpd = await startMockMcpd({
|
|
|
|
|
servers: [{ id: 'srv-nodered', name: 'node-red', transport: 'stdio' }],
|
|
|
|
|
proxyResponses: new Map([
|
|
|
|
|
['srv-nodered:tools/list', {
|
|
|
|
|
result: { tools: [{ name: 'get_flows', description: 'Get all flows' }] },
|
|
|
|
|
}],
|
|
|
|
|
['srv-nodered:tools/call', {
|
|
|
|
|
result: largeResult,
|
|
|
|
|
}],
|
|
|
|
|
]),
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
const client = new McpdClient(mockMcpd.baseUrl, mockMcpd.config.expectedToken);
|
|
|
|
|
router = new McpRouter();
|
|
|
|
|
await refreshUpstreams(router, client);
|
|
|
|
|
await router.discoverTools();
|
|
|
|
|
|
|
|
|
|
const paginator = new ResponsePaginator(null, {
|
|
|
|
|
sizeThreshold: 1000,
|
|
|
|
|
pageSize: 8000,
|
|
|
|
|
});
|
|
|
|
|
router.setPaginator(paginator);
|
|
|
|
|
|
|
|
|
|
// Get index
|
|
|
|
|
const indexResponse = await router.route({
|
|
|
|
|
jsonrpc: '2.0',
|
|
|
|
|
id: 'all-idx',
|
|
|
|
|
method: 'tools/call',
|
|
|
|
|
params: { name: 'node-red/get_flows', arguments: {} },
|
|
|
|
|
});
|
|
|
|
|
const indexText = (indexResponse.result as { content: Array<{ text: string }> }).content[0]!.text;
|
|
|
|
|
const resultId = /"_resultId": "([^"]+)"/.exec(indexText)![1]!;
|
|
|
|
|
|
|
|
|
|
// Request all pages
|
|
|
|
|
const allResponse = await router.route({
|
|
|
|
|
jsonrpc: '2.0',
|
|
|
|
|
id: 'all-1',
|
|
|
|
|
method: 'tools/call',
|
|
|
|
|
params: {
|
|
|
|
|
name: 'node-red/get_flows',
|
|
|
|
|
arguments: { _resultId: resultId, _page: 'all' },
|
|
|
|
|
},
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
expect(allResponse.error).toBeUndefined();
|
|
|
|
|
const allResult = allResponse.result as { content: Array<{ text: string }> };
|
|
|
|
|
// Full response should contain the original JSON
|
|
|
|
|
const fullText = allResult.content[0]!.text;
|
|
|
|
|
expect(fullText).toContain('flow-0000');
|
|
|
|
|
expect(fullText).toContain('flow-0199');
|
|
|
|
|
// Should be the full serialized result
|
|
|
|
|
expect(JSON.parse(fullText)).toEqual(largeResult);
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
it('falls back to simple index when LLM fails', async () => {
|
|
|
|
|
const largeResult = makeLargeToolResult();
|
|
|
|
|
|
|
|
|
|
mockMcpd = await startMockMcpd({
|
|
|
|
|
servers: [{ id: 'srv-nodered', name: 'node-red', transport: 'stdio' }],
|
|
|
|
|
proxyResponses: new Map([
|
|
|
|
|
['srv-nodered:tools/list', {
|
|
|
|
|
result: { tools: [{ name: 'get_flows', description: 'Get all flows' }] },
|
|
|
|
|
}],
|
|
|
|
|
['srv-nodered:tools/call', {
|
|
|
|
|
result: largeResult,
|
|
|
|
|
}],
|
|
|
|
|
]),
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
const client = new McpdClient(mockMcpd.baseUrl, mockMcpd.config.expectedToken);
|
|
|
|
|
router = new McpRouter();
|
|
|
|
|
await refreshUpstreams(router, client);
|
|
|
|
|
await router.discoverTools();
|
|
|
|
|
|
|
|
|
|
// Set up paginator with a failing LLM
|
|
|
|
|
const registry = new ProviderRegistry();
|
|
|
|
|
registry.register(createFailingLlmProvider('broken-llm'));
|
|
|
|
|
const paginator = new ResponsePaginator(registry, {
|
|
|
|
|
sizeThreshold: 1000,
|
|
|
|
|
pageSize: 8000,
|
|
|
|
|
});
|
|
|
|
|
router.setPaginator(paginator);
|
|
|
|
|
|
|
|
|
|
const response = await router.route({
|
|
|
|
|
jsonrpc: '2.0',
|
|
|
|
|
id: 'fallback-idx',
|
|
|
|
|
method: 'tools/call',
|
|
|
|
|
params: { name: 'node-red/get_flows', arguments: {} },
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
expect(response.error).toBeUndefined();
|
|
|
|
|
const text = (response.result as { content: Array<{ text: string }> }).content[0]!.text;
|
|
|
|
|
// Should still paginate, just without AI summaries
|
|
|
|
|
expect(text).toContain('_resultId');
|
|
|
|
|
expect(text).not.toContain('AI-generated summaries');
|
|
|
|
|
expect(text).toContain('Page 1:');
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
it('returns expired cache message for stale _resultId', async () => {
|
|
|
|
|
router = new McpRouter();
|
|
|
|
|
const paginator = new ResponsePaginator(null, { sizeThreshold: 100, pageSize: 50 });
|
|
|
|
|
router.setPaginator(paginator);
|
|
|
|
|
|
|
|
|
|
// Try to retrieve a page with an unknown resultId
|
|
|
|
|
const response = await router.route({
|
|
|
|
|
jsonrpc: '2.0',
|
|
|
|
|
id: 'stale-1',
|
|
|
|
|
method: 'tools/call',
|
|
|
|
|
params: {
|
|
|
|
|
name: 'anything/tool',
|
|
|
|
|
arguments: { _resultId: 'nonexistent-id', _page: 1 },
|
|
|
|
|
},
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
expect(response.error).toBeUndefined();
|
|
|
|
|
const text = (response.result as { content: Array<{ text: string }> }).content[0]!.text;
|
|
|
|
|
expect(text).toContain('expired');
|
|
|
|
|
expect(text).toContain('re-call');
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
it('skips pagination for small responses', async () => {
|
|
|
|
|
mockMcpd = await startMockMcpd({
|
|
|
|
|
servers: [{ id: 'srv-small', name: 'smallserver', transport: 'stdio' }],
|
|
|
|
|
proxyResponses: new Map([
|
|
|
|
|
['srv-small:tools/list', {
|
|
|
|
|
result: { tools: [{ name: 'get_status', description: 'Get status' }] },
|
|
|
|
|
}],
|
|
|
|
|
['srv-small:tools/call', {
|
|
|
|
|
result: { status: 'ok', uptime: 12345 },
|
|
|
|
|
}],
|
|
|
|
|
]),
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
const client = new McpdClient(mockMcpd.baseUrl, mockMcpd.config.expectedToken);
|
|
|
|
|
router = new McpRouter();
|
|
|
|
|
await refreshUpstreams(router, client);
|
|
|
|
|
await router.discoverTools();
|
|
|
|
|
|
|
|
|
|
const paginator = new ResponsePaginator(null, { sizeThreshold: 80000, pageSize: 40000 });
|
|
|
|
|
router.setPaginator(paginator);
|
|
|
|
|
|
|
|
|
|
const response = await router.route({
|
|
|
|
|
jsonrpc: '2.0',
|
|
|
|
|
id: 'small-1',
|
|
|
|
|
method: 'tools/call',
|
|
|
|
|
params: { name: 'smallserver/get_status', arguments: {} },
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
expect(response.error).toBeUndefined();
|
|
|
|
|
// Should return the raw result directly, not a pagination index
|
|
|
|
|
expect(response.result).toEqual({ status: 'ok', uptime: 12345 });
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
it('handles markdown-fenced LLM responses (Gemini quirk)', async () => {
|
|
|
|
|
const largeResult = makeLargeToolResult();
|
|
|
|
|
|
|
|
|
|
mockMcpd = await startMockMcpd({
|
|
|
|
|
servers: [{ id: 'srv-nodered', name: 'node-red', transport: 'stdio' }],
|
|
|
|
|
proxyResponses: new Map([
|
|
|
|
|
['srv-nodered:tools/list', {
|
|
|
|
|
result: { tools: [{ name: 'get_flows', description: 'Get all flows' }] },
|
|
|
|
|
}],
|
|
|
|
|
['srv-nodered:tools/call', {
|
|
|
|
|
result: largeResult,
|
|
|
|
|
}],
|
|
|
|
|
]),
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
const client = new McpdClient(mockMcpd.baseUrl, mockMcpd.config.expectedToken);
|
|
|
|
|
router = new McpRouter();
|
|
|
|
|
await refreshUpstreams(router, client);
|
|
|
|
|
await router.discoverTools();
|
|
|
|
|
|
|
|
|
|
// Simulate Gemini wrapping JSON in ```json fences
|
|
|
|
|
const registry = new ProviderRegistry();
|
|
|
|
|
const mockProvider: LlmProvider = {
|
|
|
|
|
name: 'gemini-mock',
|
|
|
|
|
isAvailable: () => true,
|
|
|
|
|
complete: vi.fn().mockResolvedValue({
|
|
|
|
|
content: '```json\n' + JSON.stringify([
|
|
|
|
|
{ page: 1, summary: 'Climate automation flows' },
|
|
|
|
|
{ page: 2, summary: 'Lighting control flows' },
|
|
|
|
|
]) + '\n```',
|
|
|
|
|
}),
|
|
|
|
|
};
|
|
|
|
|
registry.register(mockProvider);
|
|
|
|
|
|
|
|
|
|
const paginator = new ResponsePaginator(registry, {
|
|
|
|
|
sizeThreshold: 1000,
|
|
|
|
|
pageSize: 8000,
|
|
|
|
|
});
|
|
|
|
|
router.setPaginator(paginator);
|
|
|
|
|
|
|
|
|
|
const response = await router.route({
|
|
|
|
|
jsonrpc: '2.0',
|
|
|
|
|
id: 'fence-1',
|
|
|
|
|
method: 'tools/call',
|
|
|
|
|
params: { name: 'node-red/get_flows', arguments: {} },
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
expect(response.error).toBeUndefined();
|
|
|
|
|
const text = (response.result as { content: Array<{ text: string }> }).content[0]!.text;
|
|
|
|
|
// Fences were stripped — smart summaries should appear
|
|
|
|
|
expect(text).toContain('AI-generated summaries');
|
|
|
|
|
expect(text).toContain('Climate automation flows');
|
|
|
|
|
expect(text).toContain('Lighting control flows');
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
it('passes model override to LLM when project has custom model', async () => {
|
|
|
|
|
const largeResult = makeLargeToolResult();
|
|
|
|
|
|
|
|
|
|
mockMcpd = await startMockMcpd({
|
|
|
|
|
servers: [{ id: 'srv-nodered', name: 'node-red', transport: 'stdio' }],
|
|
|
|
|
proxyResponses: new Map([
|
|
|
|
|
['srv-nodered:tools/list', {
|
|
|
|
|
result: { tools: [{ name: 'get_flows', description: 'Get all flows' }] },
|
|
|
|
|
}],
|
|
|
|
|
['srv-nodered:tools/call', {
|
|
|
|
|
result: largeResult,
|
|
|
|
|
}],
|
|
|
|
|
]),
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
const client = new McpdClient(mockMcpd.baseUrl, mockMcpd.config.expectedToken);
|
|
|
|
|
router = new McpRouter();
|
|
|
|
|
await refreshUpstreams(router, client);
|
|
|
|
|
await router.discoverTools();
|
|
|
|
|
|
|
|
|
|
const registry = new ProviderRegistry();
|
|
|
|
|
const completeFn = vi.fn().mockResolvedValue({
|
|
|
|
|
content: JSON.stringify([{ page: 1, summary: 'test' }]),
|
|
|
|
|
});
|
|
|
|
|
const mockProvider: LlmProvider = {
|
|
|
|
|
name: 'test-model-override',
|
|
|
|
|
isAvailable: () => true,
|
|
|
|
|
complete: completeFn,
|
|
|
|
|
};
|
|
|
|
|
registry.register(mockProvider);
|
|
|
|
|
|
|
|
|
|
// Paginator with per-project model override
|
|
|
|
|
const paginator = new ResponsePaginator(registry, {
|
|
|
|
|
sizeThreshold: 1000,
|
|
|
|
|
pageSize: 80000, // One big page so we get exactly 1 summary
|
|
|
|
|
}, 'gemini-2.5-pro');
|
|
|
|
|
router.setPaginator(paginator);
|
|
|
|
|
|
|
|
|
|
await router.route({
|
|
|
|
|
jsonrpc: '2.0',
|
|
|
|
|
id: 'model-1',
|
|
|
|
|
method: 'tools/call',
|
|
|
|
|
params: { name: 'node-red/get_flows', arguments: {} },
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
// Verify the model was passed through to the LLM call
|
|
|
|
|
expect(completeFn).toHaveBeenCalledOnce();
|
|
|
|
|
const llmOpts = completeFn.mock.calls[0]![0]!;
|
|
|
|
|
expect(llmOpts.model).toBe('gemini-2.5-pro');
|
|
|
|
|
});
|
|
|
|
|
});
|
|
|
|
});
|