feat: file cache, pause queue, hot-reload, and cache CLI commands

- Persistent file cache in ~/.mcpctl/cache/proxymodel/ with LRU eviction
- Pause queue for temporarily holding MCP traffic
- Hot-reload watcher for custom stages and proxymodel definitions
- CLI: mcpctl cache list/clear/stats commands
- HTTP endpoints for cache and pause management

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Michal
2026-03-07 23:36:55 +00:00
parent 1665b12c0c
commit a2728f280a
20 changed files with 2082 additions and 10 deletions

View File

@@ -0,0 +1,413 @@
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import * as fs from 'node:fs';
import * as path from 'node:path';
import * as os from 'node:os';
import { FileCache, parseMaxSize } from '../src/proxymodel/file-cache.js';
/** Create a unique throwaway directory for a single test run. */
function makeTmpDir(): string {
  const prefix = path.join(os.tmpdir(), 'mcpctl-cache-test-');
  return fs.mkdtempSync(prefix);
}
/** Recursively delete a directory, swallowing any error (best-effort cleanup). */
function rmrf(dir: string): void {
  try {
    fs.rmSync(dir, { recursive: true, force: true });
  } catch {
    // Cleanup is best-effort; a failure here must not fail the test run.
  }
}
// Unit tests for the two-level FileCache: L1 in-memory LRU backed by an
// L2 on-disk store (one subdirectory per namespace, one .dat file per entry).
describe('FileCache', () => {
  let tmpDir: string;

  beforeEach(() => {
    tmpDir = makeTmpDir();
  });

  afterEach(() => {
    rmrf(tmpDir);
  });

  // -- Basic get/set --

  it('returns null on cache miss', async () => {
    const cache = new FileCache('test-ns', { dir: tmpDir });
    expect(await cache.get('missing')).toBeNull();
  });

  it('stores and retrieves values', async () => {
    const cache = new FileCache('test-ns', { dir: tmpDir });
    await cache.set('key1', 'hello world');
    expect(await cache.get('key1')).toBe('hello world');
  });

  it('getOrCompute computes on miss, returns cached on hit', async () => {
    const cache = new FileCache('test-ns', { dir: tmpDir });
    let calls = 0;
    const compute = async () => { calls++; return 'computed'; };
    const v1 = await cache.getOrCompute('k', compute);
    const v2 = await cache.getOrCompute('k', compute);
    expect(v1).toBe('computed');
    expect(v2).toBe('computed');
    // The compute callback must run exactly once; the second call is a hit.
    expect(calls).toBe(1);
  });

  it('hash produces consistent 16-char hex strings', () => {
    const cache = new FileCache('test-ns', { dir: tmpDir });
    const h1 = cache.hash('hello');
    const h2 = cache.hash('hello');
    const h3 = cache.hash('world');
    expect(h1).toBe(h2); // deterministic
    expect(h1).not.toBe(h3); // input-sensitive
    expect(h1).toHaveLength(16);
    expect(/^[0-9a-f]+$/.test(h1)).toBe(true);
  });

  // -- Persistence (L2 disk) --

  it('persists values to disk across instances', async () => {
    const cache1 = new FileCache('persist-ns', { dir: tmpDir });
    await cache1.set('pk', 'persistent-value');
    // New instance, same namespace — should find it on disk
    const cache2 = new FileCache('persist-ns', { dir: tmpDir });
    expect(await cache2.get('pk')).toBe('persistent-value');
  });

  it('creates .dat files on disk', async () => {
    const cache = new FileCache('disk-ns', { dir: tmpDir });
    await cache.set('mykey', 'data');
    // Each namespace is a subdirectory named after it under the cache root.
    const nsDir = path.join(tmpDir, 'disk-ns');
    const files = fs.readdirSync(nsDir).filter((f) => f.endsWith('.dat'));
    expect(files.length).toBe(1);
  });

  // -- Namespace isolation --

  it('different namespaces are isolated', async () => {
    const cacheA = new FileCache('ns-alpha', { dir: tmpDir });
    const cacheB = new FileCache('ns-beta', { dir: tmpDir });
    await cacheA.set('shared-key', 'alpha-value');
    await cacheB.set('shared-key', 'beta-value');
    expect(await cacheA.get('shared-key')).toBe('alpha-value');
    expect(await cacheB.get('shared-key')).toBe('beta-value');
  });

  it('provider--model--proxymodel namespaces are separate', async () => {
    const ns1 = 'openai--gpt-4o--content-pipeline';
    const ns2 = 'anthropic--claude-sonnet-4-20250514--content-pipeline';
    const ns3 = 'openai--gpt-4o--default';
    const c1 = new FileCache(ns1, { dir: tmpDir });
    const c2 = new FileCache(ns2, { dir: tmpDir });
    const c3 = new FileCache(ns3, { dir: tmpDir });
    await c1.set('k', 'from-gpt4o-pipeline');
    await c2.set('k', 'from-claude-pipeline');
    await c3.set('k', 'from-gpt4o-default');
    expect(await c1.get('k')).toBe('from-gpt4o-pipeline');
    expect(await c2.get('k')).toBe('from-claude-pipeline');
    expect(await c3.get('k')).toBe('from-gpt4o-default');
    // Verify separate directories on disk
    const dirs = fs.readdirSync(tmpDir);
    expect(dirs.length).toBe(3);
  });

  // -- L1 memory cache --

  it('L1 memory cache has LRU eviction', async () => {
    const cache = new FileCache('lru-ns', { dir: tmpDir, maxMemoryEntries: 3 });
    await cache.set('a', '1');
    await cache.set('b', '2');
    await cache.set('c', '3');
    expect(cache.memorySize).toBe(3);
    await cache.set('d', '4');
    // Memory stays capped at maxMemoryEntries.
    expect(cache.memorySize).toBe(3);
    // 'a' evicted from memory but still on disk
    cache.clearMemory();
    expect(await cache.get('a')).toBe('1'); // restored from disk
  });

  it('get refreshes LRU position in memory', async () => {
    const cache = new FileCache('lru2-ns', { dir: tmpDir, maxMemoryEntries: 3 });
    await cache.set('a', '1');
    await cache.set('b', '2');
    await cache.set('c', '3');
    // Access 'a' so it becomes the most-recently-used entry.
    await cache.get('a');
    // Adding 'd' should evict 'b' (now the oldest), not 'a'.
    await cache.set('d', '4');
    // FIX: the original test called clearMemory() here and then read 'b'
    // from disk, which passes regardless of LRU order — it never verified
    // the refresh. Instead, remove the namespace's backing directory (its
    // name matches the namespace — see "creates .dat files on disk" above)
    // so reads can only be served from the L1 memory cache.
    rmrf(path.join(tmpDir, 'lru2-ns'));
    // 'a' was refreshed by the get() above, so it must still be in memory...
    expect(await cache.get('a')).toBe('1');
    // ...while 'b' was evicted from memory and its disk copy is gone.
    expect(await cache.get('b')).toBeNull();
  });

  it('clearMemory only clears L1, not disk', async () => {
    const cache = new FileCache('clear-ns', { dir: tmpDir });
    await cache.set('k', 'val');
    expect(cache.memorySize).toBe(1);
    cache.clearMemory();
    expect(cache.memorySize).toBe(0);
    // Still on disk
    expect(await cache.get('k')).toBe('val');
    expect(cache.memorySize).toBe(1); // re-loaded into L1
  });

  // -- Static: stats --

  it('stats returns empty for non-existent dir', () => {
    const stats = FileCache.stats(path.join(tmpDir, 'nonexistent'));
    expect(stats.totalEntries).toBe(0);
    expect(stats.totalSize).toBe(0);
    expect(stats.namespaces).toHaveLength(0);
  });

  it('stats reports per-namespace breakdown', async () => {
    const c1 = new FileCache('ns-one', { dir: tmpDir });
    const c2 = new FileCache('ns-two', { dir: tmpDir });
    await c1.set('a', 'hello');
    await c1.set('b', 'world');
    await c2.set('x', 'data');
    const stats = FileCache.stats(tmpDir);
    expect(stats.totalEntries).toBe(3);
    expect(stats.namespaces).toHaveLength(2);
    const one = stats.namespaces.find((ns) => ns.name === 'ns-one');
    const two = stats.namespaces.find((ns) => ns.name === 'ns-two');
    expect(one?.entries).toBe(2);
    expect(two?.entries).toBe(1);
    expect(stats.totalSize).toBeGreaterThan(0);
  });

  // -- Static: clear --

  it('clear removes all entries', async () => {
    const c = new FileCache('clear-all', { dir: tmpDir });
    await c.set('a', '1');
    await c.set('b', '2');
    const result = FileCache.clear({ rootDir: tmpDir });
    expect(result.removed).toBe(2);
    expect(result.freedBytes).toBeGreaterThan(0);
    const stats = FileCache.stats(tmpDir);
    expect(stats.totalEntries).toBe(0);
  });

  it('clear with namespace only removes that namespace', async () => {
    const c1 = new FileCache('keep-me', { dir: tmpDir });
    const c2 = new FileCache('delete-me', { dir: tmpDir });
    await c1.set('a', '1');
    await c2.set('b', '2');
    const result = FileCache.clear({ rootDir: tmpDir, namespace: 'delete-me' });
    expect(result.removed).toBe(1);
    const stats = FileCache.stats(tmpDir);
    expect(stats.totalEntries).toBe(1);
    const withEntries = stats.namespaces.filter((ns) => ns.entries > 0);
    expect(withEntries).toHaveLength(1);
    expect(withEntries[0].name).toBe('keep-me');
  });

  // -- Static: cleanup (TTL + size limit) --

  it('cleanup evicts entries exceeding maxSizeBytes', async () => {
    // Create entries that exceed a 50-byte limit
    const c = new FileCache('big-ns', { dir: tmpDir });
    await c.set('a', 'x'.repeat(30));
    await c.set('b', 'y'.repeat(30));
    await c.set('c', 'z'.repeat(30));
    const before = FileCache.stats(tmpDir);
    expect(before.totalEntries).toBe(3);
    // Cleanup with 50-byte limit (well below 90 bytes of content); the huge
    // TTL ensures only the size limit, not age, can trigger eviction here.
    const result = FileCache.cleanup(tmpDir, 50, 365 * 24 * 60 * 60 * 1000);
    expect(result.removed).toBeGreaterThan(0);
    const after = FileCache.stats(tmpDir);
    expect(after.totalSize).toBeLessThanOrEqual(50);
  });

  // -- Keys with special characters --

  it('handles keys with colons and special chars', async () => {
    const cache = new FileCache('special-ns', { dir: tmpDir });
    await cache.set('summary:abc123:200', 'summarized content');
    expect(await cache.get('summary:abc123:200')).toBe('summarized content');
  });

  it('handles very long keys', async () => {
    const cache = new FileCache('long-ns', { dir: tmpDir });
    const longKey = 'a'.repeat(500);
    await cache.set(longKey, 'value');
    expect(await cache.get(longKey)).toBe('value');
  });
});
// -- parseMaxSize --
// Tests for the human-readable size-spec parser (numbers, unit suffixes,
// percentages of the target filesystem).
describe('parseMaxSize', () => {
  it('passes through numbers directly', () => {
    expect(parseMaxSize(1024)).toBe(1024);
    expect(parseMaxSize(0)).toBe(0);
  });

  it('parses byte units', () => {
    const KIB = 1024;
    // [spec, expected bytes] — binary (1024-based) units throughout.
    const cases: Array<[string, number]> = [
      ['100B', 100],
      ['1KB', KIB],
      ['256MB', 256 * KIB ** 2],
      ['1GB', KIB ** 3],
      ['2TB', 2 * KIB ** 4],
    ];
    for (const [spec, expected] of cases) {
      expect(parseMaxSize(spec)).toBe(expected);
    }
  });

  it('handles fractional values', () => {
    expect(parseMaxSize('1.5GB')).toBe(Math.floor(1.5 * 1024 ** 3));
    expect(parseMaxSize('0.5MB')).toBe(Math.floor(0.5 * 1024 ** 2));
  });

  it('is case-insensitive', () => {
    expect(parseMaxSize('256mb')).toBe(256 * 1024 ** 2);
    expect(parseMaxSize('1gb')).toBe(1024 ** 3);
    expect(parseMaxSize('1Gb')).toBe(1024 ** 3);
  });

  it('trims whitespace', () => {
    expect(parseMaxSize(' 256MB ')).toBe(256 * 1024 ** 2);
    expect(parseMaxSize(' 1 GB ')).toBe(1024 ** 3);
  });

  it('parses plain number strings', () => {
    expect(parseMaxSize('1048576')).toBe(1048576);
  });

  it('parses percentage (resolves against filesystem)', () => {
    // The exact value depends on the machine's disk, so only sanity-check it.
    const tenPercent = parseMaxSize('10%', '/tmp');
    expect(tenPercent).toBeGreaterThan(0);
    expect(typeof tenPercent).toBe('number');
  });

  it('percentage of 100% equals full partition', () => {
    const whole = parseMaxSize('100%', '/tmp');
    const half = parseMaxSize('50%', '/tmp');
    // 50% should be roughly half of 100% (within rounding)
    expect(Math.abs(half - whole / 2)).toBeLessThan(1024);
  });

  it('throws on invalid specs', () => {
    // Garbage text, empty string, out-of-range percentages, negative sizes.
    for (const bad of ['abc', '', '0%', '101%', '-5MB']) {
      expect(() => parseMaxSize(bad)).toThrow();
    }
  });
});
// -- Namespace isolation for LLM provider/model/proxymodel combos --
// Verifies that caches keyed as `${provider}--${model}--${proxyModel}` never
// leak entries into one another.
describe('FileCache namespace isolation', () => {
  let tmpDir: string;

  beforeEach(() => {
    tmpDir = makeTmpDir();
  });

  afterEach(() => {
    rmrf(tmpDir);
  });

  // Representative combinations; any single differing component must yield
  // a fully separate cache.
  const combos = [
    { provider: 'openai', model: 'gpt-4o', proxyModel: 'content-pipeline' },
    { provider: 'openai', model: 'gpt-4o-mini', proxyModel: 'content-pipeline' },
    { provider: 'anthropic', model: 'claude-sonnet-4-20250514', proxyModel: 'content-pipeline' },
    { provider: 'openai', model: 'gpt-4o', proxyModel: 'default' },
    { provider: 'vllm', model: 'qwen-72b', proxyModel: 'content-pipeline' },
  ];

  it('each provider--model--proxymodel combo gets its own cache', async () => {
    const caches = combos.map(
      ({ provider, model, proxyModel }) =>
        new FileCache(`${provider}--${model}--${proxyModel}`, { dir: tmpDir }),
    );
    // Write the same key with a distinct value into every cache.
    for (const [index, cache] of caches.entries()) {
      await cache.set('summary-key', `value-from-combo-${index}`);
    }
    // Each cache must read back exactly the value it wrote.
    for (const [index, cache] of caches.entries()) {
      expect(await cache.get('summary-key')).toBe(`value-from-combo-${index}`);
    }
    // Stats must report one namespace (and one entry) per combination.
    const stats = FileCache.stats(tmpDir);
    expect(stats.namespaces).toHaveLength(combos.length);
    expect(stats.totalEntries).toBe(combos.length);
  });

  it('changing only the model creates a separate cache', async () => {
    const fullModel = new FileCache('openai--gpt-4o--content-pipeline', { dir: tmpDir });
    const miniModel = new FileCache('openai--gpt-4o-mini--content-pipeline', { dir: tmpDir });
    await fullModel.set('k', 'gpt4o-result');
    await miniModel.set('k', 'mini-result');
    expect(await fullModel.get('k')).toBe('gpt4o-result');
    expect(await miniModel.get('k')).toBe('mini-result');
  });

  it('changing only the provider creates a separate cache', async () => {
    const viaOpenai = new FileCache('openai--gpt-4o--content-pipeline', { dir: tmpDir });
    const viaAnthropic = new FileCache('anthropic--gpt-4o--content-pipeline', { dir: tmpDir });
    await viaOpenai.set('k', 'openai-result');
    await viaAnthropic.set('k', 'anthropic-result');
    expect(await viaOpenai.get('k')).toBe('openai-result');
    expect(await viaAnthropic.get('k')).toBe('anthropic-result');
  });

  it('changing only the proxyModel creates a separate cache', async () => {
    const pipeline = new FileCache('openai--gpt-4o--content-pipeline', { dir: tmpDir });
    const fallback = new FileCache('openai--gpt-4o--default', { dir: tmpDir });
    await pipeline.set('k', 'pipeline-result');
    await fallback.set('k', 'default-result');
    expect(await pipeline.get('k')).toBe('pipeline-result');
    expect(await fallback.get('k')).toBe('default-result');
  });

  it('clearing one namespace leaves others intact', async () => {
    const gptCache = new FileCache('openai--gpt-4o--content-pipeline', { dir: tmpDir });
    const claudeCache = new FileCache('anthropic--claude-sonnet-4-20250514--content-pipeline', { dir: tmpDir });
    await gptCache.set('k', 'v1');
    await claudeCache.set('k', 'v2');
    FileCache.clear({ rootDir: tmpDir, namespace: 'openai--gpt-4o--content-pipeline' });
    // Fresh instances bypass any L1 state held by the instances above.
    const freshGpt = new FileCache('openai--gpt-4o--content-pipeline', { dir: tmpDir });
    const freshClaude = new FileCache('anthropic--claude-sonnet-4-20250514--content-pipeline', { dir: tmpDir });
    expect(await freshGpt.get('k')).toBeNull();
    expect(await freshClaude.get('k')).toBe('v2');
  });
});

View File

@@ -0,0 +1,135 @@
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import { mkdtempSync, writeFileSync, mkdirSync, rmSync, unlinkSync } from 'node:fs';
import { join } from 'node:path';
import { tmpdir } from 'node:os';
import { getStage, listStages, loadCustomStages, clearCustomStages } from '../src/proxymodel/stage-registry.js';
// Scratch directory each test writes its stage modules into.
let tempDir: string;
// Fresh temp dir and a clean custom-stage registry before every test.
beforeEach(() => {
tempDir = mkdtempSync(join(tmpdir(), 'mcpctl-hotreload-'));
clearCustomStages();
});
// Clear again afterwards so no stage from this file leaks into other
// suites, then delete the scratch directory.
afterEach(() => {
clearCustomStages();
rmSync(tempDir, { recursive: true, force: true });
});
// Exercises loadCustomStages()'s cache-busting: re-importing a changed stage
// module must pick up the new code, removed files must disappear, and a
// broken file must not take down the reload.
//
// FIX: the original used `Parameters<typeof handler>[1]`, but getStage() can
// return null and Parameters<T> only accepts a function type, so that does
// not compile under strict settings — wrap with NonNullable<> first.
describe('Hot-reload: stage registry cache busting', () => {
  it('loadCustomStages loads .js files', async () => {
    writeFileSync(join(tempDir, 'echo.js'), `
export default async function(content, ctx) {
return { content: 'v1:' + content };
}
`);
    await loadCustomStages(tempDir);
    const handler = getStage('echo');
    expect(handler).not.toBeNull();
    const result = await handler!('hello', {} as Parameters<NonNullable<typeof handler>>[1]);
    expect(result.content).toBe('v1:hello');
  });

  it('reloading picks up file changes via cache busting', async () => {
    writeFileSync(join(tempDir, 'transform.js'), `
export default async function(content) {
return { content: 'v1:' + content };
}
`);
    await loadCustomStages(tempDir);
    let handler = getStage('transform');
    let result = await handler!('test', {} as Parameters<NonNullable<typeof handler>>[1]);
    expect(result.content).toBe('v1:test');
    // Overwrite the file with a new version
    writeFileSync(join(tempDir, 'transform.js'), `
export default async function(content) {
return { content: 'v2:' + content };
}
`);
    // Reload — should pick up the new version due to cache busting
    await loadCustomStages(tempDir);
    handler = getStage('transform');
    result = await handler!('test', {} as Parameters<NonNullable<typeof handler>>[1]);
    expect(result.content).toBe('v2:test');
  });

  it('removing a file removes the stage on reload', async () => {
    writeFileSync(join(tempDir, 'temp.js'), `
export default async function(content) {
return { content };
}
`);
    await loadCustomStages(tempDir);
    expect(getStage('temp')).not.toBeNull();
    unlinkSync(join(tempDir, 'temp.js'));
    await loadCustomStages(tempDir);
    expect(getStage('temp')).toBeNull();
  });

  it('adding a new file makes it available on reload', async () => {
    await loadCustomStages(tempDir);
    expect(getStage('newstage')).toBeNull();
    writeFileSync(join(tempDir, 'newstage.js'), `
export default async function(content) {
return { content: 'new:' + content };
}
`);
    await loadCustomStages(tempDir);
    const handler = getStage('newstage');
    expect(handler).not.toBeNull();
    const result = await handler!('x', {} as Parameters<NonNullable<typeof handler>>[1]);
    expect(result.content).toBe('new:x');
  });

  it('syntax errors in stage files do not crash reload', async () => {
    writeFileSync(join(tempDir, 'good.js'), `
export default async function(content) {
return { content };
}
`);
    writeFileSync(join(tempDir, 'bad.js'), 'this is not valid javascript{{{');
    await loadCustomStages(tempDir);
    // Good stage should still load
    expect(getStage('good')).not.toBeNull();
    // Bad stage should not be present
    expect(getStage('bad')).toBeNull();
  });

  it('supports .mjs files', async () => {
    writeFileSync(join(tempDir, 'mjs-stage.mjs'), `
export default async function(content) {
return { content: 'mjs:' + content };
}
`);
    await loadCustomStages(tempDir);
    const handler = getStage('mjs-stage');
    expect(handler).not.toBeNull();
    const result = await handler!('hi', {} as Parameters<NonNullable<typeof handler>>[1]);
    expect(result.content).toBe('mjs:hi');
  });

  it('listStages shows custom stages as local', async () => {
    writeFileSync(join(tempDir, 'custom.js'), `
export default async function(content) {
return { content };
}
`);
    await loadCustomStages(tempDir);
    const stages = listStages();
    const custom = stages.find((s) => s.name === 'custom');
    expect(custom).toBeDefined();
    expect(custom!.source).toBe('local');
  });
});

View File

@@ -0,0 +1,202 @@
import { describe, it, expect, beforeEach } from 'vitest';
import { pauseQueue } from '../src/proxymodel/pause-queue.js';
// The module-level pauseQueue singleton is shared across tests, so force it
// back to the un-paused state before each one; setPaused(false) also
// releases anything a previous test left queued (see the resume test below).
beforeEach(() => {
// Reset state between tests
pauseQueue.setPaused(false);
});
// Unit tests for the shared pauseQueue singleton: it passes transformed
// content straight through when un-paused, and while paused it holds each
// enqueued item until an operator releases, edits, or drops it.
//
// FIX: the intentionally-unawaited enqueue() calls were floating promises
// (flagged by no-floating-promises); they are now marked with `void` — they
// are resolved later by releaseAll(), so behavior is unchanged.
describe('PauseQueue', () => {
  it('returns transformed content immediately when not paused', async () => {
    const result = await pauseQueue.enqueue({
      sessionId: 's1',
      projectName: 'proj',
      contentType: 'toolResult',
      sourceName: 'test/tool',
      original: 'raw content',
      transformed: 'processed content',
    });
    expect(result).toBe('processed content');
    expect(pauseQueue.size).toBe(0);
  });

  it('holds content when paused', async () => {
    pauseQueue.setPaused(true);
    let resolved = false;
    const promise = pauseQueue.enqueue({
      sessionId: 's1',
      projectName: 'proj',
      contentType: 'toolResult',
      sourceName: 'test/tool',
      original: 'raw',
      transformed: 'processed',
    }).then((r) => { resolved = true; return r; });
    // Should not resolve immediately
    await new Promise((r) => setTimeout(r, 50));
    expect(resolved).toBe(false);
    expect(pauseQueue.size).toBe(1);
    // Release it
    const items = pauseQueue.getItems();
    expect(items).toHaveLength(1);
    pauseQueue.releaseOne(items[0]!.id);
    const result = await promise;
    expect(result).toBe('processed');
    expect(resolved).toBe(true);
    expect(pauseQueue.size).toBe(0);
  });

  it('editAndRelease sends edited content', async () => {
    pauseQueue.setPaused(true);
    const promise = pauseQueue.enqueue({
      sessionId: 's1',
      projectName: 'proj',
      contentType: 'toolResult',
      sourceName: 'test/tool',
      original: 'raw',
      transformed: 'auto-processed',
    });
    const items = pauseQueue.getItems();
    // The manually edited text wins over the automatic transformation.
    pauseQueue.editAndRelease(items[0]!.id, 'manually-edited');
    const result = await promise;
    expect(result).toBe('manually-edited');
  });

  it('dropOne sends empty content', async () => {
    pauseQueue.setPaused(true);
    const promise = pauseQueue.enqueue({
      sessionId: 's1',
      projectName: 'proj',
      contentType: 'toolResult',
      sourceName: 'test/tool',
      original: 'raw',
      transformed: 'processed',
    });
    const items = pauseQueue.getItems();
    pauseQueue.dropOne(items[0]!.id);
    const result = await promise;
    expect(result).toBe('');
  });

  it('setPaused(false) releases all queued items', async () => {
    pauseQueue.setPaused(true);
    const p1 = pauseQueue.enqueue({
      sessionId: 's1', projectName: 'proj', contentType: 'toolResult',
      sourceName: 'tool1', original: 'r1', transformed: 't1',
    });
    const p2 = pauseQueue.enqueue({
      sessionId: 's2', projectName: 'proj', contentType: 'toolResult',
      sourceName: 'tool2', original: 'r2', transformed: 't2',
    });
    expect(pauseQueue.size).toBe(2);
    // Resume releases all
    pauseQueue.setPaused(false);
    const [r1, r2] = await Promise.all([p1, p2]);
    expect(r1).toBe('t1');
    expect(r2).toBe('t2');
    expect(pauseQueue.size).toBe(0);
  });

  it('releaseAll releases all items and returns count', async () => {
    pauseQueue.setPaused(true);
    const p1 = pauseQueue.enqueue({
      sessionId: 's1', projectName: 'proj', contentType: 'toolResult',
      sourceName: 'tool1', original: 'r1', transformed: 't1',
    });
    const p2 = pauseQueue.enqueue({
      sessionId: 's1', projectName: 'proj', contentType: 'toolResult',
      sourceName: 'tool2', original: 'r2', transformed: 't2',
    });
    const count = pauseQueue.releaseAll();
    expect(count).toBe(2);
    const [r1, r2] = await Promise.all([p1, p2]);
    expect(r1).toBe('t1');
    expect(r2).toBe('t2');
  });

  it('returns false for operations on non-existent IDs', () => {
    expect(pauseQueue.releaseOne('nonexistent')).toBe(false);
    expect(pauseQueue.editAndRelease('nonexistent', 'x')).toBe(false);
    expect(pauseQueue.dropOne('nonexistent')).toBe(false);
  });

  it('getItems strips internal resolve function', async () => {
    pauseQueue.setPaused(true);
    // Intentionally unawaited: resolved by releaseAll() at the end.
    void pauseQueue.enqueue({
      sessionId: 's1', projectName: 'proj', contentType: 'toolResult',
      sourceName: 'tool', original: 'raw', transformed: 'processed',
    });
    const items = pauseQueue.getItems();
    expect(items).toHaveLength(1);
    const item = items[0]!;
    expect(item.id).toBeTruthy();
    expect(item.sessionId).toBe('s1');
    expect(item.projectName).toBe('proj');
    expect(item.contentType).toBe('toolResult');
    expect(item.sourceName).toBe('tool');
    expect(item.original).toBe('raw');
    expect(item.transformed).toBe('processed');
    expect(item.timestamp).toBeGreaterThan(0);
    // Must not expose resolve
    expect((item as Record<string, unknown>)['resolve']).toBeUndefined();
    // Cleanup
    pauseQueue.releaseAll();
  });

  it('notifies subscribers on enqueue and release', async () => {
    const notifications: number[] = [];
    const unsub = pauseQueue.subscribe((items) => notifications.push(items.length));
    pauseQueue.setPaused(true);
    // Intentionally unawaited: resolved by releaseAll() below.
    void pauseQueue.enqueue({
      sessionId: 's1', projectName: 'proj', contentType: 'toolResult',
      sourceName: 'tool', original: 'r', transformed: 't',
    });
    expect(notifications).toContain(1);
    pauseQueue.releaseAll();
    expect(notifications).toContain(0);
    unsub();
  });

  it('each item gets a unique ID', async () => {
    pauseQueue.setPaused(true);
    // Both enqueues are intentionally unawaited; releaseAll() resolves them.
    void pauseQueue.enqueue({
      sessionId: 's1', projectName: 'proj', contentType: 'toolResult',
      sourceName: 'tool1', original: 'r1', transformed: 't1',
    });
    void pauseQueue.enqueue({
      sessionId: 's1', projectName: 'proj', contentType: 'toolResult',
      sourceName: 'tool2', original: 'r2', transformed: 't2',
    });
    const items = pauseQueue.getItems();
    expect(items).toHaveLength(2);
    expect(items[0]!.id).not.toBe(items[1]!.id);
    pauseQueue.releaseAll();
  });
});

View File

@@ -0,0 +1,126 @@
import { describe, it, expect, beforeAll } from 'vitest';
import http from 'node:http';
import { isMcplocalRunning, mcpctl } from './mcp-client.js';
// Base URL of the mcplocal server under test; override via MCPLOCAL_URL.
const MCPLOCAL_URL = process.env['MCPLOCAL_URL'] ?? 'http://localhost:3200';
// Set once in beforeAll; when false every test below returns early (soft skip).
let available = false;
/**
 * GET/DELETE a JSON endpoint on the mcplocal server.
 * Never rejects: network errors, timeouts, and non-JSON bodies all resolve
 * to null so callers can treat the server as simply unavailable.
 */
function fetchJson<T>(urlPath: string, method = 'GET'): Promise<T | null> {
  return new Promise((resolve) => {
    const onResponse = (res: http.IncomingMessage): void => {
      const pieces: Buffer[] = [];
      res.on('data', (piece: Buffer) => pieces.push(piece));
      res.on('end', () => {
        let parsed: T | null = null;
        try {
          parsed = JSON.parse(Buffer.concat(pieces).toString()) as T;
        } catch {
          parsed = null;
        }
        resolve(parsed);
      });
    };
    const req = http.request(`${MCPLOCAL_URL}${urlPath}`, { method, timeout: 5000 }, onResponse);
    req.on('error', () => resolve(null));
    req.on('timeout', () => {
      req.destroy();
      resolve(null);
    });
    req.end();
  });
}
// Probe the server once; tests soft-skip (return early) when it is down.
beforeAll(async () => {
available = await isMcplocalRunning();
});
/** Shape of the GET /cache/stats JSON response from mcplocal. */
interface CacheStats {
rootDir: string; // cache root directory on the server
totalSize: number; // presumably bytes (matches ClearResult.freedBytes) — confirm against server
totalEntries: number;
namespaces: Array<{ name: string; entries: number; size: number }>;
}
/** Shape of the DELETE /cache JSON response. */
interface ClearResult {
removed: number; // number of entries deleted
freedBytes: number; // disk space reclaimed, in bytes
}
// Smoke tests against a live mcplocal instance; every test no-ops when the
// server is unreachable (see `available`).
describe('Cache smoke tests', () => {
describe('mcplocal /cache endpoints', () => {
it('GET /cache/stats returns valid stats structure', async () => {
if (!available) return;
const stats = await fetchJson<CacheStats>('/cache/stats');
expect(stats).not.toBeNull();
expect(stats).toHaveProperty('rootDir');
expect(stats).toHaveProperty('totalSize');
expect(stats).toHaveProperty('totalEntries');
expect(stats).toHaveProperty('namespaces');
expect(Array.isArray(stats!.namespaces)).toBe(true);
expect(typeof stats!.totalSize).toBe('number');
expect(typeof stats!.totalEntries).toBe('number');
});
it('namespaces use provider--model--proxymodel format', async () => {
if (!available) return;
const stats = await fetchJson<CacheStats>('/cache/stats');
// Nothing to check when the cache is empty — bail out without failing.
if (!stats || stats.namespaces.length === 0) return;
// Each namespace should contain -- separators
// (only shape is asserted here; the separator itself is checked in the
// 'cache namespace isolation' suite below, which tolerates exceptions)
for (const ns of stats.namespaces) {
expect(ns.name).toBeTruthy();
expect(typeof ns.entries).toBe('number');
expect(typeof ns.size).toBe('number');
}
});
it('DELETE /cache returns clear result', async () => {
if (!available) return;
// NOTE: this really does empty the cache; entries are recomputed on
// demand, so clearing is acceptable in a smoke-test environment.
const result = await fetchJson<ClearResult>('/cache', 'DELETE');
expect(result).not.toBeNull();
expect(result).toHaveProperty('removed');
expect(result).toHaveProperty('freedBytes');
expect(typeof result!.removed).toBe('number');
expect(typeof result!.freedBytes).toBe('number');
});
});
describe('mcpctl cache CLI', () => {
it('mcpctl cache stats shows cache statistics', async () => {
if (!available) return;
const output = await mcpctl('cache stats');
// Should either show table or "Cache is empty."
expect(output.length).toBeGreaterThan(0);
const hasTable = output.includes('NAMESPACE');
const isEmpty = output.includes('Cache is empty');
expect(hasTable || isEmpty).toBe(true);
});
it('mcpctl cache clear runs without error', async () => {
if (!available) return;
const output = await mcpctl('cache clear');
// Should report what was cleared, or that cache is empty
expect(output).toMatch(/[Cc]lear|empty/i);
});
});
describe('cache namespace isolation', () => {
it('stats show separate namespaces per llm provider/model/proxymodel combo', async () => {
if (!available) return;
// After any project MCP sessions have run, check that namespaces
// follow the provider--model--proxymodel convention
const stats = await fetchJson<CacheStats>('/cache/stats');
if (!stats || stats.namespaces.length === 0) return;
// Namespaces with -- separators indicate proper isolation
const separated = stats.namespaces.filter((ns) => ns.name.includes('--'));
// If there are namespaces, at least some should have the separator format
// (the 'dynamic' namespace from hot-swap is an exception)
if (stats.namespaces.length > 1) {
expect(separated.length).toBeGreaterThan(0);
}
});
});
});

View File

@@ -0,0 +1,64 @@
import { describe, it, expect, beforeAll } from 'vitest';
import http from 'node:http';
import { isMcplocalRunning } from './mcp-client.js';
// Base URL of the mcplocal server under test; override via MCPLOCAL_URL.
const MCPLOCAL_URL = process.env['MCPLOCAL_URL'] ?? 'http://localhost:3200';
// Set once in beforeAll; when false every test below returns early (soft skip).
let available = false;
/**
 * Issue an HTTP request with an optional JSON body and parse the JSON reply.
 * Never rejects: errors, timeouts, and unparseable bodies resolve to null.
 * NOTE(review): this helper is duplicated across the smoke-test files —
 * consider extracting it into a shared test utility.
 */
function fetchJson<T>(urlPath: string, method = 'GET', body?: unknown): Promise<T | null> {
  return new Promise((resolve) => {
    const payload = body !== undefined ? JSON.stringify(body) : undefined;
    const headers = payload
      ? { 'Content-Type': 'application/json', 'Content-Length': Buffer.byteLength(payload) }
      : {};
    const onResponse = (res: http.IncomingMessage): void => {
      const pieces: Buffer[] = [];
      res.on('data', (piece: Buffer) => pieces.push(piece));
      res.on('end', () => {
        try {
          resolve(JSON.parse(Buffer.concat(pieces).toString()) as T);
        } catch {
          resolve(null);
        }
      });
    };
    const req = http.request(`${MCPLOCAL_URL}${urlPath}`, { method, timeout: 5000, headers }, onResponse);
    req.on('error', () => resolve(null));
    req.on('timeout', () => {
      req.destroy();
      resolve(null);
    });
    if (payload) req.write(payload);
    req.end();
  });
}
// Probe the server once; tests soft-skip (return early) when it is down.
beforeAll(async () => {
available = await isMcplocalRunning();
});
// Smoke tests for the stage-listing and reload HTTP endpoints on a live
// mcplocal instance; each test no-ops when the server is unreachable.
describe('Hot-reload smoke tests', () => {
  describe('GET /proxymodels/stages', () => {
    it('returns list of stages with source', async () => {
      if (!available) return;
      type StageInfo = { name: string; source: string };
      const stages = await fetchJson<StageInfo[]>('/proxymodels/stages');
      expect(stages).not.toBeNull();
      expect(Array.isArray(stages)).toBe(true);
      expect(stages!.length).toBeGreaterThan(0);
      // The built-in passthrough stage must always be registered.
      const builtIn = stages!.find((stage) => stage.name === 'passthrough');
      expect(builtIn).toBeDefined();
      expect(builtIn!.source).toBe('built-in');
    });
  });

  describe('POST /proxymodels/reload', () => {
    it('reloads stages and returns count', async () => {
      if (!available) return;
      const reply = await fetchJson<{ loaded: number }>('/proxymodels/reload', 'POST');
      expect(reply).not.toBeNull();
      expect(typeof reply!.loaded).toBe('number');
    });
  });
});

View File

@@ -0,0 +1,143 @@
import { describe, it, expect, beforeAll, afterAll } from 'vitest';
import http from 'node:http';
import { isMcplocalRunning } from './mcp-client.js';
// Base URL of the mcplocal server under test; override via MCPLOCAL_URL.
const MCPLOCAL_URL = process.env['MCPLOCAL_URL'] ?? 'http://localhost:3200';
// Set once in beforeAll; when false every test below returns early (soft skip).
let available = false;
/**
 * Issue an HTTP request with an optional JSON body and parse the JSON reply.
 * Never rejects: errors, timeouts, and unparseable bodies resolve to null.
 * NOTE(review): this helper is duplicated across the smoke-test files —
 * consider extracting it into a shared test utility.
 */
function fetchJson<T>(urlPath: string, method = 'GET', body?: unknown): Promise<T | null> {
  return new Promise((resolve) => {
    const payload = body !== undefined ? JSON.stringify(body) : undefined;
    const options: http.RequestOptions = {
      method,
      timeout: 5000,
      headers: payload
        ? { 'Content-Type': 'application/json', 'Content-Length': Buffer.byteLength(payload) }
        : {},
    };
    const req = http.request(`${MCPLOCAL_URL}${urlPath}`, options, (res) => {
      const collected: Buffer[] = [];
      res.on('data', (chunk: Buffer) => collected.push(chunk));
      res.on('end', () => {
        let value: T | null;
        try {
          value = JSON.parse(Buffer.concat(collected).toString()) as T;
        } catch {
          value = null;
        }
        resolve(value);
      });
    });
    req.on('error', () => resolve(null));
    req.on('timeout', () => {
      req.destroy();
      resolve(null);
    });
    if (payload) req.write(payload);
    req.end();
  });
}
// Probe the server once; tests soft-skip (return early) when it is down.
beforeAll(async () => {
available = await isMcplocalRunning();
});
afterAll(async () => {
// Always ensure pause is off after tests — a paused proxy would keep
// holding real MCP traffic for anything that runs after this suite.
if (available) {
await fetchJson('/pause', 'PUT', { paused: false });
}
});
// Smoke tests for the pause-queue HTTP endpoints on a live mcplocal
// instance; each test no-ops when the server is unreachable.
describe('Pause Queue smoke tests', () => {
  type ErrorReply = { error: string };
  type PauseState = { paused: boolean; queueSize: number };

  describe('GET /pause', () => {
    it('returns pause state', async () => {
      if (!available) return;
      const snapshot = await fetchJson<PauseState>('/pause');
      expect(snapshot).not.toBeNull();
      expect(typeof snapshot!.paused).toBe('boolean');
      expect(typeof snapshot!.queueSize).toBe('number');
    });
  });

  describe('PUT /pause', () => {
    it('can enable and disable pause mode', async () => {
      if (!available) return;
      // Turn pausing on...
      const enabled = await fetchJson<PauseState>('/pause', 'PUT', { paused: true });
      expect(enabled).not.toBeNull();
      expect(enabled!.paused).toBe(true);
      // ...confirm via a separate read...
      const observed = await fetchJson<{ paused: boolean }>('/pause');
      expect(observed!.paused).toBe(true);
      // ...then turn it back off.
      const disabled = await fetchJson<PauseState>('/pause', 'PUT', { paused: false });
      expect(disabled).not.toBeNull();
      expect(disabled!.paused).toBe(false);
    });

    it('rejects non-boolean paused value', async () => {
      if (!available) return;
      const reply = await fetchJson<ErrorReply>('/pause', 'PUT', { paused: 'yes' });
      expect(reply).not.toBeNull();
      expect(reply!.error).toBeTruthy();
    });
  });

  describe('GET /pause/queue', () => {
    it('returns empty queue when not paused', async () => {
      if (!available) return;
      const reply = await fetchJson<{ paused: boolean; items: unknown[] }>('/pause/queue');
      expect(reply).not.toBeNull();
      expect(Array.isArray(reply!.items)).toBe(true);
    });
  });

  describe('POST /pause/release-all', () => {
    it('returns released count', async () => {
      if (!available) return;
      const reply = await fetchJson<{ released: number; queueSize: number }>('/pause/release-all', 'POST');
      expect(reply).not.toBeNull();
      expect(typeof reply!.released).toBe('number');
      expect(reply!.queueSize).toBe(0);
    });
  });

  describe('POST /pause/queue/:id/release', () => {
    it('returns 404 for non-existent item', async () => {
      if (!available) return;
      const reply = await fetchJson<ErrorReply>('/pause/queue/nonexistent/release', 'POST');
      expect(reply).not.toBeNull();
      expect(reply!.error).toMatch(/not found/i);
    });
  });

  describe('POST /pause/queue/:id/edit', () => {
    it('returns 404 for non-existent item', async () => {
      if (!available) return;
      const reply = await fetchJson<ErrorReply>('/pause/queue/nonexistent/edit', 'POST', { content: 'test' });
      expect(reply).not.toBeNull();
      expect(reply!.error).toMatch(/not found/i);
    });

    it('rejects missing content', async () => {
      if (!available) return;
      const reply = await fetchJson<ErrorReply>('/pause/queue/nonexistent/edit', 'POST', {});
      expect(reply).not.toBeNull();
      expect(reply!.error).toBeTruthy();
    });
  });

  describe('POST /pause/queue/:id/drop', () => {
    it('returns 404 for non-existent item', async () => {
      if (!available) return;
      const reply = await fetchJson<ErrorReply>('/pause/queue/nonexistent/drop', 'POST');
      expect(reply).not.toBeNull();
      expect(reply!.error).toMatch(/not found/i);
    });
  });
});