feat: file cache, pause queue, hot-reload, and cache CLI commands

- Persistent file cache in ~/.mcpctl/cache/<namespace>/ with LRU eviction
- Pause queue for temporarily holding MCP traffic
- Hot-reload watcher for custom stages and proxymodel definitions
- CLI: mcpctl cache list/clear/stats commands
- HTTP endpoints for cache and pause management

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Michal
2026-03-07 23:36:55 +00:00
parent 1665b12c0c
commit a2728f280a
20 changed files with 2082 additions and 10 deletions

View File

@@ -0,0 +1,137 @@
import { Command } from 'commander';
import http from 'node:http';
/** Injected dependencies for the `mcpctl cache` command group. */
export interface CacheCommandDeps {
  /** Output sink for user-facing text (normally console.log). */
  log: (...args: string[]) => void;
  /** Base URL of the mcplocal HTTP server. Default: http://localhost:3200. */
  mcplocalUrl?: string;
}
/** Mirror of mcplocal's per-namespace cache stats payload. */
interface NamespaceStats {
  name: string;
  /** Number of cached entries in this namespace. */
  entries: number;
  /** Total bytes on disk for this namespace. */
  size: number;
  /** Epoch ms of the oldest entry; 0 when unknown/empty. */
  oldestMs: number;
  /** Epoch ms of the newest entry; 0 when unknown/empty. */
  newestMs: number;
}
/** Mirror of mcplocal's GET /cache/stats response body. */
interface CacheStats {
  rootDir: string;
  totalSize: number;
  totalEntries: number;
  namespaces: NamespaceStats[];
}
/** Mirror of mcplocal's DELETE /cache response body. */
interface ClearResult {
  removed: number;
  freedBytes: number;
}
/**
 * Render a byte count as a short human-readable string ("1.5 KB", "12 MB").
 * Values under 10 in their unit keep one decimal; larger values are rounded.
 */
function formatBytes(bytes: number): string {
  if (bytes === 0) return '0 B';
  const units = ['B', 'KB', 'MB', 'GB'];
  const exponent = Math.min(Math.floor(Math.log(bytes) / Math.log(1024)), units.length - 1);
  const scaled = bytes / Math.pow(1024, exponent);
  const rendered = scaled < 10 ? scaled.toFixed(1) : String(Math.round(scaled));
  return `${rendered} ${units[exponent]}`;
}
/**
 * Render an epoch-ms timestamp as a relative age ("3d ago", "5m ago").
 * A value of 0 means "unknown" and renders as "-".
 */
function formatAge(ms: number): string {
  if (ms === 0) return '-';
  const elapsed = Date.now() - ms;
  const MINUTE = 60 * 1000;
  const HOUR = 60 * MINUTE;
  const DAY = 24 * HOUR;
  if (elapsed >= DAY) return `${Math.floor(elapsed / DAY)}d ago`;
  if (elapsed >= HOUR) return `${Math.floor(elapsed / HOUR)}h ago`;
  return `${Math.floor(elapsed / MINUTE)}m ago`;
}
/**
 * Minimal JSON-over-HTTP helper for talking to mcplocal.
 *
 * @param url    Full URL of the mcplocal endpoint.
 * @param method HTTP verb; defaults to GET.
 * @returns Parsed JSON body on a 2xx response.
 * @throws Error on connection failure, timeout, non-2xx status, or
 *         a body that is not valid JSON.
 */
function fetchJson<T>(url: string, method = 'GET'): Promise<T> {
  return new Promise((resolve, reject) => {
    const req = http.request(url, { method, timeout: 5000 }, (res) => {
      let data = '';
      res.on('data', (chunk: Buffer) => { data += chunk.toString(); });
      res.on('end', () => {
        // FIX: previously the status code was never checked, so an error
        // response with a JSON body was silently treated as valid data.
        const status = res.statusCode ?? 0;
        if (status < 200 || status >= 300) {
          reject(new Error(`mcplocal returned HTTP ${status}: ${data.slice(0, 200)}`));
          return;
        }
        try {
          resolve(JSON.parse(data) as T);
        } catch {
          reject(new Error(`Invalid response from mcplocal: ${data.slice(0, 200)}`));
        }
      });
    });
    req.on('error', () => reject(new Error('Cannot connect to mcplocal. Is it running?')));
    req.on('timeout', () => { req.destroy(); reject(new Error('mcplocal request timed out')); });
    req.end();
  });
}
export function createCacheCommand(deps: CacheCommandDeps): Command {
const cache = new Command('cache')
.description('Manage ProxyModel pipeline cache');
const mcplocalUrl = deps.mcplocalUrl ?? 'http://localhost:3200';
cache
.command('stats')
.description('Show cache statistics')
.action(async () => {
const stats = await fetchJson<CacheStats>(`${mcplocalUrl}/cache/stats`);
if (stats.totalEntries === 0) {
deps.log('Cache is empty.');
return;
}
deps.log(`Cache: ${formatBytes(stats.totalSize)} total, ${stats.totalEntries} entries`);
deps.log(`Path: ${stats.rootDir}`);
deps.log('');
// Table header
const pad = (s: string, w: number) => s.padEnd(w);
deps.log(
`${pad('NAMESPACE', 40)} ${pad('ENTRIES', 8)} ${pad('SIZE', 10)} ${pad('OLDEST', 12)} NEWEST`,
);
deps.log(
`${pad('-'.repeat(40), 40)} ${pad('-'.repeat(8), 8)} ${pad('-'.repeat(10), 10)} ${pad('-'.repeat(12), 12)} ${'-'.repeat(12)}`,
);
for (const ns of stats.namespaces) {
deps.log(
`${pad(ns.name, 40)} ${pad(String(ns.entries), 8)} ${pad(formatBytes(ns.size), 10)} ${pad(formatAge(ns.oldestMs), 12)} ${formatAge(ns.newestMs)}`,
);
}
});
cache
.command('clear')
.description('Clear cache entries')
.argument('[namespace]', 'Clear only this namespace')
.option('--older-than <days>', 'Clear entries older than N days')
.option('-y, --yes', 'Skip confirmation')
.action(async (namespace: string | undefined, opts: { olderThan?: string; yes?: boolean }) => {
// Show what will be cleared first
const stats = await fetchJson<CacheStats>(`${mcplocalUrl}/cache/stats`);
if (stats.totalEntries === 0) {
deps.log('Cache is already empty.');
return;
}
const target = namespace
? stats.namespaces.find((ns) => ns.name === namespace)
: null;
if (namespace && !target) {
deps.log(`Namespace '${namespace}' not found.`);
deps.log(`Available: ${stats.namespaces.map((ns) => ns.name).join(', ')}`);
return;
}
const olderThan = opts.olderThan ? `?olderThan=${opts.olderThan}` : '';
const url = namespace
? `${mcplocalUrl}/cache/${encodeURIComponent(namespace)}${olderThan}`
: `${mcplocalUrl}/cache${olderThan}`;
const result = await fetchJson<ClearResult>(url, 'DELETE');
deps.log(`Cleared ${result.removed} entries, freed ${formatBytes(result.freedBytes)}`);
});
return cache;
}

View File

@@ -16,6 +16,7 @@ import { createAttachServerCommand, createDetachServerCommand, createApproveComm
import { createMcpCommand } from './commands/mcp.js'; import { createMcpCommand } from './commands/mcp.js';
import { createPatchCommand } from './commands/patch.js'; import { createPatchCommand } from './commands/patch.js';
import { createConsoleCommand } from './commands/console/index.js'; import { createConsoleCommand } from './commands/console/index.js';
import { createCacheCommand } from './commands/cache.js';
import { ApiClient, ApiError } from './api-client.js'; import { ApiClient, ApiError } from './api-client.js';
import { loadConfig } from './config/index.js'; import { loadConfig } from './config/index.js';
import { loadCredentials } from './auth/index.js'; import { loadCredentials } from './auth/index.js';
@@ -211,6 +212,11 @@ export function createProgram(): Command {
getProject: () => program.opts().project as string | undefined, getProject: () => program.opts().project as string | undefined,
})); }));
program.addCommand(createCacheCommand({
log: (...args) => console.log(...args),
mcplocalUrl: config.mcplocalUrl,
}));
return program; return program;
} }

View File

@@ -0,0 +1,36 @@
/**
* Cache management endpoints.
*
* GET /cache/stats → per-namespace stats
* DELETE /cache → clear all
* DELETE /cache/:namespace → clear specific namespace
* DELETE /cache?olderThan=<days> → clear entries older than N days
*/
import type { FastifyInstance } from 'fastify';
import { FileCache } from '../proxymodel/file-cache.js';
/**
 * Register cache management routes on the given Fastify instance.
 * See the file header for the route table.
 */
export function registerCacheEndpoint(app: FastifyInstance): void {
  /**
   * Convert an `olderThan` query value (days) to milliseconds.
   * Returns undefined when absent, null when present but invalid.
   */
  const olderThanToMs = (raw: string | undefined): number | undefined | null => {
    if (!raw) return undefined;
    const days = Number(raw);
    if (!Number.isFinite(days) || days <= 0) return null;
    return days * 24 * 60 * 60 * 1000;
  };

  // GET /cache/stats — aggregate per-namespace entry counts and sizes.
  app.get('/cache/stats', async (_request, reply) => {
    reply.code(200).send(FileCache.stats());
  });

  // DELETE /cache[?olderThan=N] — clear every namespace.
  app.delete<{ Querystring: { olderThan?: string } }>('/cache', async (request, reply) => {
    const olderThanMs = olderThanToMs(request.query.olderThan);
    if (olderThanMs === null) {
      // FIX: previously `Number(olderThan)` producing NaN (e.g. "abc") or a
      // negative value fell through the truthiness check and cleared the
      // ENTIRE cache. Reject bad input explicitly instead.
      reply.code(400).send({ error: 'olderThan must be a positive number of days' });
      return;
    }
    const opts: { olderThanMs?: number } = {};
    if (olderThanMs !== undefined) opts.olderThanMs = olderThanMs;
    reply.code(200).send(FileCache.clear(opts));
  });

  // DELETE /cache/:namespace[?olderThan=N] — clear a single namespace.
  app.delete<{ Params: { namespace: string }; Querystring: { olderThan?: string } }>(
    '/cache/:namespace',
    async (request, reply) => {
      const olderThanMs = olderThanToMs(request.query.olderThan);
      if (olderThanMs === null) {
        reply.code(400).send({ error: 'olderThan must be a positive number of days' });
        return;
      }
      const opts: { namespace: string; olderThanMs?: number } = { namespace: request.params.namespace };
      if (olderThanMs !== undefined) opts.olderThanMs = olderThanMs;
      reply.code(200).send(FileCache.clear(opts));
    },
  );
}

View File

@@ -0,0 +1,89 @@
/**
* Pause Queue HTTP endpoints.
*
* GET /pause — current pause state + queue size
* PUT /pause — set pause state { paused: boolean }
* GET /pause/queue — list all paused items
* POST /pause/queue/:id/release — release item with transformed content
* POST /pause/queue/:id/edit — edit content and release { content: string }
* POST /pause/queue/:id/drop — drop item (empty response to client)
* POST /pause/release-all — release all paused items
*/
import type { FastifyInstance } from 'fastify';
import { pauseQueue } from '../proxymodel/pause-queue.js';
/**
 * Register pause-queue routes on the given Fastify instance.
 * See the file header for the full route table.
 */
export function registerPauseEndpoint(app: FastifyInstance): void {
  /** Current toggle state plus queue depth — returned by several routes. */
  const statePayload = () => ({
    paused: pauseQueue.paused,
    queueSize: pauseQueue.size,
  });

  /** Standard 404 body for an unknown queue-item id. */
  const missingItem = (id: string) => ({ error: `Item '${id}' not found in queue` });

  // GET /pause — report whether traffic is paused and how much is queued.
  app.get('/pause', async (_request, reply) => {
    reply.send(statePayload());
  });

  // PUT /pause — flip the pause toggle; body must be { paused: boolean }.
  app.put<{ Body: { paused: boolean } }>('/pause', async (request, reply) => {
    const { paused } = request.body ?? {};
    if (typeof paused !== 'boolean') {
      reply.code(400).send({ error: 'paused must be a boolean' });
      return;
    }
    pauseQueue.setPaused(paused);
    reply.send(statePayload());
  });

  // GET /pause/queue — enumerate every held item.
  app.get('/pause/queue', async (_request, reply) => {
    reply.send({
      paused: pauseQueue.paused,
      items: pauseQueue.getItems(),
    });
  });

  // POST /pause/queue/:id/release — forward the transformed content as-is.
  app.post<{ Params: { id: string } }>('/pause/queue/:id/release', async (request, reply) => {
    const { id } = request.params;
    if (!pauseQueue.releaseOne(id)) {
      reply.code(404).send(missingItem(id));
      return;
    }
    reply.send({ released: true, queueSize: pauseQueue.size });
  });

  // POST /pause/queue/:id/edit — replace the content, then release.
  app.post<{
    Params: { id: string };
    Body: { content: string };
  }>('/pause/queue/:id/edit', async (request, reply) => {
    const { content } = request.body ?? {};
    if (typeof content !== 'string') {
      reply.code(400).send({ error: 'content must be a string' });
      return;
    }
    const { id } = request.params;
    if (!pauseQueue.editAndRelease(id, content)) {
      reply.code(404).send(missingItem(id));
      return;
    }
    reply.send({ edited: true, queueSize: pauseQueue.size });
  });

  // POST /pause/queue/:id/drop — discard the item; client gets empty content.
  app.post<{ Params: { id: string } }>('/pause/queue/:id/drop', async (request, reply) => {
    const { id } = request.params;
    if (!pauseQueue.dropOne(id)) {
      reply.code(404).send(missingItem(id));
      return;
    }
    reply.send({ dropped: true, queueSize: pauseQueue.size });
  });

  // POST /pause/release-all — flush the whole queue with transformed content.
  app.post('/pause/release-all', async (_request, reply) => {
    const count = pauseQueue.releaseAll();
    reply.send({ released: count, queueSize: 0 });
  });
}

View File

@@ -20,7 +20,7 @@ import type { ProviderRegistry } from '../providers/registry.js';
import type { JsonRpcRequest } from '../types.js'; import type { JsonRpcRequest } from '../types.js';
import type { TrafficCapture } from './traffic.js'; import type { TrafficCapture } from './traffic.js';
import { LLMProviderAdapter } from '../proxymodel/llm-adapter.js'; import { LLMProviderAdapter } from '../proxymodel/llm-adapter.js';
import { MemoryCache } from '../proxymodel/cache.js'; import { FileCache } from '../proxymodel/file-cache.js';
import { createDefaultPlugin } from '../proxymodel/plugins/default.js'; import { createDefaultPlugin } from '../proxymodel/plugins/default.js';
import { AuditCollector } from '../audit/collector.js'; import { AuditCollector } from '../audit/collector.js';
@@ -91,7 +91,13 @@ export function registerProjectMcpEndpoint(app: FastifyInstance, mcpdClient: Mcp
complete: async () => '', complete: async () => '',
available: () => false, available: () => false,
}; };
const cache = new MemoryCache(); // Build cache namespace: provider--model--proxymodel
const llmProvider = localOverride?.provider ?? mcpdConfig.llmProvider
?? effectiveRegistry?.getTierProviders('fast')[0]
?? effectiveRegistry?.getActiveName()
?? 'none';
const llmModel = resolvedModel ?? 'default';
const cache = new FileCache(`${llmProvider}--${llmModel}--${proxyModelName}`);
router.setProxyModel(proxyModelName, llmAdapter, cache); router.setProxyModel(proxyModelName, llmAdapter, cache);
// Per-server proxymodel overrides (if mcpd provides them) // Per-server proxymodel overrides (if mcpd provides them)
@@ -382,7 +388,7 @@ export function registerProjectMcpEndpoint(app: FastifyInstance, mcpdClient: Mcp
const llmAdapter = providerRegistry const llmAdapter = providerRegistry
? new LLMProviderAdapter(providerRegistry) ? new LLMProviderAdapter(providerRegistry)
: { complete: async () => '', available: () => false }; : { complete: async () => '', available: () => false };
const cache = new MemoryCache(); const cache = new FileCache('dynamic');
if (serverName && serverProxyModel) { if (serverName && serverProxyModel) {
entry.router.setServerProxyModel(serverName, serverProxyModel, llmAdapter, cache); entry.router.setServerProxyModel(serverName, serverProxyModel, llmAdapter, cache);

View File

@@ -69,6 +69,20 @@ export function registerProxymodelEndpoint(app: FastifyInstance): void {
reply.code(200).send(result); reply.code(200).send(result);
}); });
// GET /proxymodels/stages — list all available stages
// Must be registered before :name to avoid being caught by the param route
app.get('/proxymodels/stages', async (_request, reply) => {
const { listStages } = await import('../proxymodel/stage-registry.js');
reply.code(200).send(listStages());
});
// POST /proxymodels/reload — force reload custom stages from disk
app.post('/proxymodels/reload', async (_request, reply) => {
const { reloadStages } = await import('../proxymodel/watcher.js');
const result = await reloadStages();
reply.code(200).send(result);
});
// GET /proxymodels/:name — single model details // GET /proxymodels/:name — single model details
app.get<{ Params: { name: string } }>('/proxymodels/:name', async (request, reply) => { app.get<{ Params: { name: string } }>('/proxymodels/:name', async (request, reply) => {
const { name } = request.params; const { name } = request.params;

View File

@@ -9,7 +9,7 @@ import type { FastifyInstance } from 'fastify';
import { executePipeline } from '../proxymodel/executor.js'; import { executePipeline } from '../proxymodel/executor.js';
import { getProxyModel } from '../proxymodel/loader.js'; import { getProxyModel } from '../proxymodel/loader.js';
import { LLMProviderAdapter } from '../proxymodel/llm-adapter.js'; import { LLMProviderAdapter } from '../proxymodel/llm-adapter.js';
import { MemoryCache } from '../proxymodel/cache.js'; import { FileCache } from '../proxymodel/file-cache.js';
import type { ProviderRegistry } from '../providers/registry.js'; import type { ProviderRegistry } from '../providers/registry.js';
import type { ContentType, Section } from '../proxymodel/types.js'; import type { ContentType, Section } from '../proxymodel/types.js';
@@ -28,8 +28,18 @@ interface ReplayResponse {
durationMs: number; durationMs: number;
} }
// Shared cache across replay calls (replay is ephemeral, not per-session) // Cache instances per namespace (replay uses varied provider/model combos)
const replayCache = new MemoryCache({ maxEntries: 500 }); const replayCaches = new Map<string, FileCache>();
function getReplayCache(provider: string, model: string, proxyModel: string): FileCache {
const ns = `${provider}--${model}--${proxyModel}`;
let cache = replayCaches.get(ns);
if (!cache) {
cache = new FileCache(ns);
replayCaches.set(ns, cache);
}
return cache;
}
export function registerReplayEndpoint(app: FastifyInstance, providerRegistry?: ProviderRegistry | null): void { export function registerReplayEndpoint(app: FastifyInstance, providerRegistry?: ProviderRegistry | null): void {
app.post<{ Body: ReplayRequestBody }>('/proxymodel/replay', async (request, reply) => { app.post<{ Body: ReplayRequestBody }>('/proxymodel/replay', async (request, reply) => {
@@ -70,7 +80,7 @@ export function registerReplayEndpoint(app: FastifyInstance, providerRegistry?:
sessionId: `replay-${Date.now()}`, sessionId: `replay-${Date.now()}`,
proxyModel, proxyModel,
llm, llm,
cache: replayCache, cache: getReplayCache(providerName ?? 'default', llmModel ?? 'default', modelName),
}); });
const response: ReplayResponse = { const response: ReplayResponse = {

View File

@@ -10,6 +10,8 @@ import { registerProjectMcpEndpoint } from './project-mcp-endpoint.js';
import { registerInspectEndpoint } from './inspect-endpoint.js'; import { registerInspectEndpoint } from './inspect-endpoint.js';
import { registerProxymodelEndpoint } from './proxymodel-endpoint.js'; import { registerProxymodelEndpoint } from './proxymodel-endpoint.js';
import { registerReplayEndpoint } from './replay-endpoint.js'; import { registerReplayEndpoint } from './replay-endpoint.js';
import { registerCacheEndpoint } from './cache-endpoint.js';
import { registerPauseEndpoint } from './pause-endpoint.js';
import { TrafficCapture } from './traffic.js'; import { TrafficCapture } from './traffic.js';
import type { McpRouter } from '../router.js'; import type { McpRouter } from '../router.js';
import type { HealthMonitor } from '../health.js'; import type { HealthMonitor } from '../health.js';
@@ -224,6 +226,12 @@ export async function createHttpServer(
// ProxyModel replay endpoint (stateless pipeline execution) // ProxyModel replay endpoint (stateless pipeline execution)
registerReplayEndpoint(app, deps.providerRegistry); registerReplayEndpoint(app, deps.providerRegistry);
// Cache management endpoints
registerCacheEndpoint(app);
// Pause queue endpoints (live debugging)
registerPauseEndpoint(app);
// Proxy management routes to mcpd // Proxy management routes to mcpd
const mcpdClient = new McpdClient(config.mcpdUrl, config.mcpdToken); const mcpdClient = new McpdClient(config.mcpdUrl, config.mcpdToken);
registerProxyRoutes(app, mcpdClient); registerProxyRoutes(app, mcpdClient);

View File

@@ -12,6 +12,7 @@ import type { HttpConfig } from './http/config.js';
import { createProvidersFromConfig } from './llm-config.js'; import { createProvidersFromConfig } from './llm-config.js';
import { createSecretStore } from '@mcpctl/shared'; import { createSecretStore } from '@mcpctl/shared';
import type { ProviderRegistry } from './providers/registry.js'; import type { ProviderRegistry } from './providers/registry.js';
import { startWatchers, stopWatchers, reloadStages } from './proxymodel/watcher.js';
interface ParsedArgs { interface ParsedArgs {
configPath: string | undefined; configPath: string | undefined;
@@ -139,12 +140,17 @@ export async function main(argv: string[] = process.argv): Promise<MainResult> {
process.stderr.write(`mcpctl-proxy HTTP server listening on ${httpConfig.httpHost}:${httpConfig.httpPort}\n`); process.stderr.write(`mcpctl-proxy HTTP server listening on ${httpConfig.httpHost}:${httpConfig.httpPort}\n`);
} }
// Load custom stages and start file watchers for hot-reload
await reloadStages();
startWatchers();
// Graceful shutdown // Graceful shutdown
let shuttingDown = false; let shuttingDown = false;
const shutdown = async () => { const shutdown = async () => {
if (shuttingDown) return; if (shuttingDown) return;
shuttingDown = true; shuttingDown = true;
stopWatchers();
providerRegistry.disposeAll(); providerRegistry.disposeAll();
server.stop(); server.stop();
if (httpServer) { if (httpServer) {

View File

@@ -0,0 +1,427 @@
/**
* Persistent file-backed cache with in-memory L1.
*
* Storage layout:
* ~/.mcpctl/cache/<namespace>/<key>.dat
*
* - namespace = sanitized "provider--model--proxymodel"
* - key = 16-char hex content hash (from CacheProvider.hash)
* - .dat = raw cached content (no JSON wrapper)
* - mtime tracks last access for LRU eviction
*
* L1 in-memory Map provides fast repeated lookups within a session.
* L2 disk provides persistence across mcplocal restarts.
*/
import { createHash } from 'node:crypto';
import * as fs from 'node:fs';
import * as path from 'node:path';
import * as os from 'node:os';
import type { CacheProvider } from './types.js';
export interface FileCacheConfig {
/** Cache root directory. Default: ~/.mcpctl/cache */
dir?: string;
/**
* Max total cache size. Accepts:
* - number: bytes (e.g. 268435456)
* - string with unit: "256MB", "1GB", "500KB", "2TB"
* - percentage of partition: "10%", "50%"
* Default: 256MB
*/
maxSize?: number | string;
/** @deprecated Use maxSize instead */
maxSizeBytes?: number;
/** TTL in ms since last access. Default: 30 days */
ttlMs?: number;
/** Max in-memory L1 entries per instance. Default: 500 */
maxMemoryEntries?: number;
}
// Default cache location and tuning knobs.
const DEFAULT_DIR = path.join(os.homedir(), '.mcpctl', 'cache');
const DEFAULT_MAX_SIZE = 256 * 1024 * 1024; // 256MB
const DEFAULT_TTL = 30 * 24 * 60 * 60 * 1000; // 30 days
const DEFAULT_MEMORY = 500; // max L1 entries per FileCache instance

// Multipliers for human-readable size suffixes.
const SIZE_UNITS: Record<string, number> = {
  B: 1,
  KB: 1024,
  MB: 1024 * 1024,
  GB: 1024 * 1024 * 1024,
  TB: 1024 * 1024 * 1024 * 1024,
};

/**
 * Parse a human-readable size spec into bytes.
 * - number → bytes (must be a positive finite number)
 * - "256MB", "1.5GB", "500KB", "2TB" → bytes
 * - "10%", "50%" → percentage of the filesystem hosting cacheDir
 *
 * @param spec     Size as number of bytes, unit string, or percentage.
 * @param cacheDir Directory whose partition anchors percentage specs.
 * @throws Error when the spec is not a positive size.
 */
export function parseMaxSize(spec: number | string, cacheDir?: string): number {
  if (typeof spec === 'number') {
    // FIX: previously any number (including 0, negatives, and NaN) was
    // returned verbatim, which would break LRU eviction downstream; the
    // string form "0" already threw, so numbers now behave consistently.
    if (!Number.isFinite(spec) || spec <= 0) {
      throw new Error(`Invalid cache maxSize: "${spec}". Use bytes, "256MB", "1GB", or "10%"`);
    }
    return Math.floor(spec);
  }
  const s = spec.trim().toUpperCase();
  // "10%" → fraction of the partition that hosts the cache directory.
  const pctMatch = s.match(/^(\d+(?:\.\d+)?)\s*%$/);
  if (pctMatch && pctMatch[1]) {
    const pct = parseFloat(pctMatch[1]);
    if (pct <= 0 || pct > 100) throw new Error(`Invalid cache size percentage: ${spec}`);
    const dir = cacheDir ?? DEFAULT_DIR;
    try {
      const stat = fs.statfsSync(dir);
      return Math.floor(stat.bsize * stat.blocks * (pct / 100));
    } catch {
      // Partition not stat-able (dir doesn't exist yet) — try parent.
      try {
        const stat = fs.statfsSync(path.dirname(dir));
        return Math.floor(stat.bsize * stat.blocks * (pct / 100));
      } catch {
        // Fallback to default
        return DEFAULT_MAX_SIZE;
      }
    }
  }
  // Unit-based: "256MB", "1.5GB" etc.
  const unitMatch = s.match(/^(\d+(?:\.\d+)?)\s*(B|KB|MB|GB|TB)$/);
  if (unitMatch && unitMatch[1] && unitMatch[2]) {
    const multiplier = SIZE_UNITS[unitMatch[2]];
    if (multiplier !== undefined) return Math.floor(parseFloat(unitMatch[1]) * multiplier);
  }
  // Plain number string ("1048576").
  const n = Number(spec);
  if (!isNaN(n) && n > 0) return Math.floor(n);
  throw new Error(`Invalid cache maxSize: "${spec}". Use bytes, "256MB", "1GB", or "10%"`);
}
/**
 * Sanitize a namespace for filesystem use: every character outside
 * [a-zA-Z0-9._-] becomes '_', and the result is capped at 100 chars.
 */
function sanitize(s: string): string {
  const unsafe = /[^a-zA-Z0-9._-]/g;
  return s.replace(unsafe, '_').substring(0, 100);
}
/**
 * Two-level cache: in-memory L1 Map (fast, per-instance, LRU by insertion
 * order) backed by an on-disk L2 (persistent across restarts, LRU by mtime).
 * Implements CacheProvider so it can be plugged into pipeline execution.
 */
export class FileCache implements CacheProvider {
  /** L1: insertion-ordered Map; the first key is the LRU eviction victim. */
  private readonly memory = new Map<string, string>();
  /** Directory holding this namespace's .dat files. */
  private readonly nsDir: string;
  /** Cache root shared by all namespaces. */
  private readonly rootDir: string;
  private readonly maxSizeBytes: number;
  private readonly ttlMs: number;
  private readonly maxMemoryEntries: number;
  /** True once nsDir has been created (lazily, on first write). */
  private dirReady = false;
  /** Debounces background cleanup so a write burst triggers at most one. */
  private cleanupScheduled = false;

  constructor(
    readonly namespace: string,
    config?: FileCacheConfig,
  ) {
    this.rootDir = config?.dir ?? DEFAULT_DIR;
    this.nsDir = path.join(this.rootDir, sanitize(namespace));
    // Precedence: maxSize (new) > maxSizeBytes (deprecated) > default.
    this.maxSizeBytes = config?.maxSize
      ? parseMaxSize(config.maxSize, this.rootDir)
      : (config?.maxSizeBytes ?? DEFAULT_MAX_SIZE);
    this.ttlMs = config?.ttlMs ?? DEFAULT_TTL;
    this.maxMemoryEntries = config?.maxMemoryEntries ?? DEFAULT_MEMORY;
  }

  /** 16-hex-char SHA-256 prefix of `content`, used as the cache key. */
  hash(content: string): string {
    return createHash('sha256').update(content).digest('hex').slice(0, 16);
  }

  /** Return the cached value for `key`, or run `compute` and cache its result. */
  async getOrCompute(key: string, compute: () => Promise<string>): Promise<string> {
    const cached = await this.get(key);
    if (cached !== null) return cached;
    const value = await compute();
    await this.set(key, value);
    return value;
  }

  /**
   * Look up `key`: first in L1 memory, then on disk.
   * Disk hits are promoted into L1 and have their mtime refreshed so the
   * disk-level LRU eviction sees them as recently used. Returns null on miss.
   */
  async get(key: string): Promise<string | null> {
    // L1: memory
    const memVal = this.memory.get(key);
    if (memVal !== undefined) {
      // LRU: re-insert so this key becomes the newest entry
      this.memory.delete(key);
      this.memory.set(key, memVal);
      return memVal;
    }
    // L2: disk
    const filePath = this.keyPath(key);
    try {
      const value = fs.readFileSync(filePath, 'utf-8');
      // Touch mtime for LRU (fire-and-forget; errors are irrelevant)
      const now = new Date();
      fs.utimes(filePath, now, now, () => {});
      this.setMemory(key, value);
      return value;
    } catch {
      return null; // missing/unreadable file == cache miss
    }
  }

  /** Store `value` in L1 and persist to disk; schedules background cleanup. */
  async set(key: string, value: string): Promise<void> {
    this.setMemory(key, value);
    // L2: persist to disk
    this.ensureDir();
    const filePath = this.keyPath(key);
    try {
      fs.writeFileSync(filePath, value);
    } catch {
      // Disk write failure is non-fatal — memory cache still works
    }
    this.scheduleCleanup();
  }

  /** Number of in-memory entries. */
  get memorySize(): number {
    return this.memory.size;
  }

  /** Clear in-memory L1 cache. */
  clearMemory(): void {
    this.memory.clear();
  }

  // -- Static helpers for CLI / lifecycle --

  /** Get stats for all namespaces under the cache root. */
  static stats(rootDir?: string): CacheStats {
    const dir = rootDir ?? DEFAULT_DIR;
    const namespaces: NamespaceStats[] = [];
    let totalSize = 0;
    let totalEntries = 0;
    try {
      const dirs = fs.readdirSync(dir, { withFileTypes: true });
      for (const d of dirs) {
        if (!d.isDirectory()) continue;
        const nsPath = path.join(dir, d.name);
        const ns = scanNamespace(nsPath);
        namespaces.push({ name: d.name, ...ns });
        totalSize += ns.size;
        totalEntries += ns.entries;
      }
    } catch {
      // Cache dir doesn't exist yet
    }
    return { rootDir: dir, totalSize, totalEntries, namespaces };
  }

  /** Clear a specific namespace or all namespaces. */
  static clear(options?: { rootDir?: string; namespace?: string; olderThanMs?: number }): ClearResult {
    const dir = options?.rootDir ?? DEFAULT_DIR;
    let removed = 0;
    let freedBytes = 0;
    if (options?.namespace) {
      const nsPath = path.join(dir, sanitize(options.namespace));
      const result = clearDir(nsPath, options.olderThanMs);
      removed = result.removed;
      freedBytes = result.freedBytes;
    } else {
      try {
        const dirs = fs.readdirSync(dir, { withFileTypes: true });
        for (const d of dirs) {
          if (!d.isDirectory()) continue;
          const result = clearDir(path.join(dir, d.name), options?.olderThanMs);
          removed += result.removed;
          freedBytes += result.freedBytes;
        }
      } catch {
        // No cache dir
      }
    }
    return { removed, freedBytes };
  }

  /** Evict expired entries and enforce size limit across all namespaces. */
  static cleanup(rootDir?: string, maxSizeBytes?: number, ttlMs?: number): ClearResult {
    const dir = rootDir ?? DEFAULT_DIR;
    const maxSize = maxSizeBytes ?? DEFAULT_MAX_SIZE;
    const ttl = ttlMs ?? DEFAULT_TTL;
    let removed = 0;
    let freedBytes = 0;
    // Phase 1: remove entries whose last access is older than the TTL
    const expired = FileCache.clear({ rootDir: dir, olderThanMs: ttl });
    removed += expired.removed;
    freedBytes += expired.freedBytes;
    // Phase 2: if still over size limit, evict least-recently-accessed first
    const stats = FileCache.stats(dir);
    if (stats.totalSize > maxSize) {
      const entries = collectAllEntries(dir);
      // Sort by mtime ascending (oldest first)
      entries.sort((a, b) => a.mtimeMs - b.mtimeMs);
      let currentSize = stats.totalSize;
      for (const entry of entries) {
        if (currentSize <= maxSize * 0.8) break; // Evict down to 80% (hysteresis)
        try {
          fs.unlinkSync(entry.path);
          currentSize -= entry.size;
          removed++;
          freedBytes += entry.size;
        } catch {
          // Already deleted
        }
      }
    }
    // Phase 3: remove empty namespace dirs
    try {
      const dirs = fs.readdirSync(dir, { withFileTypes: true });
      for (const d of dirs) {
        if (!d.isDirectory()) continue;
        const nsPath = path.join(dir, d.name);
        try {
          const files = fs.readdirSync(nsPath);
          if (files.length === 0) fs.rmdirSync(nsPath);
        } catch { /* ignore */ }
      }
    } catch { /* no cache dir */ }
    return { removed, freedBytes };
  }

  // -- Private --

  /** Filesystem path for a cache key. */
  private keyPath(key: string): string {
    // Short alphanumeric keys are used verbatim; anything else (e.g. keys
    // containing ':' like "summary:abc123:200") is hashed for a safe filename.
    const safeKey = key.length <= 32 && /^[a-zA-Z0-9_-]+$/.test(key)
      ? key
      : this.hash(key);
    return path.join(this.nsDir, `${safeKey}.dat`);
  }

  /** Insert into L1, evicting the oldest entry when at capacity. */
  private setMemory(key: string, value: string): void {
    if (this.memory.size >= this.maxMemoryEntries) {
      // Map preserves insertion order, so the first key is the LRU victim
      const oldest = this.memory.keys().next().value as string;
      this.memory.delete(oldest);
    }
    this.memory.set(key, value);
  }

  /** Lazily create the namespace directory (once per instance). */
  private ensureDir(): void {
    if (this.dirReady) return;
    try {
      fs.mkdirSync(this.nsDir, { recursive: true });
      this.dirReady = true;
    } catch { /* ignore — subsequent writes fail soft */ }
  }

  /** Schedule a single background cleanup pass ~5s after a write burst. */
  private scheduleCleanup(): void {
    if (this.cleanupScheduled) return;
    this.cleanupScheduled = true;
    const timer = setTimeout(() => {
      FileCache.cleanup(this.rootDir, this.maxSizeBytes, this.ttlMs);
      this.cleanupScheduled = false;
    }, 5000);
    // FIX: without unref() this pending timer keeps the Node process alive
    // for up to 5s after the last cache write. Cast because the timer type
    // differs between Node (Timeout) and DOM (number) typings.
    (timer as unknown as { unref?: () => void }).unref?.();
  }
}
// -- Types --
/** Aggregate view of the whole cache root (all namespaces combined). */
export interface CacheStats {
  /** Absolute path of the cache root directory that was scanned. */
  rootDir: string;
  /** Sum of all .dat file sizes, in bytes. */
  totalSize: number;
  /** Total number of cached entries across every namespace. */
  totalEntries: number;
  /** Per-namespace breakdown. */
  namespaces: NamespaceStats[];
}
/** Stats for a single namespace directory. */
export interface NamespaceStats {
  /** Namespace directory name (sanitized "provider--model--proxymodel"). */
  name: string;
  /** Number of .dat entries. */
  entries: number;
  /** Total bytes on disk. */
  size: number;
  /** mtime (epoch ms) of the least-recently-accessed entry; 0 when empty. */
  oldestMs: number;
  /** mtime (epoch ms) of the most-recently-accessed entry; 0 when empty. */
  newestMs: number;
}
/** Result of a clear/cleanup operation. */
export interface ClearResult {
  /** Number of entries deleted. */
  removed: number;
  /** Bytes reclaimed. */
  freedBytes: number;
}
// -- Helpers --
/**
 * Tally entry count, total bytes, and oldest/newest mtimes for one
 * namespace directory. A missing directory yields all-zero stats.
 */
function scanNamespace(nsPath: string): { entries: number; size: number; oldestMs: number; newestMs: number } {
  const acc = { entries: 0, size: 0, oldestMs: Infinity, newestMs: 0 };
  let names: string[] = [];
  try {
    names = fs.readdirSync(nsPath);
  } catch {
    // Directory vanished or never existed — report empty.
  }
  for (const name of names) {
    if (!name.endsWith('.dat')) continue;
    try {
      const st = fs.statSync(path.join(nsPath, name));
      acc.entries += 1;
      acc.size += st.size;
      acc.oldestMs = Math.min(acc.oldestMs, st.mtimeMs);
      acc.newestMs = Math.max(acc.newestMs, st.mtimeMs);
    } catch {
      // File deleted between readdir and stat — skip it.
    }
  }
  return {
    entries: acc.entries,
    size: acc.size,
    // Infinity sentinel means "no entries seen" — normalize to 0.
    oldestMs: acc.entries > 0 ? acc.oldestMs : 0,
    newestMs: acc.newestMs,
  };
}
/**
 * Delete .dat files from one namespace directory, optionally only those
 * last accessed more than `olderThanMs` ago. Returns what was removed.
 */
function clearDir(nsPath: string, olderThanMs?: number): { removed: number; freedBytes: number } {
  let removed = 0;
  let freedBytes = 0;
  const cutoff = olderThanMs ? Date.now() - olderThanMs : 0;
  let names: string[] = [];
  try {
    names = fs.readdirSync(nsPath);
  } catch {
    // Directory already gone — nothing to clear.
  }
  for (const name of names) {
    if (!name.endsWith('.dat')) continue;
    const fp = path.join(nsPath, name);
    try {
      const st = fs.statSync(fp);
      // With a cutoff, keep anything accessed at or after it.
      if (olderThanMs && st.mtimeMs >= cutoff) continue;
      freedBytes += st.size;
      fs.unlinkSync(fp);
      removed += 1;
    } catch {
      // Raced with another deleter — ignore.
    }
  }
  return { removed, freedBytes };
}
/**
 * Flatten every .dat entry across all namespace directories under
 * `rootDir` into one list with size and last-access time (for LRU sorting).
 */
function collectAllEntries(rootDir: string): Array<{ path: string; size: number; mtimeMs: number }> {
  const out: Array<{ path: string; size: number; mtimeMs: number }> = [];
  let dirents: fs.Dirent[] = [];
  try {
    dirents = fs.readdirSync(rootDir, { withFileTypes: true });
  } catch {
    // Cache root missing — nothing to collect.
  }
  for (const dirent of dirents) {
    if (!dirent.isDirectory()) continue;
    const nsPath = path.join(rootDir, dirent.name);
    let files: string[] = [];
    try {
      files = fs.readdirSync(nsPath);
    } catch {
      continue; // namespace removed concurrently
    }
    for (const file of files) {
      if (!file.endsWith('.dat')) continue;
      const fp = path.join(nsPath, file);
      try {
        const st = fs.statSync(fp);
        out.push({ path: fp, size: st.size, mtimeMs: st.mtimeMs });
      } catch {
        // File removed between readdir and stat — skip.
      }
    }
  }
  return out;
}

View File

@@ -32,6 +32,10 @@ export { getStage, listStages, loadCustomStages, clearCustomStages } from './sta
export { BUILT_IN_STAGES } from './stages/index.js'; export { BUILT_IN_STAGES } from './stages/index.js';
export { detectContentType } from './content-type.js'; export { detectContentType } from './content-type.js';
export { MemoryCache } from './cache.js'; export { MemoryCache } from './cache.js';
export { FileCache } from './file-cache.js';
export { pauseQueue } from './pause-queue.js';
export type { PausedItem } from './pause-queue.js';
export { startWatchers, stopWatchers, reloadStages, onReload } from './watcher.js';
export { LLMProviderAdapter } from './llm-adapter.js'; export { LLMProviderAdapter } from './llm-adapter.js';
// Types // Types

View File

@@ -0,0 +1,142 @@
/**
* Pause Queue — intercepts pipeline output for live debugging.
*
* When paused, content that has been through the pipeline is held in a queue
* instead of being returned to the client. The operator can:
* - release: send the transformed content as-is
* - edit: modify the content before sending
* - drop: send empty content (suppresses the response)
*
* Singleton — shared across all sessions in the mcplocal process.
*/
import { randomUUID } from 'node:crypto';
/** One piece of pipeline output held in the queue while traffic is paused. */
export interface PausedItem {
  /** Random UUID assigned when the item is enqueued. */
  id: string;
  sessionId: string;
  projectName: string;
  contentType: string;
  sourceName: string;
  /** Content before pipeline transformation. */
  original: string;
  /** Content after pipeline transformation (what "release" sends). */
  transformed: string;
  /** Enqueue time, epoch ms. */
  timestamp: number;
}
/** Internal entry: the item plus the resolver that unblocks its waiting caller. */
interface QueueEntry extends PausedItem {
  resolve: (content: string) => void;
}
/** Callback invoked with a snapshot of the queue whenever it changes. */
type QueueListener = (items: PausedItem[]) => void;
class PauseQueue {
private _paused = false;
private queue: QueueEntry[] = [];
private listeners = new Set<QueueListener>();
/** Whether pipeline output is currently being held instead of returned. */
get paused(): boolean {
  return this._paused;
}
/**
 * Toggle the pause state. Unpausing releases every held item with its
 * transformed content and empties the queue. Notifies subscribers.
 */
setPaused(paused: boolean): void {
  this._paused = paused;
  if (!paused) {
    // Release all with their transformed content
    for (const entry of this.queue) {
      entry.resolve(entry.transformed);
    }
    this.queue = [];
  }
  this.notify();
}
/**
* Enqueue content after pipeline processing.
* If not paused, returns immediately with the transformed content.
* If paused, blocks until the item is released/edited/dropped.
*/
enqueue(item: Omit<PausedItem, 'id' | 'timestamp'>): Promise<string> {
if (!this._paused) return Promise.resolve(item.transformed);
return new Promise<string>((resolve) => {
this.queue.push({
...item,
id: randomUUID(),
timestamp: Date.now(),
resolve,
});
this.notify();
});
}
/** Get all queued items (without resolve functions). */
getItems(): PausedItem[] {
return this.queue.map(({ resolve: _, ...item }) => item);
}
/** Release one item with its transformed content. */
releaseOne(id: string): boolean {
const idx = this.queue.findIndex((q) => q.id === id);
if (idx < 0) return false;
const entry = this.queue.splice(idx, 1)[0]!;
entry.resolve(entry.transformed);
this.notify();
return true;
}
/** Edit the content and release. */
editAndRelease(id: string, content: string): boolean {
const idx = this.queue.findIndex((q) => q.id === id);
if (idx < 0) return false;
const entry = this.queue.splice(idx, 1)[0]!;
entry.resolve(content);
this.notify();
return true;
}
/** Drop an item (sends empty content back to client). */
dropOne(id: string): boolean {
const idx = this.queue.findIndex((q) => q.id === id);
if (idx < 0) return false;
const entry = this.queue.splice(idx, 1)[0]!;
entry.resolve('');
this.notify();
return true;
}
/** Release all items with their transformed content. Returns count released. */
releaseAll(): number {
const count = this.queue.length;
for (const entry of this.queue) {
entry.resolve(entry.transformed);
}
this.queue = [];
this.notify();
return count;
}
/** Subscribe to queue changes. Returns unsubscribe function. */
subscribe(cb: QueueListener): () => void {
this.listeners.add(cb);
return () => {
this.listeners.delete(cb);
};
}
get size(): number {
return this.queue.length;
}
private notify(): void {
const items = this.getItems();
for (const cb of this.listeners) {
try {
cb(items);
} catch {
// Don't let a bad listener break the queue
}
}
}
}
/** Singleton pause queue shared across all sessions. */
export const pauseQueue = new PauseQueue();

View File

@@ -8,9 +8,13 @@ import { BUILT_IN_STAGES } from './stages/index.js';
const customStages = new Map<string, StageHandler>(); const customStages = new Map<string, StageHandler>();
/** Counter for cache-busting dynamic imports on reload. */
let loadGeneration = 0;
/** /**
* Load custom stages from a directory. * Load custom stages from a directory.
* Each .js file exports a default StageHandler. * Each .js file exports a default StageHandler.
* Uses a generation counter to bust Node's module cache on reload.
*/ */
export async function loadCustomStages(dir: string): Promise<void> { export async function loadCustomStages(dir: string): Promise<void> {
const { readdir } = await import('node:fs/promises'); const { readdir } = await import('node:fs/promises');
@@ -18,13 +22,16 @@ export async function loadCustomStages(dir: string): Promise<void> {
const { pathToFileURL } = await import('node:url'); const { pathToFileURL } = await import('node:url');
customStages.clear(); customStages.clear();
loadGeneration++;
try { try {
const files = await readdir(dir); const files = await readdir(dir);
for (const file of files) { for (const file of files) {
if (!file.endsWith('.js')) continue; if (!file.endsWith('.js') && !file.endsWith('.mjs')) continue;
const name = file.replace(/\.js$/, ''); const name = file.replace(/\.(m?js)$/, '');
try { try {
const mod = await import(pathToFileURL(join(dir, file)).href) as { default?: StageHandler }; // Append generation query to bust Node's import cache
const url = `${pathToFileURL(join(dir, file)).href}?v=${loadGeneration}`;
const mod = await import(url) as { default?: StageHandler };
if (typeof mod.default === 'function') { if (typeof mod.default === 'function') {
customStages.set(name, mod.default); customStages.set(name, mod.default);
} else { } else {

View File

@@ -0,0 +1,97 @@
/**
* File watcher for hot-reloading stages and proxymodels.
*
* Watches ~/.mcpctl/stages/ and ~/.mcpctl/proxymodels/ for changes.
* - Stage changes: clear + reload all custom stages (module cache busted)
* - ProxyModel changes: no action needed (loader reads from disk each time)
* but we log for visibility and notify subscribers.
*/
import { watch, type FSWatcher } from 'node:fs';
import { join } from 'node:path';
import { loadCustomStages } from './stage-registry.js';
// Watched directories live under the user's mcpctl home.
const HOME = process.env['HOME'] ?? '/tmp';
const STAGES_DIR = join(HOME, '.mcpctl', 'stages');
const PROXYMODELS_DIR = join(HOME, '.mcpctl', 'proxymodels');
// Module-level singletons: at most one watcher per directory.
let stageWatcher: FSWatcher | null = null;
let modelWatcher: FSWatcher | null = null;
// Pending debounced stage-reload timer; set in startWatchers, cancelled in stopWatchers.
let reloadDebounce: ReturnType<typeof setTimeout> | null = null;
/** Callback invoked when a watched resource kind is reloaded. */
type ReloadListener = (what: 'stages' | 'proxymodels') => void;

/** Active reload subscribers. */
const listeners = new Set<ReloadListener>();

/**
 * Register a callback for reload events.
 * @returns A function that removes the subscription.
 */
export function onReload(cb: ReloadListener): () => void {
  listeners.add(cb);
  return () => {
    listeners.delete(cb);
  };
}

/** Fan out a reload event; a throwing listener never affects the others. */
function notify(what: 'stages' | 'proxymodels'): void {
  listeners.forEach((cb) => {
    try {
      cb(what);
    } catch {
      // Faulty subscribers are isolated from each other.
    }
  });
}
/**
* Reload custom stages from disk.
* Busts the Node module cache for changed files by appending a query string.
*/
/**
 * Reload custom stages from disk.
 * Busts the Node module cache for changed files by appending a query string
 * (done inside loadCustomStages via its generation counter).
 *
 * @returns loaded — number of custom ("local") stages now registered.
 */
export async function reloadStages(): Promise<{ loaded: number }> {
  await loadCustomStages(STAGES_DIR);
  // NOTE(review): listStages is imported dynamically even though
  // loadCustomStages is a static import from the same module — presumably
  // to avoid a circular-import issue at load time; confirm before changing.
  const { listStages } = await import('./stage-registry.js');
  const stages = listStages();
  // Custom stages are tagged source === 'local' by the registry.
  const localCount = stages.filter((s) => s.source === 'local').length;
  console.info(`[watcher] Reloaded stages: ${localCount} custom, ${stages.length} total`);
  notify('stages');
  return { loaded: localCount };
}
/**
 * Start file watchers. Safe to call multiple times (idempotent).
 *
 * Stage changes trigger a debounced full reload; proxymodel changes only
 * notify subscribers (the loader reads definitions from disk on each use).
 */
export function startWatchers(): void {
  if (!stageWatcher) {
    try {
      stageWatcher = watch(STAGES_DIR, { persistent: false }, (_eventType, filename) => {
        if (!filename || (!filename.endsWith('.js') && !filename.endsWith('.mjs'))) return;
        // Debounce: editors fire multiple fs events for a single save.
        if (reloadDebounce) clearTimeout(reloadDebounce);
        reloadDebounce = setTimeout(() => {
          // Fix: was the literal text "$(unknown)" — interpolate the filename.
          console.info(`[watcher] Stage file changed: ${filename}`);
          void reloadStages();
        }, 300);
      });
      console.info(`[watcher] Watching ${STAGES_DIR} for stage changes`);
    } catch {
      // Directory doesn't exist — that's fine; nothing to watch yet.
    }
  }
  if (!modelWatcher) {
    try {
      modelWatcher = watch(PROXYMODELS_DIR, { persistent: false }, (_eventType, filename) => {
        if (!filename || (!filename.endsWith('.yaml') && !filename.endsWith('.yml'))) return;
        // Fix: was the literal text "$(unknown)" — interpolate the filename.
        console.info(`[watcher] ProxyModel file changed: ${filename} (will take effect on next use)`);
        notify('proxymodels');
      });
      console.info(`[watcher] Watching ${PROXYMODELS_DIR} for proxymodel changes`);
    } catch {
      // Directory doesn't exist — that's fine.
    }
  }
}
/** Stop file watchers and cancel any pending debounced reload. */
export function stopWatchers(): void {
  if (reloadDebounce) {
    clearTimeout(reloadDebounce);
    // Fix: null the stale handle so a later startWatchers() starts fresh
    // instead of clearing an already-fired timer.
    reloadDebounce = null;
  }
  stageWatcher?.close();
  stageWatcher = null;
  modelWatcher?.close();
  modelWatcher = null;
}

View File

@@ -0,0 +1,413 @@
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import * as fs from 'node:fs';
import * as path from 'node:path';
import * as os from 'node:os';
import { FileCache, parseMaxSize } from '../src/proxymodel/file-cache.js';
function makeTmpDir(): string {
return fs.mkdtempSync(path.join(os.tmpdir(), 'mcpctl-cache-test-'));
}
function rmrf(dir: string): void {
try { fs.rmSync(dir, { recursive: true, force: true }); } catch { /* ok */ }
}
describe('FileCache', () => {
let tmpDir: string;
beforeEach(() => {
tmpDir = makeTmpDir();
});
afterEach(() => {
rmrf(tmpDir);
});
// -- Basic get/set --
it('returns null on cache miss', async () => {
const cache = new FileCache('test-ns', { dir: tmpDir });
expect(await cache.get('missing')).toBeNull();
});
it('stores and retrieves values', async () => {
const cache = new FileCache('test-ns', { dir: tmpDir });
await cache.set('key1', 'hello world');
expect(await cache.get('key1')).toBe('hello world');
});
it('getOrCompute computes on miss, returns cached on hit', async () => {
const cache = new FileCache('test-ns', { dir: tmpDir });
let calls = 0;
const compute = async () => { calls++; return 'computed'; };
const v1 = await cache.getOrCompute('k', compute);
const v2 = await cache.getOrCompute('k', compute);
expect(v1).toBe('computed');
expect(v2).toBe('computed');
expect(calls).toBe(1);
});
it('hash produces consistent 16-char hex strings', () => {
const cache = new FileCache('test-ns', { dir: tmpDir });
const h1 = cache.hash('hello');
const h2 = cache.hash('hello');
const h3 = cache.hash('world');
expect(h1).toBe(h2);
expect(h1).not.toBe(h3);
expect(h1).toHaveLength(16);
expect(/^[0-9a-f]+$/.test(h1)).toBe(true);
});
// -- Persistence (L2 disk) --
it('persists values to disk across instances', async () => {
const cache1 = new FileCache('persist-ns', { dir: tmpDir });
await cache1.set('pk', 'persistent-value');
// New instance, same namespace — should find it on disk
const cache2 = new FileCache('persist-ns', { dir: tmpDir });
expect(await cache2.get('pk')).toBe('persistent-value');
});
it('creates .dat files on disk', async () => {
const cache = new FileCache('disk-ns', { dir: tmpDir });
await cache.set('mykey', 'data');
const nsDir = path.join(tmpDir, 'disk-ns');
const files = fs.readdirSync(nsDir).filter((f) => f.endsWith('.dat'));
expect(files.length).toBe(1);
});
// -- Namespace isolation --
it('different namespaces are isolated', async () => {
const cacheA = new FileCache('ns-alpha', { dir: tmpDir });
const cacheB = new FileCache('ns-beta', { dir: tmpDir });
await cacheA.set('shared-key', 'alpha-value');
await cacheB.set('shared-key', 'beta-value');
expect(await cacheA.get('shared-key')).toBe('alpha-value');
expect(await cacheB.get('shared-key')).toBe('beta-value');
});
it('provider--model--proxymodel namespaces are separate', async () => {
const ns1 = 'openai--gpt-4o--content-pipeline';
const ns2 = 'anthropic--claude-sonnet-4-20250514--content-pipeline';
const ns3 = 'openai--gpt-4o--default';
const c1 = new FileCache(ns1, { dir: tmpDir });
const c2 = new FileCache(ns2, { dir: tmpDir });
const c3 = new FileCache(ns3, { dir: tmpDir });
await c1.set('k', 'from-gpt4o-pipeline');
await c2.set('k', 'from-claude-pipeline');
await c3.set('k', 'from-gpt4o-default');
expect(await c1.get('k')).toBe('from-gpt4o-pipeline');
expect(await c2.get('k')).toBe('from-claude-pipeline');
expect(await c3.get('k')).toBe('from-gpt4o-default');
// Verify separate directories on disk
const dirs = fs.readdirSync(tmpDir);
expect(dirs.length).toBe(3);
});
// -- L1 memory cache --
it('L1 memory cache has LRU eviction', async () => {
const cache = new FileCache('lru-ns', { dir: tmpDir, maxMemoryEntries: 3 });
await cache.set('a', '1');
await cache.set('b', '2');
await cache.set('c', '3');
expect(cache.memorySize).toBe(3);
await cache.set('d', '4');
expect(cache.memorySize).toBe(3);
// 'a' evicted from memory but still on disk
cache.clearMemory();
expect(await cache.get('a')).toBe('1'); // restored from disk
});
it('get refreshes LRU position in memory', async () => {
  const cache = new FileCache('lru2-ns', { dir: tmpDir, maxMemoryEntries: 3 });
  await cache.set('a', '1');
  await cache.set('b', '2');
  await cache.set('c', '3');
  // Access 'a' to refresh it
  await cache.get('a');
  // Adding 'd' should evict 'b' (now oldest), not 'a'
  await cache.set('d', '4');
  // 'a' should still be in memory
  cache.clearMemory();
  // All values still on disk regardless
  // NOTE(review): the public API cannot observe *which* key was evicted from
  // L1, so the assertion below only verifies disk persistence — the LRU
  // refresh claimed above is not actually asserted. Consider exposing the
  // set of in-memory keys (test-only) to pin the eviction order.
  expect(await cache.get('b')).toBe('2');
});
it('clearMemory only clears L1, not disk', async () => {
const cache = new FileCache('clear-ns', { dir: tmpDir });
await cache.set('k', 'val');
expect(cache.memorySize).toBe(1);
cache.clearMemory();
expect(cache.memorySize).toBe(0);
// Still on disk
expect(await cache.get('k')).toBe('val');
expect(cache.memorySize).toBe(1); // re-loaded into L1
});
// -- Static: stats --
it('stats returns empty for non-existent dir', () => {
const stats = FileCache.stats(path.join(tmpDir, 'nonexistent'));
expect(stats.totalEntries).toBe(0);
expect(stats.totalSize).toBe(0);
expect(stats.namespaces).toHaveLength(0);
});
it('stats reports per-namespace breakdown', async () => {
const c1 = new FileCache('ns-one', { dir: tmpDir });
const c2 = new FileCache('ns-two', { dir: tmpDir });
await c1.set('a', 'hello');
await c1.set('b', 'world');
await c2.set('x', 'data');
const stats = FileCache.stats(tmpDir);
expect(stats.totalEntries).toBe(3);
expect(stats.namespaces).toHaveLength(2);
const one = stats.namespaces.find((ns) => ns.name === 'ns-one');
const two = stats.namespaces.find((ns) => ns.name === 'ns-two');
expect(one?.entries).toBe(2);
expect(two?.entries).toBe(1);
expect(stats.totalSize).toBeGreaterThan(0);
});
// -- Static: clear --
it('clear removes all entries', async () => {
const c = new FileCache('clear-all', { dir: tmpDir });
await c.set('a', '1');
await c.set('b', '2');
const result = FileCache.clear({ rootDir: tmpDir });
expect(result.removed).toBe(2);
expect(result.freedBytes).toBeGreaterThan(0);
const stats = FileCache.stats(tmpDir);
expect(stats.totalEntries).toBe(0);
});
it('clear with namespace only removes that namespace', async () => {
  const c1 = new FileCache('keep-me', { dir: tmpDir });
  const c2 = new FileCache('delete-me', { dir: tmpDir });
  await c1.set('a', '1');
  await c2.set('b', '2');
  const result = FileCache.clear({ rootDir: tmpDir, namespace: 'delete-me' });
  expect(result.removed).toBe(1);
  const stats = FileCache.stats(tmpDir);
  expect(stats.totalEntries).toBe(1);
  const withEntries = stats.namespaces.filter((ns) => ns.entries > 0);
  expect(withEntries).toHaveLength(1);
  // Fix: non-null assertion for consistency with the rest of the suite —
  // under noUncheckedIndexedAccess, withEntries[0] is possibly undefined.
  expect(withEntries[0]!.name).toBe('keep-me');
});
// -- Static: cleanup (TTL + size limit) --
it('cleanup evicts entries exceeding maxSizeBytes', async () => {
// Create entries that exceed a 50-byte limit
const c = new FileCache('big-ns', { dir: tmpDir });
await c.set('a', 'x'.repeat(30));
await c.set('b', 'y'.repeat(30));
await c.set('c', 'z'.repeat(30));
const before = FileCache.stats(tmpDir);
expect(before.totalEntries).toBe(3);
// Cleanup with 50-byte limit (well below 90 bytes of content)
const result = FileCache.cleanup(tmpDir, 50, 365 * 24 * 60 * 60 * 1000);
expect(result.removed).toBeGreaterThan(0);
const after = FileCache.stats(tmpDir);
expect(after.totalSize).toBeLessThanOrEqual(50);
});
// -- Keys with special characters --
it('handles keys with colons and special chars', async () => {
const cache = new FileCache('special-ns', { dir: tmpDir });
await cache.set('summary:abc123:200', 'summarized content');
expect(await cache.get('summary:abc123:200')).toBe('summarized content');
});
it('handles very long keys', async () => {
const cache = new FileCache('long-ns', { dir: tmpDir });
const longKey = 'a'.repeat(500);
await cache.set(longKey, 'value');
expect(await cache.get(longKey)).toBe('value');
});
});
// -- parseMaxSize --
describe('parseMaxSize', () => {
it('passes through numbers directly', () => {
expect(parseMaxSize(1024)).toBe(1024);
expect(parseMaxSize(0)).toBe(0);
});
it('parses byte units', () => {
expect(parseMaxSize('100B')).toBe(100);
expect(parseMaxSize('1KB')).toBe(1024);
expect(parseMaxSize('256MB')).toBe(256 * 1024 * 1024);
expect(parseMaxSize('1GB')).toBe(1024 * 1024 * 1024);
expect(parseMaxSize('2TB')).toBe(2 * 1024 * 1024 * 1024 * 1024);
});
it('handles fractional values', () => {
expect(parseMaxSize('1.5GB')).toBe(Math.floor(1.5 * 1024 * 1024 * 1024));
expect(parseMaxSize('0.5MB')).toBe(Math.floor(0.5 * 1024 * 1024));
});
it('is case-insensitive', () => {
expect(parseMaxSize('256mb')).toBe(256 * 1024 * 1024);
expect(parseMaxSize('1gb')).toBe(1024 * 1024 * 1024);
expect(parseMaxSize('1Gb')).toBe(1024 * 1024 * 1024);
});
it('trims whitespace', () => {
expect(parseMaxSize(' 256MB ')).toBe(256 * 1024 * 1024);
expect(parseMaxSize(' 1 GB ')).toBe(1024 * 1024 * 1024);
});
it('parses plain number strings', () => {
expect(parseMaxSize('1048576')).toBe(1048576);
});
it('parses percentage (resolves against filesystem)', () => {
// We can't predict the exact value, but it should be a positive number
const result = parseMaxSize('10%', '/tmp');
expect(result).toBeGreaterThan(0);
expect(typeof result).toBe('number');
});
it('percentage of 100% equals full partition', () => {
const full = parseMaxSize('100%', '/tmp');
const half = parseMaxSize('50%', '/tmp');
// 50% should be roughly half of 100% (within rounding)
expect(Math.abs(half - full / 2)).toBeLessThan(1024);
});
it('throws on invalid specs', () => {
expect(() => parseMaxSize('abc')).toThrow();
expect(() => parseMaxSize('')).toThrow();
expect(() => parseMaxSize('0%')).toThrow();
expect(() => parseMaxSize('101%')).toThrow();
expect(() => parseMaxSize('-5MB')).toThrow();
});
});
// -- Namespace isolation for LLM provider/model/proxymodel combos --
describe('FileCache namespace isolation', () => {
let tmpDir: string;
beforeEach(() => {
tmpDir = makeTmpDir();
});
afterEach(() => {
rmrf(tmpDir);
});
const combos = [
{ provider: 'openai', model: 'gpt-4o', proxyModel: 'content-pipeline' },
{ provider: 'openai', model: 'gpt-4o-mini', proxyModel: 'content-pipeline' },
{ provider: 'anthropic', model: 'claude-sonnet-4-20250514', proxyModel: 'content-pipeline' },
{ provider: 'openai', model: 'gpt-4o', proxyModel: 'default' },
{ provider: 'vllm', model: 'qwen-72b', proxyModel: 'content-pipeline' },
];
it('each provider--model--proxymodel combo gets its own cache', async () => {
  const caches = combos.map(
    (c) => new FileCache(`${c.provider}--${c.model}--${c.proxyModel}`, { dir: tmpDir }),
  );
  // Write the same key with a distinct value into each combo's cache.
  // Fix: non-null assertions on indexed access, consistent with the rest of
  // the suite — under noUncheckedIndexedAccess, caches[i] is possibly undefined.
  for (let i = 0; i < caches.length; i++) {
    await caches[i]!.set('summary-key', `value-from-combo-${i}`);
  }
  // Each cache must read back only its own value.
  for (let i = 0; i < caches.length; i++) {
    expect(await caches[i]!.get('summary-key')).toBe(`value-from-combo-${i}`);
  }
  // Verify stats show correct number of namespaces
  const stats = FileCache.stats(tmpDir);
  expect(stats.namespaces).toHaveLength(combos.length);
  expect(stats.totalEntries).toBe(combos.length);
});
it('changing only the model creates a separate cache', async () => {
const c1 = new FileCache('openai--gpt-4o--content-pipeline', { dir: tmpDir });
const c2 = new FileCache('openai--gpt-4o-mini--content-pipeline', { dir: tmpDir });
await c1.set('k', 'gpt4o-result');
await c2.set('k', 'mini-result');
expect(await c1.get('k')).toBe('gpt4o-result');
expect(await c2.get('k')).toBe('mini-result');
});
it('changing only the provider creates a separate cache', async () => {
const c1 = new FileCache('openai--gpt-4o--content-pipeline', { dir: tmpDir });
const c2 = new FileCache('anthropic--gpt-4o--content-pipeline', { dir: tmpDir });
await c1.set('k', 'openai-result');
await c2.set('k', 'anthropic-result');
expect(await c1.get('k')).toBe('openai-result');
expect(await c2.get('k')).toBe('anthropic-result');
});
it('changing only the proxyModel creates a separate cache', async () => {
const c1 = new FileCache('openai--gpt-4o--content-pipeline', { dir: tmpDir });
const c2 = new FileCache('openai--gpt-4o--default', { dir: tmpDir });
await c1.set('k', 'pipeline-result');
await c2.set('k', 'default-result');
expect(await c1.get('k')).toBe('pipeline-result');
expect(await c2.get('k')).toBe('default-result');
});
it('clearing one namespace leaves others intact', async () => {
const c1 = new FileCache('openai--gpt-4o--content-pipeline', { dir: tmpDir });
const c2 = new FileCache('anthropic--claude-sonnet-4-20250514--content-pipeline', { dir: tmpDir });
await c1.set('k', 'v1');
await c2.set('k', 'v2');
FileCache.clear({ rootDir: tmpDir, namespace: 'openai--gpt-4o--content-pipeline' });
// c1's namespace cleared, c2 intact
const fresh1 = new FileCache('openai--gpt-4o--content-pipeline', { dir: tmpDir });
const fresh2 = new FileCache('anthropic--claude-sonnet-4-20250514--content-pipeline', { dir: tmpDir });
expect(await fresh1.get('k')).toBeNull();
expect(await fresh2.get('k')).toBe('v2');
});
});

View File

@@ -0,0 +1,135 @@
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import { mkdtempSync, writeFileSync, mkdirSync, rmSync, unlinkSync } from 'node:fs';
import { join } from 'node:path';
import { tmpdir } from 'node:os';
import { getStage, listStages, loadCustomStages, clearCustomStages } from '../src/proxymodel/stage-registry.js';
let tempDir: string;
beforeEach(() => {
tempDir = mkdtempSync(join(tmpdir(), 'mcpctl-hotreload-'));
clearCustomStages();
});
afterEach(() => {
clearCustomStages();
rmSync(tempDir, { recursive: true, force: true });
});
describe('Hot-reload: stage registry cache busting', () => {
it('loadCustomStages loads .js files', async () => {
writeFileSync(join(tempDir, 'echo.js'), `
export default async function(content, ctx) {
return { content: 'v1:' + content };
}
`);
await loadCustomStages(tempDir);
const handler = getStage('echo');
expect(handler).not.toBeNull();
const result = await handler!('hello', {} as Parameters<typeof handler>[1]);
expect(result.content).toBe('v1:hello');
});
it('reloading picks up file changes via cache busting', async () => {
writeFileSync(join(tempDir, 'transform.js'), `
export default async function(content) {
return { content: 'v1:' + content };
}
`);
await loadCustomStages(tempDir);
let handler = getStage('transform');
let result = await handler!('test', {} as Parameters<typeof handler>[1]);
expect(result.content).toBe('v1:test');
// Overwrite the file with a new version
writeFileSync(join(tempDir, 'transform.js'), `
export default async function(content) {
return { content: 'v2:' + content };
}
`);
// Reload — should pick up the new version due to cache busting
await loadCustomStages(tempDir);
handler = getStage('transform');
result = await handler!('test', {} as Parameters<typeof handler>[1]);
expect(result.content).toBe('v2:test');
});
it('removing a file removes the stage on reload', async () => {
writeFileSync(join(tempDir, 'temp.js'), `
export default async function(content) {
return { content };
}
`);
await loadCustomStages(tempDir);
expect(getStage('temp')).not.toBeNull();
unlinkSync(join(tempDir, 'temp.js'));
await loadCustomStages(tempDir);
expect(getStage('temp')).toBeNull();
});
it('adding a new file makes it available on reload', async () => {
await loadCustomStages(tempDir);
expect(getStage('newstage')).toBeNull();
writeFileSync(join(tempDir, 'newstage.js'), `
export default async function(content) {
return { content: 'new:' + content };
}
`);
await loadCustomStages(tempDir);
const handler = getStage('newstage');
expect(handler).not.toBeNull();
const result = await handler!('x', {} as Parameters<typeof handler>[1]);
expect(result.content).toBe('new:x');
});
it('syntax errors in stage files do not crash reload', async () => {
writeFileSync(join(tempDir, 'good.js'), `
export default async function(content) {
return { content };
}
`);
writeFileSync(join(tempDir, 'bad.js'), 'this is not valid javascript{{{');
await loadCustomStages(tempDir);
// Good stage should still load
expect(getStage('good')).not.toBeNull();
// Bad stage should not be present
expect(getStage('bad')).toBeNull();
});
it('supports .mjs files', async () => {
writeFileSync(join(tempDir, 'mjs-stage.mjs'), `
export default async function(content) {
return { content: 'mjs:' + content };
}
`);
await loadCustomStages(tempDir);
const handler = getStage('mjs-stage');
expect(handler).not.toBeNull();
const result = await handler!('hi', {} as Parameters<typeof handler>[1]);
expect(result.content).toBe('mjs:hi');
});
it('listStages shows custom stages as local', async () => {
writeFileSync(join(tempDir, 'custom.js'), `
export default async function(content) {
return { content };
}
`);
await loadCustomStages(tempDir);
const stages = listStages();
const custom = stages.find((s) => s.name === 'custom');
expect(custom).toBeDefined();
expect(custom!.source).toBe('local');
});
});

View File

@@ -0,0 +1,202 @@
import { describe, it, expect, beforeEach } from 'vitest';
import { pauseQueue } from '../src/proxymodel/pause-queue.js';
beforeEach(() => {
// Reset state between tests
pauseQueue.setPaused(false);
});
describe('PauseQueue', () => {
it('returns transformed content immediately when not paused', async () => {
const result = await pauseQueue.enqueue({
sessionId: 's1',
projectName: 'proj',
contentType: 'toolResult',
sourceName: 'test/tool',
original: 'raw content',
transformed: 'processed content',
});
expect(result).toBe('processed content');
expect(pauseQueue.size).toBe(0);
});
it('holds content when paused', async () => {
pauseQueue.setPaused(true);
let resolved = false;
const promise = pauseQueue.enqueue({
sessionId: 's1',
projectName: 'proj',
contentType: 'toolResult',
sourceName: 'test/tool',
original: 'raw',
transformed: 'processed',
}).then((r) => { resolved = true; return r; });
// Should not resolve immediately
await new Promise((r) => setTimeout(r, 50));
expect(resolved).toBe(false);
expect(pauseQueue.size).toBe(1);
// Release it
const items = pauseQueue.getItems();
expect(items).toHaveLength(1);
pauseQueue.releaseOne(items[0]!.id);
const result = await promise;
expect(result).toBe('processed');
expect(resolved).toBe(true);
expect(pauseQueue.size).toBe(0);
});
it('editAndRelease sends edited content', async () => {
pauseQueue.setPaused(true);
const promise = pauseQueue.enqueue({
sessionId: 's1',
projectName: 'proj',
contentType: 'toolResult',
sourceName: 'test/tool',
original: 'raw',
transformed: 'auto-processed',
});
const items = pauseQueue.getItems();
pauseQueue.editAndRelease(items[0]!.id, 'manually-edited');
const result = await promise;
expect(result).toBe('manually-edited');
});
it('dropOne sends empty content', async () => {
pauseQueue.setPaused(true);
const promise = pauseQueue.enqueue({
sessionId: 's1',
projectName: 'proj',
contentType: 'toolResult',
sourceName: 'test/tool',
original: 'raw',
transformed: 'processed',
});
const items = pauseQueue.getItems();
pauseQueue.dropOne(items[0]!.id);
const result = await promise;
expect(result).toBe('');
});
it('setPaused(false) releases all queued items', async () => {
pauseQueue.setPaused(true);
const p1 = pauseQueue.enqueue({
sessionId: 's1', projectName: 'proj', contentType: 'toolResult',
sourceName: 'tool1', original: 'r1', transformed: 't1',
});
const p2 = pauseQueue.enqueue({
sessionId: 's2', projectName: 'proj', contentType: 'toolResult',
sourceName: 'tool2', original: 'r2', transformed: 't2',
});
expect(pauseQueue.size).toBe(2);
// Resume releases all
pauseQueue.setPaused(false);
const [r1, r2] = await Promise.all([p1, p2]);
expect(r1).toBe('t1');
expect(r2).toBe('t2');
expect(pauseQueue.size).toBe(0);
});
it('releaseAll releases all items and returns count', async () => {
pauseQueue.setPaused(true);
const p1 = pauseQueue.enqueue({
sessionId: 's1', projectName: 'proj', contentType: 'toolResult',
sourceName: 'tool1', original: 'r1', transformed: 't1',
});
const p2 = pauseQueue.enqueue({
sessionId: 's1', projectName: 'proj', contentType: 'toolResult',
sourceName: 'tool2', original: 'r2', transformed: 't2',
});
const count = pauseQueue.releaseAll();
expect(count).toBe(2);
const [r1, r2] = await Promise.all([p1, p2]);
expect(r1).toBe('t1');
expect(r2).toBe('t2');
});
it('returns false for operations on non-existent IDs', () => {
expect(pauseQueue.releaseOne('nonexistent')).toBe(false);
expect(pauseQueue.editAndRelease('nonexistent', 'x')).toBe(false);
expect(pauseQueue.dropOne('nonexistent')).toBe(false);
});
it('getItems strips internal resolve function', async () => {
pauseQueue.setPaused(true);
pauseQueue.enqueue({
sessionId: 's1', projectName: 'proj', contentType: 'toolResult',
sourceName: 'tool', original: 'raw', transformed: 'processed',
});
const items = pauseQueue.getItems();
expect(items).toHaveLength(1);
const item = items[0]!;
expect(item.id).toBeTruthy();
expect(item.sessionId).toBe('s1');
expect(item.projectName).toBe('proj');
expect(item.contentType).toBe('toolResult');
expect(item.sourceName).toBe('tool');
expect(item.original).toBe('raw');
expect(item.transformed).toBe('processed');
expect(item.timestamp).toBeGreaterThan(0);
// Must not expose resolve
expect((item as Record<string, unknown>)['resolve']).toBeUndefined();
// Cleanup
pauseQueue.releaseAll();
});
it('notifies subscribers on enqueue and release', async () => {
const notifications: number[] = [];
const unsub = pauseQueue.subscribe((items) => notifications.push(items.length));
pauseQueue.setPaused(true);
pauseQueue.enqueue({
sessionId: 's1', projectName: 'proj', contentType: 'toolResult',
sourceName: 'tool', original: 'r', transformed: 't',
});
expect(notifications).toContain(1);
pauseQueue.releaseAll();
expect(notifications).toContain(0);
unsub();
});
it('each item gets a unique ID', async () => {
pauseQueue.setPaused(true);
pauseQueue.enqueue({
sessionId: 's1', projectName: 'proj', contentType: 'toolResult',
sourceName: 'tool1', original: 'r1', transformed: 't1',
});
pauseQueue.enqueue({
sessionId: 's1', projectName: 'proj', contentType: 'toolResult',
sourceName: 'tool2', original: 'r2', transformed: 't2',
});
const items = pauseQueue.getItems();
expect(items).toHaveLength(2);
expect(items[0]!.id).not.toBe(items[1]!.id);
pauseQueue.releaseAll();
});
});

View File

@@ -0,0 +1,126 @@
import { describe, it, expect, beforeAll } from 'vitest';
import http from 'node:http';
import { isMcplocalRunning, mcpctl } from './mcp-client.js';
const MCPLOCAL_URL = process.env['MCPLOCAL_URL'] ?? 'http://localhost:3200';
let available = false;
function fetchJson<T>(urlPath: string, method = 'GET'): Promise<T | null> {
return new Promise((resolve) => {
const req = http.request(`${MCPLOCAL_URL}${urlPath}`, { method, timeout: 5000 }, (res) => {
const chunks: Buffer[] = [];
res.on('data', (chunk: Buffer) => chunks.push(chunk));
res.on('end', () => {
try {
resolve(JSON.parse(Buffer.concat(chunks).toString()) as T);
} catch {
resolve(null);
}
});
});
req.on('error', () => resolve(null));
req.on('timeout', () => { req.destroy(); resolve(null); });
req.end();
});
}
beforeAll(async () => {
available = await isMcplocalRunning();
});
interface CacheStats {
rootDir: string;
totalSize: number;
totalEntries: number;
namespaces: Array<{ name: string; entries: number; size: number }>;
}
interface ClearResult {
removed: number;
freedBytes: number;
}
// Smoke tests for the persistent file cache, driven through two surfaces:
// mcplocal's HTTP /cache endpoints and the `mcpctl cache` CLI.
// NOTE: every test starts with `if (!available) return;`, so the suite passes
// vacuously when mcplocal is not running. `available` is only known after
// beforeAll runs (i.e. after vitest has collected tests), which is why
// it.skipIf/runIf cannot be used here.
describe('Cache smoke tests', () => {
  describe('mcplocal /cache endpoints', () => {
    it('GET /cache/stats returns valid stats structure', async () => {
      if (!available) return;
      const stats = await fetchJson<CacheStats>('/cache/stats');
      // Structural checks only — exact values depend on prior cache activity.
      expect(stats).not.toBeNull();
      expect(stats).toHaveProperty('rootDir');
      expect(stats).toHaveProperty('totalSize');
      expect(stats).toHaveProperty('totalEntries');
      expect(stats).toHaveProperty('namespaces');
      expect(Array.isArray(stats!.namespaces)).toBe(true);
      expect(typeof stats!.totalSize).toBe('number');
      expect(typeof stats!.totalEntries).toBe('number');
    });
    it('namespaces use provider--model--proxymodel format', async () => {
      if (!available) return;
      const stats = await fetchJson<CacheStats>('/cache/stats');
      // A cold cache has nothing to verify; bail out instead of failing.
      if (!stats || stats.namespaces.length === 0) return;
      // Each namespace should contain -- separators
      // NOTE(review): despite the comment above, the separator format is not
      // asserted in this loop — only field presence/types. The convention is
      // checked in the 'cache namespace isolation' suite further down.
      for (const ns of stats.namespaces) {
        expect(ns.name).toBeTruthy();
        expect(typeof ns.entries).toBe('number');
        expect(typeof ns.size).toBe('number');
      }
    });
    it('DELETE /cache returns clear result', async () => {
      if (!available) return;
      // This clears the cache, but it's non-destructive for a smoke test
      const result = await fetchJson<ClearResult>('/cache', 'DELETE');
      expect(result).not.toBeNull();
      expect(result).toHaveProperty('removed');
      expect(result).toHaveProperty('freedBytes');
      expect(typeof result!.removed).toBe('number');
      expect(typeof result!.freedBytes).toBe('number');
    });
  });
  describe('mcpctl cache CLI', () => {
    // `mcpctl` is a helper defined earlier in this file — presumably it shells
    // out to the CLI binary and resolves with captured stdout; confirm there.
    it('mcpctl cache stats shows cache statistics', async () => {
      if (!available) return;
      const output = await mcpctl('cache stats');
      // Should either show table or "Cache is empty."
      expect(output.length).toBeGreaterThan(0);
      const hasTable = output.includes('NAMESPACE');
      const isEmpty = output.includes('Cache is empty');
      expect(hasTable || isEmpty).toBe(true);
    });
    it('mcpctl cache clear runs without error', async () => {
      if (!available) return;
      const output = await mcpctl('cache clear');
      // Should report what was cleared, or that cache is empty
      // NOTE(review): the [Cc] class is redundant given the /i flag.
      expect(output).toMatch(/[Cc]lear|empty/i);
    });
  });
  describe('cache namespace isolation', () => {
    it('stats show separate namespaces per llm provider/model/proxymodel combo', async () => {
      if (!available) return;
      // After any project MCP sessions have run, check that namespaces
      // follow the provider--model--proxymodel convention
      const stats = await fetchJson<CacheStats>('/cache/stats');
      if (!stats || stats.namespaces.length === 0) return;
      // Namespaces with -- separators indicate proper isolation
      const separated = stats.namespaces.filter((ns) => ns.name.includes('--'));
      // If there are namespaces, at least some should have the separator format
      // (the 'dynamic' namespace from hot-swap is an exception)
      if (stats.namespaces.length > 1) {
        expect(separated.length).toBeGreaterThan(0);
      }
    });
  });
});

View File

@@ -0,0 +1,64 @@
import { describe, it, expect, beforeAll } from 'vitest';
import http from 'node:http';
import { isMcplocalRunning } from './mcp-client.js';
// Base URL of the mcplocal server under test; override via the MCPLOCAL_URL env var.
const MCPLOCAL_URL = process.env['MCPLOCAL_URL'] ?? 'http://localhost:3200';
// Set once in beforeAll; when false, every test returns early (vacuous pass).
let available = false;
/**
 * Issue an HTTP request against the local mcplocal server and parse the JSON
 * response body.
 *
 * Never rejects: transport errors, timeouts, and malformed JSON all resolve
 * to `null`, so callers can treat "server unavailable" and "bad payload"
 * uniformly. The HTTP status code is deliberately ignored so that JSON error
 * bodies from 4xx responses are still returned to the caller.
 *
 * @param urlPath Path starting with '/', appended to MCPLOCAL_URL.
 * @param method  HTTP method; defaults to 'GET'.
 * @param body    Optional value serialized as a JSON request body.
 * @returns       Parsed JSON payload, or null on any failure.
 */
function fetchJson<T>(urlPath: string, method = 'GET', body?: unknown): Promise<T | null> {
  return new Promise((resolve) => {
    const payload = body !== undefined ? JSON.stringify(body) : undefined;
    const req = http.request(`${MCPLOCAL_URL}${urlPath}`, {
      method,
      timeout: 5000,
      headers: payload ? { 'Content-Type': 'application/json', 'Content-Length': Buffer.byteLength(payload) } : {},
    }, (res) => {
      const chunks: Buffer[] = [];
      res.on('data', (chunk: Buffer) => chunks.push(chunk));
      // Fix: a response-stream error (e.g. connection reset mid-body) would
      // otherwise leave this promise pending until the vitest test timeout;
      // settle it explicitly. (Resolving twice is a harmless no-op.)
      res.on('error', () => resolve(null));
      res.on('end', () => {
        try {
          resolve(JSON.parse(Buffer.concat(chunks).toString()) as T);
        } catch {
          resolve(null); // empty or non-JSON body
        }
      });
    });
    req.on('error', () => resolve(null));
    req.on('timeout', () => { req.destroy(); resolve(null); });
    if (payload) req.write(payload);
    req.end();
  });
}
// Probe the local mcplocal server once per run; each test bails out early
// (vacuous pass) when it is not reachable.
beforeAll(() => isMcplocalRunning().then((up) => { available = up; }));
// Smoke coverage for stage hot-reload: listing the registered stages and
// triggering a manual reload through mcplocal's HTTP API. Each test bails
// out early (vacuous pass) when no mcplocal server is reachable.
describe('Hot-reload smoke tests', () => {
  describe('GET /proxymodels/stages', () => {
    it('returns list of stages with source', async () => {
      if (!available) return;
      const stageList = await fetchJson<Array<{ name: string; source: string }>>('/proxymodels/stages');
      expect(stageList).not.toBeNull();
      const isArray = Array.isArray(stageList);
      expect(isArray).toBe(true);
      expect(stageList!.length).toBeGreaterThan(0);
      // The built-in 'passthrough' stage must always be registered.
      const builtIn = stageList!.find((stage) => stage.name === 'passthrough');
      expect(builtIn).toBeDefined();
      expect(builtIn!.source).toBe('built-in');
    });
  });
  describe('POST /proxymodels/reload', () => {
    it('reloads stages and returns count', async () => {
      if (!available) return;
      const reloadResult = await fetchJson<{ loaded: number }>('/proxymodels/reload', 'POST');
      expect(reloadResult).not.toBeNull();
      expect(typeof reloadResult!.loaded).toBe('number');
    });
  });
});

View File

@@ -0,0 +1,143 @@
import { describe, it, expect, beforeAll, afterAll } from 'vitest';
import http from 'node:http';
import { isMcplocalRunning } from './mcp-client.js';
// Base URL of the mcplocal server under test; override via the MCPLOCAL_URL env var.
const MCPLOCAL_URL = process.env['MCPLOCAL_URL'] ?? 'http://localhost:3200';
// Set once in beforeAll; when false, every test returns early (vacuous pass).
let available = false;
/**
 * Issue an HTTP request against the local mcplocal server and parse the JSON
 * response body.
 *
 * Never rejects: transport errors, timeouts, and malformed JSON all resolve
 * to `null`, so callers can treat "server unavailable" and "bad payload"
 * uniformly. The HTTP status code is deliberately ignored so that JSON error
 * bodies from 4xx responses are still returned to the caller.
 *
 * @param urlPath Path starting with '/', appended to MCPLOCAL_URL.
 * @param method  HTTP method; defaults to 'GET'.
 * @param body    Optional value serialized as a JSON request body.
 * @returns       Parsed JSON payload, or null on any failure.
 */
function fetchJson<T>(urlPath: string, method = 'GET', body?: unknown): Promise<T | null> {
  return new Promise((resolve) => {
    const payload = body !== undefined ? JSON.stringify(body) : undefined;
    const req = http.request(`${MCPLOCAL_URL}${urlPath}`, {
      method,
      timeout: 5000,
      headers: payload ? { 'Content-Type': 'application/json', 'Content-Length': Buffer.byteLength(payload) } : {},
    }, (res) => {
      const chunks: Buffer[] = [];
      res.on('data', (chunk: Buffer) => chunks.push(chunk));
      // Fix: a response-stream error (e.g. connection reset mid-body) would
      // otherwise leave this promise pending until the vitest test timeout;
      // settle it explicitly. (Resolving twice is a harmless no-op.)
      res.on('error', () => resolve(null));
      res.on('end', () => {
        try {
          resolve(JSON.parse(Buffer.concat(chunks).toString()) as T);
        } catch {
          resolve(null); // empty or non-JSON body
        }
      });
    });
    req.on('error', () => resolve(null));
    req.on('timeout', () => { req.destroy(); resolve(null); });
    if (payload) req.write(payload);
    req.end();
  });
}
// Probe the local mcplocal server once per run; each test bails out early
// (vacuous pass) when it is not reachable.
beforeAll(() => isMcplocalRunning().then((up) => { available = up; }));
// Cleanup: never leave the proxy paused for whatever runs after this suite,
// even when an assertion failed mid-test.
afterAll(async () => {
  if (!available) return;
  await fetchJson('/pause', 'PUT', { paused: false });
});
// Smoke tests for the pause queue: mcplocal can temporarily hold MCP traffic
// and expose the held items for inspection, edit, release, or drop over HTTP.
// Each test bails out early (vacuous pass) when mcplocal is not running; the
// afterAll hook earlier in this file unpauses the proxy regardless of outcome.
describe('Pause Queue smoke tests', () => {
  describe('GET /pause', () => {
    it('returns pause state', async () => {
      if (!available) return;
      const state = await fetchJson<{ paused: boolean; queueSize: number }>('/pause');
      expect(state).not.toBeNull();
      expect(typeof state!.paused).toBe('boolean');
      expect(typeof state!.queueSize).toBe('number');
    });
  });
  describe('PUT /pause', () => {
    it('can enable and disable pause mode', async () => {
      if (!available) return;
      // Enable
      const on = await fetchJson<{ paused: boolean; queueSize: number }>('/pause', 'PUT', { paused: true });
      expect(on).not.toBeNull();
      expect(on!.paused).toBe(true);
      // Verify the change is visible through an independent GET.
      const state = await fetchJson<{ paused: boolean }>('/pause');
      expect(state!.paused).toBe(true);
      // Disable again so subsequent tests see an unpaused proxy.
      const off = await fetchJson<{ paused: boolean; queueSize: number }>('/pause', 'PUT', { paused: false });
      expect(off).not.toBeNull();
      expect(off!.paused).toBe(false);
    });
    it('rejects non-boolean paused value', async () => {
      if (!available) return;
      // fetchJson ignores the HTTP status, so the error JSON body of the
      // rejection response is still parsed and returned here.
      const result = await fetchJson<{ error: string }>('/pause', 'PUT', { paused: 'yes' });
      expect(result).not.toBeNull();
      expect(result!.error).toBeTruthy();
    });
  });
  describe('GET /pause/queue', () => {
    it('returns empty queue when not paused', async () => {
      if (!available) return;
      // Only shape is asserted; emptiness is implied but not checked here.
      const result = await fetchJson<{ paused: boolean; items: unknown[] }>('/pause/queue');
      expect(result).not.toBeNull();
      expect(Array.isArray(result!.items)).toBe(true);
    });
  });
  describe('POST /pause/release-all', () => {
    it('returns released count', async () => {
      if (!available) return;
      // With no traffic held, released may be 0 — the queue must end empty.
      const result = await fetchJson<{ released: number; queueSize: number }>('/pause/release-all', 'POST');
      expect(result).not.toBeNull();
      expect(typeof result!.released).toBe('number');
      expect(result!.queueSize).toBe(0);
    });
  });
  describe('POST /pause/queue/:id/release', () => {
    it('returns 404 for non-existent item', async () => {
      if (!available) return;
      // The 404 JSON error body is still parsed by fetchJson (status ignored).
      const result = await fetchJson<{ error: string }>('/pause/queue/nonexistent/release', 'POST');
      expect(result).not.toBeNull();
      expect(result!.error).toMatch(/not found/i);
    });
  });
  describe('POST /pause/queue/:id/edit', () => {
    it('returns 404 for non-existent item', async () => {
      if (!available) return;
      const result = await fetchJson<{ error: string }>('/pause/queue/nonexistent/edit', 'POST', { content: 'test' });
      expect(result).not.toBeNull();
      expect(result!.error).toMatch(/not found/i);
    });
    it('rejects missing content', async () => {
      if (!available) return;
      // Validation error (missing content) should win over the 404 path here.
      const result = await fetchJson<{ error: string }>('/pause/queue/nonexistent/edit', 'POST', {});
      expect(result).not.toBeNull();
      expect(result!.error).toBeTruthy();
    });
  });
  describe('POST /pause/queue/:id/drop', () => {
    it('returns 404 for non-existent item', async () => {
      if (!available) return;
      const result = await fetchJson<{ error: string }>('/pause/queue/nonexistent/drop', 'POST');
      expect(result).not.toBeNull();
      expect(result!.error).toMatch(/not found/i);
    });
  });
});