Files
mcpctl/src/mcplocal/tests/metrics.test.ts
Michal b8c5cf718a
Some checks failed
CI / lint (pull_request) Has been cancelled
CI / typecheck (pull_request) Has been cancelled
CI / test (pull_request) Has been cancelled
CI / build (pull_request) Has been cancelled
CI / package (pull_request) Has been cancelled
feat: implement v2 3-tier architecture (mcpctl → mcplocal → mcpd)
- Rename local-proxy to mcplocal with HTTP server, LLM pipeline, mcpd discovery
- Add LLM pre-processing: token estimation, filter cache, metrics, Gemini CLI + DeepSeek providers
- Add mcpd auth (login/logout) and MCP proxy endpoints
- Update CLI: dual URLs (mcplocalUrl/mcpdUrl), auth commands, --direct flag
- Add tiered health monitoring, shell completions, e2e integration tests
- 57 test files, 597 tests passing

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-22 11:42:06 +00:00

94 lines
2.8 KiB
TypeScript

import { describe, it, expect } from 'vitest';
import { FilterMetrics } from '../src/llm/metrics.js';
describe('FilterMetrics', () => {
  // All-zero stats snapshot, shared by the initial-state and reset tests.
  const ZEROED_STATS = {
    totalTokensProcessed: 0,
    tokensSaved: 0,
    cacheHits: 0,
    cacheMisses: 0,
    filterCount: 0,
    averageFilterLatencyMs: 0,
  };

  it('starts with zeroed stats', () => {
    const metrics = new FilterMetrics();
    expect(metrics.getStats()).toMatchObject(ZEROED_STATS);
  });

  it('records filter operations and accumulates tokens', () => {
    const metrics = new FilterMetrics();
    metrics.recordFilter(500, 200, 50);
    metrics.recordFilter(300, 100, 30);
    const { totalTokensProcessed, tokensSaved, filterCount, averageFilterLatencyMs } =
      metrics.getStats();
    expect(totalTokensProcessed).toBe(800); // 500 + 300
    expect(tokensSaved).toBe(500); // (500 - 200) + (300 - 100)
    expect(filterCount).toBe(2);
    expect(averageFilterLatencyMs).toBe(40); // (50 + 30) / 2
  });

  it('does not allow negative token savings', () => {
    const metrics = new FilterMetrics();
    // Edge case: the filtered output grew instead of shrinking.
    metrics.recordFilter(100, 200, 10);
    const { totalTokensProcessed, tokensSaved } = metrics.getStats();
    expect(totalTokensProcessed).toBe(100);
    expect(tokensSaved).toBe(0); // clamped to 0
  });

  it('records cache hits and misses independently', () => {
    const metrics = new FilterMetrics();
    metrics.recordCacheHit();
    metrics.recordCacheHit();
    metrics.recordCacheMiss();
    const { cacheHits, cacheMisses } = metrics.getStats();
    expect(cacheHits).toBe(2);
    expect(cacheMisses).toBe(1);
  });

  it('computes average latency correctly', () => {
    const metrics = new FilterMetrics();
    for (const latencyMs of [10, 20, 30]) {
      metrics.recordFilter(100, 50, latencyMs);
    }
    expect(metrics.getStats().averageFilterLatencyMs).toBe(20);
  });

  it('returns 0 average latency when no filter operations', () => {
    const metrics = new FilterMetrics();
    // Cache activity alone must not affect the latency average.
    metrics.recordCacheHit();
    expect(metrics.getStats().averageFilterLatencyMs).toBe(0);
  });

  it('resets all metrics to zero', () => {
    const metrics = new FilterMetrics();
    metrics.recordFilter(500, 200, 50);
    metrics.recordCacheHit();
    metrics.recordCacheMiss();
    metrics.reset();
    expect(metrics.getStats()).toMatchObject(ZEROED_STATS);
  });

  it('returns independent snapshots', () => {
    const metrics = new FilterMetrics();
    metrics.recordFilter(100, 50, 10);
    const before = metrics.getStats();
    metrics.recordFilter(200, 100, 20);
    const after = metrics.getStats();
    // The earlier snapshot must not be mutated by later recordings.
    expect(before.totalTokensProcessed).toBe(100);
    expect(after.totalTokensProcessed).toBe(300);
  });
});