2026-02-21 04:17:31 +00:00
|
|
|
import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest';
|
|
|
|
|
import { mkdtempSync, rmSync } from 'node:fs';
|
|
|
|
|
import { join } from 'node:path';
|
|
|
|
|
import { tmpdir } from 'node:os';
|
|
|
|
|
import { createStatusCommand } from '../../src/commands/status.js';
|
2026-02-24 23:52:04 +00:00
|
|
|
import type { StatusCommandDeps } from '../../src/commands/status.js';
|
2026-02-21 04:17:31 +00:00
|
|
|
import { saveConfig, DEFAULT_CONFIG } from '../../src/config/index.js';
|
2026-02-22 11:42:06 +00:00
|
|
|
import { saveCredentials } from '../../src/auth/index.js';
|
2026-02-21 04:17:31 +00:00
|
|
|
|
|
|
|
|
let tempDir: string;
|
|
|
|
|
let output: string[];
|
2026-02-24 23:52:04 +00:00
|
|
|
let written: string[];
|
2026-02-21 04:17:31 +00:00
|
|
|
|
|
|
|
|
function log(...args: string[]) {
|
|
|
|
|
output.push(args.join(' '));
|
|
|
|
|
}
|
|
|
|
|
|
2026-02-24 23:52:04 +00:00
|
|
|
function write(text: string) {
|
|
|
|
|
written.push(text);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
function baseDeps(overrides?: Partial<StatusCommandDeps>): Partial<StatusCommandDeps> {
|
|
|
|
|
return {
|
|
|
|
|
configDeps: { configDir: tempDir },
|
|
|
|
|
credentialsDeps: { configDir: tempDir },
|
|
|
|
|
log,
|
|
|
|
|
write,
|
|
|
|
|
checkHealth: async () => true,
|
2026-02-25 02:16:08 +00:00
|
|
|
fetchProviders: async () => null,
|
2026-04-27 11:27:45 +01:00
|
|
|
fetchServerLlms: async () => null,
|
2026-04-27 12:02:00 +01:00
|
|
|
probeServerLlm: async () => ({ ok: true, ms: 12, say: 'hi' }),
|
2026-02-24 23:52:04 +00:00
|
|
|
isTTY: false,
|
|
|
|
|
...overrides,
|
|
|
|
|
};
|
|
|
|
|
}
|
|
|
|
|
|
2026-02-21 04:17:31 +00:00
|
|
|
beforeEach(() => {
|
|
|
|
|
tempDir = mkdtempSync(join(tmpdir(), 'mcpctl-status-test-'));
|
|
|
|
|
output = [];
|
2026-02-24 23:52:04 +00:00
|
|
|
written = [];
|
2026-02-21 04:17:31 +00:00
|
|
|
});
|
|
|
|
|
|
|
|
|
|
afterEach(() => {
|
|
|
|
|
rmSync(tempDir, { recursive: true, force: true });
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
describe('status command', () => {
|
|
|
|
|
it('shows status in table format', async () => {
|
2026-02-24 23:52:04 +00:00
|
|
|
const cmd = createStatusCommand(baseDeps());
|
2026-02-21 04:17:31 +00:00
|
|
|
await cmd.parseAsync([], { from: 'user' });
|
feat: implement v2 3-tier architecture (mcpctl → mcplocal → mcpd)
- Rename local-proxy to mcplocal with HTTP server, LLM pipeline, mcpd discovery
- Add LLM pre-processing: token estimation, filter cache, metrics, Gemini CLI + DeepSeek providers
- Add mcpd auth (login/logout) and MCP proxy endpoints
- Update CLI: dual URLs (mcplocalUrl/mcpdUrl), auth commands, --direct flag
- Add tiered health monitoring, shell completions, e2e integration tests
- 57 test files, 597 tests passing
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-22 11:42:06 +00:00
|
|
|
const out = output.join('\n');
|
|
|
|
|
expect(out).toContain('mcpctl v');
|
|
|
|
|
expect(out).toContain('mcplocal:');
|
|
|
|
|
expect(out).toContain('mcpd:');
|
|
|
|
|
expect(out).toContain('connected');
|
2026-02-21 04:17:31 +00:00
|
|
|
});
|
|
|
|
|
|
feat: implement v2 3-tier architecture (mcpctl → mcplocal → mcpd)
- Rename local-proxy to mcplocal with HTTP server, LLM pipeline, mcpd discovery
- Add LLM pre-processing: token estimation, filter cache, metrics, Gemini CLI + DeepSeek providers
- Add mcpd auth (login/logout) and MCP proxy endpoints
- Update CLI: dual URLs (mcplocalUrl/mcpdUrl), auth commands, --direct flag
- Add tiered health monitoring, shell completions, e2e integration tests
- 57 test files, 597 tests passing
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-22 11:42:06 +00:00
|
|
|
it('shows unreachable when daemons are down', async () => {
|
2026-02-24 23:52:04 +00:00
|
|
|
const cmd = createStatusCommand(baseDeps({ checkHealth: async () => false }));
|
2026-02-21 04:17:31 +00:00
|
|
|
await cmd.parseAsync([], { from: 'user' });
|
|
|
|
|
expect(output.join('\n')).toContain('unreachable');
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
it('shows not logged in when no credentials', async () => {
|
2026-02-24 23:52:04 +00:00
|
|
|
const cmd = createStatusCommand(baseDeps());
|
feat: implement v2 3-tier architecture (mcpctl → mcplocal → mcpd)
- Rename local-proxy to mcplocal with HTTP server, LLM pipeline, mcpd discovery
- Add LLM pre-processing: token estimation, filter cache, metrics, Gemini CLI + DeepSeek providers
- Add mcpd auth (login/logout) and MCP proxy endpoints
- Update CLI: dual URLs (mcplocalUrl/mcpdUrl), auth commands, --direct flag
- Add tiered health monitoring, shell completions, e2e integration tests
- 57 test files, 597 tests passing
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-22 11:42:06 +00:00
|
|
|
await cmd.parseAsync([], { from: 'user' });
|
|
|
|
|
expect(output.join('\n')).toContain('not logged in');
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
it('shows logged in user when credentials exist', async () => {
|
|
|
|
|
saveCredentials({ token: 'tok', mcpdUrl: 'http://x:3100', user: 'alice@example.com' }, { configDir: tempDir });
|
2026-02-24 23:52:04 +00:00
|
|
|
const cmd = createStatusCommand(baseDeps());
|
feat: implement v2 3-tier architecture (mcpctl → mcplocal → mcpd)
- Rename local-proxy to mcplocal with HTTP server, LLM pipeline, mcpd discovery
- Add LLM pre-processing: token estimation, filter cache, metrics, Gemini CLI + DeepSeek providers
- Add mcpd auth (login/logout) and MCP proxy endpoints
- Update CLI: dual URLs (mcplocalUrl/mcpdUrl), auth commands, --direct flag
- Add tiered health monitoring, shell completions, e2e integration tests
- 57 test files, 597 tests passing
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-22 11:42:06 +00:00
|
|
|
await cmd.parseAsync([], { from: 'user' });
|
|
|
|
|
expect(output.join('\n')).toContain('logged in as alice@example.com');
|
|
|
|
|
});
|
|
|
|
|
|
2026-02-21 04:17:31 +00:00
|
|
|
it('shows status in JSON format', async () => {
|
2026-02-24 23:52:04 +00:00
|
|
|
const cmd = createStatusCommand(baseDeps());
|
2026-02-21 04:17:31 +00:00
|
|
|
await cmd.parseAsync(['-o', 'json'], { from: 'user' });
|
|
|
|
|
const parsed = JSON.parse(output[0]) as Record<string, unknown>;
|
2026-02-27 17:05:05 +00:00
|
|
|
expect(parsed['version']).toBe('0.0.1');
|
feat: implement v2 3-tier architecture (mcpctl → mcplocal → mcpd)
- Rename local-proxy to mcplocal with HTTP server, LLM pipeline, mcpd discovery
- Add LLM pre-processing: token estimation, filter cache, metrics, Gemini CLI + DeepSeek providers
- Add mcpd auth (login/logout) and MCP proxy endpoints
- Update CLI: dual URLs (mcplocalUrl/mcpdUrl), auth commands, --direct flag
- Add tiered health monitoring, shell completions, e2e integration tests
- 57 test files, 597 tests passing
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-22 11:42:06 +00:00
|
|
|
expect(parsed['mcplocalReachable']).toBe(true);
|
|
|
|
|
expect(parsed['mcpdReachable']).toBe(true);
|
2026-02-21 04:17:31 +00:00
|
|
|
});
|
|
|
|
|
|
|
|
|
|
it('shows status in YAML format', async () => {
|
2026-02-24 23:52:04 +00:00
|
|
|
const cmd = createStatusCommand(baseDeps({ checkHealth: async () => false }));
|
2026-02-21 04:17:31 +00:00
|
|
|
await cmd.parseAsync(['-o', 'yaml'], { from: 'user' });
|
feat: implement v2 3-tier architecture (mcpctl → mcplocal → mcpd)
- Rename local-proxy to mcplocal with HTTP server, LLM pipeline, mcpd discovery
- Add LLM pre-processing: token estimation, filter cache, metrics, Gemini CLI + DeepSeek providers
- Add mcpd auth (login/logout) and MCP proxy endpoints
- Update CLI: dual URLs (mcplocalUrl/mcpdUrl), auth commands, --direct flag
- Add tiered health monitoring, shell completions, e2e integration tests
- 57 test files, 597 tests passing
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-22 11:42:06 +00:00
|
|
|
expect(output[0]).toContain('mcplocalReachable: false');
|
2026-02-21 04:17:31 +00:00
|
|
|
});
|
|
|
|
|
|
|
|
|
it('checks correct URLs from config', async () => {
|
|
|
|
|
saveConfig({ ...DEFAULT_CONFIG, mcplocalUrl: 'http://local:3200', mcpdUrl: 'http://remote:3100' }, { configDir: tempDir });
|
|
|
|
|
const checkedUrls: string[] = [];
|
2026-02-24 23:52:04 +00:00
|
|
|
const cmd = createStatusCommand(baseDeps({
|
feat: implement v2 3-tier architecture (mcpctl → mcplocal → mcpd)
- Rename local-proxy to mcplocal with HTTP server, LLM pipeline, mcpd discovery
- Add LLM pre-processing: token estimation, filter cache, metrics, Gemini CLI + DeepSeek providers
- Add mcpd auth (login/logout) and MCP proxy endpoints
- Update CLI: dual URLs (mcplocalUrl/mcpdUrl), auth commands, --direct flag
- Add tiered health monitoring, shell completions, e2e integration tests
- 57 test files, 597 tests passing
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-22 11:42:06 +00:00
|
|
|
checkHealth: async (url) => {
|
|
|
|
|
checkedUrls.push(url);
|
2026-02-21 04:17:31 +00:00
|
|
|
return false;
|
|
|
|
|
},
|
2026-02-24 23:52:04 +00:00
|
|
|
}));
|
2026-02-21 04:17:31 +00:00
|
|
|
await cmd.parseAsync([], { from: 'user' });
|
feat: implement v2 3-tier architecture (mcpctl → mcplocal → mcpd)
- Rename local-proxy to mcplocal with HTTP server, LLM pipeline, mcpd discovery
- Add LLM pre-processing: token estimation, filter cache, metrics, Gemini CLI + DeepSeek providers
- Add mcpd auth (login/logout) and MCP proxy endpoints
- Update CLI: dual URLs (mcplocalUrl/mcpdUrl), auth commands, --direct flag
- Add tiered health monitoring, shell completions, e2e integration tests
- 57 test files, 597 tests passing
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-22 11:42:06 +00:00
|
|
|
expect(checkedUrls).toContain('http://local:3200');
|
|
|
|
|
expect(checkedUrls).toContain('http://remote:3100');
|
2026-02-21 04:17:31 +00:00
|
|
|
});
|
|
|
|
|
|
|
|
|
|
it('shows registries from config', async () => {
|
|
|
|
|
saveConfig({ ...DEFAULT_CONFIG, registries: ['official'] }, { configDir: tempDir });
|
2026-02-24 23:52:04 +00:00
|
|
|
const cmd = createStatusCommand(baseDeps());
|
2026-02-21 04:17:31 +00:00
|
|
|
await cmd.parseAsync([], { from: 'user' });
|
|
|
|
|
expect(output.join('\n')).toContain('official');
|
|
|
|
|
expect(output.join('\n')).not.toContain('glama');
|
|
|
|
|
});
|
2026-02-24 22:48:17 +00:00
|
|
|
|
|
|
|
|
it('shows LLM not configured hint when no LLM is set', async () => {
|
2026-02-24 23:52:04 +00:00
|
|
|
const cmd = createStatusCommand(baseDeps());
|
2026-02-24 22:48:17 +00:00
|
|
|
await cmd.parseAsync([], { from: 'user' });
|
|
|
|
|
const out = output.join('\n');
|
|
|
|
|
expect(out).toContain('LLM:');
|
|
|
|
|
expect(out).toContain('not configured');
|
|
|
|
|
expect(out).toContain('mcpctl config setup');
|
|
|
|
|
});
|
|
|
|
|
|
2026-02-24 23:52:04 +00:00
|
|
|
it('shows green check when LLM is healthy (non-TTY)', async () => {
|
2026-02-24 22:48:17 +00:00
|
|
|
saveConfig({ ...DEFAULT_CONFIG, llm: { provider: 'anthropic', model: 'claude-haiku-3-5-20241022' } }, { configDir: tempDir });
|
2026-02-24 23:52:04 +00:00
|
|
|
const cmd = createStatusCommand(baseDeps({ checkLlm: async () => 'ok' }));
|
2026-02-24 22:48:17 +00:00
|
|
|
await cmd.parseAsync([], { from: 'user' });
|
|
|
|
|
const out = output.join('\n');
|
|
|
|
|
expect(out).toContain('anthropic / claude-haiku-3-5-20241022');
|
2026-02-24 23:52:04 +00:00
|
|
|
expect(out).toContain('✓ ok');
|
2026-02-24 23:24:31 +00:00
|
|
|
});
|
|
|
|
|
|
2026-02-24 23:52:04 +00:00
|
|
|
it('shows red cross when LLM check fails (non-TTY)', async () => {
|
2026-02-24 23:24:31 +00:00
|
|
|
saveConfig({ ...DEFAULT_CONFIG, llm: { provider: 'gemini-cli', model: 'gemini-2.5-flash' } }, { configDir: tempDir });
|
2026-02-24 23:52:04 +00:00
|
|
|
const cmd = createStatusCommand(baseDeps({ checkLlm: async () => 'not authenticated' }));
|
2026-02-24 23:24:31 +00:00
|
|
|
await cmd.parseAsync([], { from: 'user' });
|
|
|
|
|
const out = output.join('\n');
|
2026-02-24 23:52:04 +00:00
|
|
|
expect(out).toContain('✗ not authenticated');
|
|
|
|
|
});
|
|
|
|
|
|
2026-02-25 00:03:25 +00:00
|
|
|
it('shows error message from mcplocal', async () => {
|
2026-02-24 23:52:04 +00:00
|
|
|
saveConfig({ ...DEFAULT_CONFIG, llm: { provider: 'gemini-cli', model: 'gemini-2.5-flash' } }, { configDir: tempDir });
|
|
|
|
|
const cmd = createStatusCommand(baseDeps({ checkLlm: async () => 'binary not found' }));
|
|
|
|
|
await cmd.parseAsync([], { from: 'user' });
|
|
|
|
|
expect(output.join('\n')).toContain('✗ binary not found');
|
2026-02-24 23:24:31 +00:00
|
|
|
});
|
|
|
|
|
|
2026-02-25 00:03:25 +00:00
|
|
|
it('queries mcplocal URL for LLM health', async () => {
|
|
|
|
|
saveConfig({ ...DEFAULT_CONFIG, mcplocalUrl: 'http://custom:9999', llm: { provider: 'gemini-cli', model: 'gemini-2.5-flash' } }, { configDir: tempDir });
|
|
|
|
|
let queriedUrl = '';
|
|
|
|
|
const cmd = createStatusCommand(baseDeps({
|
|
|
|
|
checkLlm: async (url) => { queriedUrl = url; return 'ok'; },
|
|
|
|
|
}));
|
|
|
|
|
await cmd.parseAsync([], { from: 'user' });
|
|
|
|
|
expect(queriedUrl).toBe('http://custom:9999');
|
|
|
|
|
});
|
|
|
|
|
|
2026-02-24 23:52:04 +00:00
|
|
|
it('uses spinner on TTY and writes final result', async () => {
|
2026-02-24 23:24:31 +00:00
|
|
|
saveConfig({ ...DEFAULT_CONFIG, llm: { provider: 'gemini-cli', model: 'gemini-2.5-flash' } }, { configDir: tempDir });
|
2026-02-24 23:52:04 +00:00
|
|
|
const cmd = createStatusCommand(baseDeps({
|
|
|
|
|
isTTY: true,
|
|
|
|
|
checkLlm: async () => 'ok',
|
|
|
|
|
}));
|
2026-02-24 23:24:31 +00:00
|
|
|
await cmd.parseAsync([], { from: 'user' });
|
2026-02-24 23:52:04 +00:00
|
|
|
// On TTY, the final LLM line goes through write(), not log()
|
|
|
|
|
const finalWrite = written[written.length - 1];
|
|
|
|
|
expect(finalWrite).toContain('gemini-cli / gemini-2.5-flash');
|
|
|
|
|
expect(finalWrite).toContain('✓ ok');
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
it('uses spinner on TTY and shows failure', async () => {
|
|
|
|
|
saveConfig({ ...DEFAULT_CONFIG, llm: { provider: 'gemini-cli', model: 'gemini-2.5-flash' } }, { configDir: tempDir });
|
|
|
|
|
const cmd = createStatusCommand(baseDeps({
|
|
|
|
|
isTTY: true,
|
|
|
|
|
checkLlm: async () => 'not authenticated',
|
|
|
|
|
}));
|
|
|
|
|
await cmd.parseAsync([], { from: 'user' });
|
|
|
|
|
const finalWrite = written[written.length - 1];
|
|
|
|
|
expect(finalWrite).toContain('✗ not authenticated');
|
2026-02-24 22:48:17 +00:00
|
|
|
});
|
|
|
|
|
|
|
|
|
|
it('shows not configured when LLM provider is none', async () => {
|
|
|
|
|
saveConfig({ ...DEFAULT_CONFIG, llm: { provider: 'none' } }, { configDir: tempDir });
|
2026-02-24 23:52:04 +00:00
|
|
|
const cmd = createStatusCommand(baseDeps());
|
2026-02-24 22:48:17 +00:00
|
|
|
await cmd.parseAsync([], { from: 'user' });
|
|
|
|
|
expect(output.join('\n')).toContain('not configured');
|
|
|
|
|
});
|
|
|
|
|
|
2026-02-24 23:24:31 +00:00
|
|
|
it('includes llm and llmStatus in JSON output', async () => {
|
2026-02-24 22:48:17 +00:00
|
|
|
saveConfig({ ...DEFAULT_CONFIG, llm: { provider: 'gemini-cli', model: 'gemini-2.5-flash' } }, { configDir: tempDir });
|
2026-02-24 23:52:04 +00:00
|
|
|
const cmd = createStatusCommand(baseDeps({ checkLlm: async () => 'ok' }));
|
2026-02-24 22:48:17 +00:00
|
|
|
await cmd.parseAsync(['-o', 'json'], { from: 'user' });
|
|
|
|
|
const parsed = JSON.parse(output[0]) as Record<string, unknown>;
|
|
|
|
|
expect(parsed['llm']).toBe('gemini-cli / gemini-2.5-flash');
|
2026-02-24 23:24:31 +00:00
|
|
|
expect(parsed['llmStatus']).toBe('ok');
|
2026-02-24 22:48:17 +00:00
|
|
|
});
|
|
|
|
|
|
|
|
|
|
it('includes null llm in JSON output when not configured', async () => {
|
2026-02-24 23:52:04 +00:00
|
|
|
const cmd = createStatusCommand(baseDeps());
|
2026-02-24 22:48:17 +00:00
|
|
|
await cmd.parseAsync(['-o', 'json'], { from: 'user' });
|
|
|
|
|
const parsed = JSON.parse(output[0]) as Record<string, unknown>;
|
|
|
|
|
expect(parsed['llm']).toBeNull();
|
2026-02-24 23:24:31 +00:00
|
|
|
expect(parsed['llmStatus']).toBeNull();
|
2026-02-24 22:48:17 +00:00
|
|
|
});
|
2026-04-27 11:27:45 +01:00
|
|
|
|
|
|
|
|
// ── Server LLMs (mcpd-managed Llm rows) ──
|
|
|
|
|
|
|
|
|
|
it('renders a "Server LLMs:" section grouped by tier in table mode', async () => {
|
|
|
|
|
saveCredentials({ token: 't', mcpdUrl: 'http://mcpd', user: 'u' }, { configDir: tempDir });
|
|
|
|
|
const cmd = createStatusCommand(baseDeps({
|
|
|
|
|
fetchServerLlms: async () => [
|
|
|
|
|
{ id: 'l1', name: 'qwen3-thinking', type: 'openai', model: 'qwen3-thinking', tier: 'fast', url: 'http://x:4000/v1', apiKeyRef: { name: 'litellm', key: 'API_KEY' } },
|
|
|
|
|
{ id: 'l2', name: 'sonnet', type: 'anthropic', model: 'claude-sonnet-4-5', tier: 'heavy', url: '', apiKeyRef: null },
|
|
|
|
|
],
|
2026-04-27 12:02:00 +01:00
|
|
|
probeServerLlm: async () => ({ ok: true, ms: 42, say: 'hi' }),
|
2026-04-27 11:27:45 +01:00
|
|
|
}));
|
|
|
|
|
await cmd.parseAsync([], { from: 'user' });
|
|
|
|
|
const out = output.join('\n');
|
|
|
|
|
expect(out).toContain('Server LLMs: 2 registered');
|
2026-04-27 12:02:00 +01:00
|
|
|
expect(out).toContain('qwen3-thinking');
|
|
|
|
|
expect(out).toContain('openai → qwen3-thinking');
|
|
|
|
|
expect(out).toContain('sonnet');
|
|
|
|
|
expect(out).toContain('anthropic → claude-sonnet-4-5');
|
2026-04-27 11:27:45 +01:00
|
|
|
expect(out).toMatch(/fast\s+qwen3-thinking/);
|
|
|
|
|
expect(out).toMatch(/heavy\s+sonnet/);
|
2026-04-27 12:02:00 +01:00
|
|
|
// Health probe result rendered for each LLM
|
|
|
|
|
expect(out).toContain('✓ "hi" 42ms');
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
it('renders a failed "say hi" probe with the error message', async () => {
|
|
|
|
|
const cmd = createStatusCommand(baseDeps({
|
|
|
|
|
fetchServerLlms: async () => [
|
|
|
|
|
{ id: 'l1', name: 'broken', type: 'openai', model: 'gpt-4o', tier: 'fast', url: 'http://x', apiKeyRef: null },
|
|
|
|
|
],
|
|
|
|
|
probeServerLlm: async () => ({ ok: false, ms: 5000, error: 'upstream auth failed: 401' }),
|
|
|
|
|
}));
|
|
|
|
|
await cmd.parseAsync([], { from: 'user' });
|
|
|
|
|
const out = output.join('\n');
|
|
|
|
|
expect(out).toContain('Server LLMs: 1 registered');
|
|
|
|
|
expect(out).toContain('broken');
|
|
|
|
|
expect(out).toContain('✗ upstream auth failed: 401');
|
2026-04-27 11:27:45 +01:00
|
|
|
});
|
|
|
|
|
|
|
|
|
|
it('renders "none registered" when mcpd has no Llm rows', async () => {
|
|
|
|
|
const cmd = createStatusCommand(baseDeps({ fetchServerLlms: async () => [] }));
|
|
|
|
|
await cmd.parseAsync([], { from: 'user' });
|
|
|
|
|
const out = output.join('\n');
|
|
|
|
|
expect(out).toContain('Server LLMs: none registered');
|
|
|
|
|
expect(out).toContain("'mcpctl create llm'");
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
it('omits the section silently when mcpd is unreachable (fetcher returns null)', async () => {
|
|
|
|
|
const cmd = createStatusCommand(baseDeps({ fetchServerLlms: async () => null }));
|
|
|
|
|
await cmd.parseAsync([], { from: 'user' });
|
|
|
|
|
const out = output.join('\n');
|
|
|
|
|
expect(out).not.toContain('Server LLMs');
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
it('passes the bearer token from saved credentials to the fetcher', async () => {
|
|
|
|
|
saveCredentials({ token: 'tok-abc', mcpdUrl: 'http://mcpd', user: 'u' }, { configDir: tempDir });
|
|
|
|
|
let capturedToken: string | null = '<unseen>';
|
|
|
|
|
const cmd = createStatusCommand(baseDeps({
|
|
|
|
|
fetchServerLlms: async (_url, token) => { capturedToken = token; return []; },
|
|
|
|
|
}));
|
|
|
|
|
await cmd.parseAsync([], { from: 'user' });
|
|
|
|
|
expect(capturedToken).toBe('tok-abc');
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
it('passes null token when there are no saved credentials', async () => {
|
|
|
|
|
let capturedToken: string | null = '<unseen>';
|
|
|
|
|
const cmd = createStatusCommand(baseDeps({
|
|
|
|
|
fetchServerLlms: async (_url, token) => { capturedToken = token; return []; },
|
|
|
|
|
}));
|
|
|
|
|
await cmd.parseAsync([], { from: 'user' });
|
|
|
|
|
expect(capturedToken).toBeNull();
|
|
|
|
|
});
|
|
|
|
|
|
2026-04-27 12:02:00 +01:00
|
|
|
it('includes serverLlms with probed health in JSON output', async () => {
|
2026-04-27 11:27:45 +01:00
|
|
|
const llms = [
|
|
|
|
|
{ id: 'l1', name: 'qwen3-thinking', type: 'openai', model: 'qwen3-thinking', tier: 'fast', url: 'http://x', apiKeyRef: null },
|
|
|
|
|
];
|
2026-04-27 12:02:00 +01:00
|
|
|
const cmd = createStatusCommand(baseDeps({
|
|
|
|
|
fetchServerLlms: async () => llms,
|
|
|
|
|
probeServerLlm: async () => ({ ok: true, ms: 99, say: 'hi' }),
|
|
|
|
|
}));
|
2026-04-27 11:27:45 +01:00
|
|
|
await cmd.parseAsync(['-o', 'json'], { from: 'user' });
|
2026-04-27 12:02:00 +01:00
|
|
|
const parsed = JSON.parse(output[0]) as {
|
|
|
|
|
serverLlms?: Array<typeof llms[number] & { health: { ok: boolean; ms: number; say?: string } }>;
|
|
|
|
|
};
|
|
|
|
|
expect(parsed.serverLlms).toHaveLength(1);
|
|
|
|
|
expect(parsed.serverLlms![0]).toMatchObject({
|
|
|
|
|
name: 'qwen3-thinking',
|
|
|
|
|
health: { ok: true, ms: 99, say: 'hi' },
|
|
|
|
|
});
|
2026-04-27 11:27:45 +01:00
|
|
|
});
|
2026-02-21 04:17:31 +00:00
|
|
|
});
|