feat(agents): mcpctl chat REPL + agent CRUD + completions (Stage 5)

This is the moment the user can actually talk to an agent end-to-end:

  mcpctl create llm qwen3-thinking --type openai --model qwen3-thinking \
    --url http://litellm.nvidia-nim.svc.cluster.local:4000/v1 \
    --api-key-ref litellm-key/API_KEY
  mcpctl create agent reviewer --llm qwen3-thinking --project mcpctl-dev \
    --description "I review security design — ask me after each major change."
  mcpctl chat reviewer

Pieces:

* src/cli/src/commands/chat.ts (new) — REPL + one-shot. Streams the SSE
  endpoint and prints text deltas to stdout as they arrive; tool_call /
  tool_result events go to stderr in dim-style brackets so the chat
  output stays clean. LiteLLM-style flags (--temperature / --top-p /
  --top-k / --max-tokens / --seed / --stop / --allow-tool / --extra)
  layer over agent.defaultParams. In-REPL slash-commands: /set KEY VAL,
  /system <text>, /tools (list project's MCP servers), /clear (new
  thread), /save (PATCH agent.defaultParams = current overrides),
  /quit.

* src/cli/src/commands/create.ts — `create agent` mirroring the llm
  pattern. Every yaml-applyable field has a corresponding flag (memory
  rule); --default-temperature / --default-top-p / --default-top-k /
  --default-max-tokens / --default-seed / --default-stop /
  --default-extra / --default-params-file all populate agent.defaultParams.

* src/cli/src/commands/apply.ts — AgentSpecSchema accepts both `llm:
  qwen3-thinking` shorthand and `llm: { name: ... }` long form; runs
  after llms in the apply order so apiKey/llm references resolve. Round-
  trips with `get agent foo -o yaml | apply -f -` (memory rule).

* src/cli/src/commands/get.ts — agentColumns (NAME, LLM, PROJECT,
  DESCRIPTION, ID); RESOURCE_KIND mapping for yaml export.

* src/cli/src/commands/shared.ts — `agent`/`agents`/`thread`/`threads`
  added to RESOURCE_ALIASES.

* src/cli/src/index.ts — wires createChatCommand into the program; passes
  the resolved baseUrl + token so chat can stream SSE without going
  through ApiClient (which only does buffered request/response).

* completions/mcpctl.{fish,bash} regenerated. scripts/generate-completions.ts
  knows about agents (canonical + aliases) and emits a special-case
  `chat)` block that completes the first arg with `mcpctl get agents`
  names. tests/completions.test.ts: +9 new assertions covering agents in
  the resource list, chat in the commands list, --llm flag for create
  agent, agent-name completion for chat, etc.

CLI suite: 430/430 (was 421). Completions --check is clean.

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
This commit is contained in:
Michal
2026-04-25 17:02:38 +01:00
parent 285be11dd5
commit 727e7d628c
10 changed files with 701 additions and 13 deletions

View File

@@ -63,6 +63,42 @@ const LlmSpecSchema = z.object({
extraConfig: z.record(z.unknown()).default({}),
});
// Per-agent default chat/sampling parameters (agent.defaultParams).
// snake_case keys mirror the downstream chat request body; `.strict()`
// rejects unknown keys so a typo in an apply file fails fast instead of
// being silently dropped.
const AgentChatParamsAppliedSchema = z.object({
temperature: z.number().optional(),
top_p: z.number().optional(),
top_k: z.number().int().optional(),
max_tokens: z.number().int().optional(),
// A single stop sequence or a list of them.
stop: z.union([z.string(), z.array(z.string())]).optional(),
presence_penalty: z.number().optional(),
frequency_penalty: z.number().optional(),
seed: z.number().int().optional(),
response_format: z.record(z.unknown()).optional(),
tool_choice: z.unknown().optional(),
// Restrict which tools the agent may call.
tools_allowlist: z.array(z.string()).optional(),
// Replace / append to the agent's system block.
systemOverride: z.string().optional(),
systemAppend: z.string().optional(),
// Escape hatch for provider-specific knobs not modeled above.
extra: z.record(z.unknown()).optional(),
}).strict();
// Agent resource as written in an apply file. `llm` (and `project`) accept a
// string shorthand (`llm: qwen3-thinking`) which is normalized to `{ name }`,
// so `get agent foo -o yaml | apply -f -` round-trips either form.
const AgentSpecSchema = z.object({
name: z.string().min(1).max(100).regex(/^[a-z0-9-]+$/),
description: z.string().max(500).default(''),
systemPrompt: z.string().default(''),
llm: z.union([
z.object({ name: z.string().min(1) }),
z.object({ id: z.string().min(1) }),
// Allow string shorthand: `llm: qwen3-thinking` → `{ name: 'qwen3-thinking' }`
z.string().min(1).transform((name) => ({ name })),
]),
// Optional project attachment; same string shorthand as `llm`.
project: z.union([
z.object({ name: z.string().min(1) }),
z.string().min(1).transform((name) => ({ name })),
]).optional(),
proxyModelName: z.string().optional(),
defaultParams: AgentChatParamsAppliedSchema.default({}),
// Free-form extension bag, kept open (unlike defaultParams).
extras: z.record(z.unknown()).default({}),
});
const TemplateEnvEntrySchema = z.object({
name: z.string().min(1),
description: z.string().optional(),
@@ -172,6 +208,7 @@ const ApplyConfigSchema = z.object({
secretbackends: z.array(SecretBackendSpecSchema).default([]),
secrets: z.array(SecretSpecSchema).default([]),
llms: z.array(LlmSpecSchema).default([]),
agents: z.array(AgentSpecSchema).default([]),
servers: z.array(ServerSpecSchema).default([]),
users: z.array(UserSpecSchema).default([]),
groups: z.array(GroupSpecSchema).default([]),
@@ -215,6 +252,7 @@ export function createApplyCommand(deps: ApplyCommandDeps): Command {
if (config.secretbackends.length > 0) log(` ${config.secretbackends.length} secretbackend(s)`);
if (config.secrets.length > 0) log(` ${config.secrets.length} secret(s)`);
if (config.llms.length > 0) log(` ${config.llms.length} llm(s)`);
if (config.agents.length > 0) log(` ${config.agents.length} agent(s)`);
if (config.servers.length > 0) log(` ${config.servers.length} server(s)`);
if (config.users.length > 0) log(` ${config.users.length} user(s)`);
if (config.groups.length > 0) log(` ${config.groups.length} group(s)`);
@@ -262,6 +300,7 @@ const KIND_TO_RESOURCE: Record<string, string> = {
mcptoken: 'mcptokens',
secretbackend: 'secretbackends',
llm: 'llms',
agent: 'agents',
};
/**
@@ -434,6 +473,24 @@ async function applyConfig(client: ApiClient, config: ApplyConfig, log: (...args
}
}
// Apply agents (after llms — agent.llm references an existing Llm by name)
for (const agent of config.agents) {
try {
// Upsert by name: PUT onto the existing row, POST when it does not exist.
const existing = await cachedFindByName('agents', agent.name);
if (existing) {
// `name` is the lookup key, not part of the update payload.
const { name: _n, ...updateBody } = agent;
await withRetry(() => client.put(`/api/v1/agents/${existing.id}`, updateBody));
log(`Updated agent: ${agent.name}`);
} else {
await withRetry(() => client.post('/api/v1/agents', agent));
// NOTE(review): cache invalidated only on create — presumably updates keep
// the same id/name so cached lookups stay valid; confirm.
invalidateCache('agents');
log(`Created agent: ${agent.name}`);
}
} catch (err) {
// Best-effort apply: log and continue with the next agent on failure.
log(`Error applying agent '${agent.name}': ${err instanceof Error ? err.message : err}`);
}
}
// Apply users (matched by email)
for (const user of config.users) {
try {

View File

@@ -0,0 +1,409 @@
/**
* `mcpctl chat <agent>` — interactive REPL + one-shot mode.
*
* Streams the agent's response over SSE so the user sees text appear as it's
* generated. Tool calls and tool results print to stderr in dim style so the
* REPL output stays clean. LiteLLM-style flags (--temperature, --max-tokens,
* --system, etc.) override the agent's defaultParams for this session only;
* use the in-REPL `/save` slash-command to persist them back to the agent.
*
* Modes:
* mcpctl chat <agent> # REPL, new thread
* mcpctl chat <agent> --thread <id> # REPL, resume thread
* mcpctl chat <agent> -m "hi" # one-shot, prints reply, no REPL
*
* Slash-commands inside the REPL:
* /set KEY VALUE # adjust an override (temperature 0.2)
* /system <text> # set systemAppend for this turn onward
* /tools # list tools the agent can call
* /clear # start a fresh thread (same agent)
* /save # PATCH agent.defaultParams = current overrides
* /quit # exit
*/
import { Command } from 'commander';
import http from 'node:http';
import https from 'node:https';
import readline from 'node:readline';
import { promises as fs } from 'node:fs';
import type { ApiClient } from '../api-client.js';
const STREAM_TIMEOUT_MS = 600_000; // 10 minutes — agent turns can include long tool calls
/** Dependencies injected by index.ts when wiring `mcpctl chat`. */
export interface ChatCommandDeps {
// Buffered REST client — used for the --no-stream paths and slash-commands.
client: ApiClient;
// Raw base URL + optional bearer token for the SSE request in streamOnce
// (ApiClient only does buffered request/response, so streaming bypasses it).
baseUrl: string;
token?: string | undefined;
log: (...args: unknown[]) => void;
}
/**
 * Build the `mcpctl chat <agent>` command.
 *
 * With `-m/--message` a single message is sent and the process exits;
 * otherwise the user drops into an interactive REPL. All sampling flags are
 * session-only overrides layered on top of the agent's defaultParams.
 */
export function createChatCommand(deps: ChatCommandDeps): Command {
  const cmd = new Command('chat')
    .description('Open an interactive chat session with an agent (REPL or one-shot).')
    .argument('<agent>', 'Agent name (see `mcpctl get agents`)')
    .option('-m, --message <text>', 'One-shot: send a single message and exit (no REPL)')
    .option('--thread <id>', 'Resume an existing thread')
    .option('--system <text>', 'Replace agent.systemPrompt for this session')
    .option('--system-file <path>', 'Read --system text from a file')
    .option('--system-append <text>', 'Append to the agent system block for this session')
    .option('--temperature <n>', 'Sampling temperature (0..2)', parseFloat)
    .option('--top-p <n>', 'Nucleus sampling cutoff (0..1)', parseFloat)
    .option('--top-k <n>', 'Top-K sampling (Anthropic; OpenAI ignores)', parseFloatInt)
    .option('--max-tokens <n>', 'Maximum tokens in the assistant reply', parseFloatInt)
    .option('--seed <n>', 'Reproducibility seed (provider-dependent)', parseFloatInt)
    .option('--stop <text>', 'Stop sequence (repeatable)', collect, [])
    .option('--allow-tool <name>', 'Restrict to this tool only (repeatable)', collect, [])
    .option('--extra <kv>', 'Provider-specific knob k=v (repeatable)', collect, [])
    .option('--no-stream', 'Disable SSE streaming (single JSON response)');
  cmd.action(async (agentName: string, opts: ChatOpts) => {
    const sessionOverrides = await buildInitialOverrides(opts);
    if (opts.message === undefined) {
      await runRepl(deps, agentName, opts.thread, sessionOverrides, opts.stream);
    } else {
      await runOneShot(deps, agentName, opts.message, opts.thread, sessionOverrides, opts.stream);
    }
  });
  return cmd;
}
/** Parsed commander options for `mcpctl chat` (commander camelCases the flag names). */
interface ChatOpts {
// -m/--message: one-shot mode when set; REPL otherwise.
message?: string;
// --thread: resume an existing thread instead of starting a new one.
thread?: string;
// --system / --system-file / --system-append: session system-prompt controls.
system?: string;
systemFile?: string;
systemAppend?: string;
// Sampling overrides (session-only unless saved with /save).
temperature?: number;
topP?: number;
topK?: number;
maxTokens?: number;
seed?: number;
// Repeatable flags accumulate into arrays via `collect`.
stop?: string[];
allowTool?: string[];
extra?: string[];
// --no-stream sets this to false; undefined/true means SSE streaming.
stream?: boolean;
}
/**
 * Session override state sent with each chat request. snake_case keys match
 * the request-body parameter names (see AgentChatParamsAppliedSchema in
 * apply.ts); systemOverride/systemAppend are session-only and are stripped
 * by /save (see stripSession).
 */
interface Overrides {
systemOverride?: string;
systemAppend?: string;
temperature?: number;
top_p?: number;
top_k?: number;
max_tokens?: number;
seed?: number;
stop?: string[];
tools_allowlist?: string[];
// Provider-specific knobs collected from --extra / unknown /set keys.
extra?: Record<string, unknown>;
}
/**
 * Translate CLI flags into the initial session Overrides.
 *
 * --system-file is only consulted when --system was not given explicitly.
 * Repeatable flags are included only when non-empty so the request body stays
 * minimal.
 *
 * @throws Error when an --extra entry is not of the form key=value.
 */
async function buildInitialOverrides(opts: ChatOpts): Promise<Overrides> {
  const result: Overrides = {};
  // Explicit --system wins; otherwise fall back to reading --system-file.
  const systemText =
    opts.system ??
    (opts.systemFile !== undefined ? (await fs.readFile(opts.systemFile, 'utf-8')).trim() : undefined);
  if (systemText !== undefined) result.systemOverride = systemText;
  if (opts.systemAppend !== undefined) result.systemAppend = opts.systemAppend;
  if (opts.temperature !== undefined) result.temperature = opts.temperature;
  if (opts.topP !== undefined) result.top_p = opts.topP;
  if (opts.topK !== undefined) result.top_k = opts.topK;
  if (opts.maxTokens !== undefined) result.max_tokens = opts.maxTokens;
  if (opts.seed !== undefined) result.seed = opts.seed;
  if (opts.stop !== undefined && opts.stop.length > 0) result.stop = opts.stop;
  if (opts.allowTool !== undefined && opts.allowTool.length > 0) result.tools_allowlist = opts.allowTool;
  if (opts.extra !== undefined && opts.extra.length > 0) {
    // k=v pairs; later duplicates of the same key win, matching object spread.
    result.extra = Object.fromEntries(
      opts.extra.map((pair) => {
        const eq = pair.indexOf('=');
        if (eq < 1) throw new Error(`--extra '${pair}' must be key=value`);
        return [pair.slice(0, eq), parseExtraValue(pair.slice(eq + 1))] as const;
      }),
    );
  }
  return result;
}
/**
 * Coerce a raw --extra / /set value into a typed JSON-ish value.
 * 'true'/'false'/'null' and plain decimal numerics become typed; everything
 * else (including exponent forms like '1e3') stays a string.
 */
function parseExtraValue(raw: string): unknown {
  switch (raw) {
    case 'true':
      return true;
    case 'false':
      return false;
    case 'null':
      return null;
    default:
      return /^-?\d+(\.\d+)?$/.test(raw) ? Number(raw) : raw;
  }
}
/**
 * One-shot mode: send exactly one message, print the reply, report the
 * resolved thread id on stderr, and return. Streaming (SSE) is the default;
 * `--no-stream` switches to a buffered POST through ApiClient.
 */
async function runOneShot(
  deps: ChatCommandDeps,
  agent: string,
  message: string,
  threadId: string | undefined,
  overrides: Overrides,
  stream: boolean | undefined,
): Promise<void> {
  if (stream !== false) {
    // Deltas are written to stdout as they arrive inside streamOnce.
    const finalThread = await streamOnce(deps, agent, message, threadId, overrides);
    process.stderr.write(`\n(thread: ${finalThread})\n`);
    return;
  }
  const payload: Record<string, unknown> = { message, ...overrides };
  if (threadId !== undefined) payload.threadId = threadId;
  const reply = await deps.client.post<{ assistant: string; threadId: string; turnIndex: number }>(
    `/api/v1/agents/${encodeURIComponent(agent)}/chat`,
    payload,
  );
  process.stdout.write(`${reply.assistant}\n`);
  process.stderr.write(`(thread: ${reply.threadId})\n`);
}
/**
 * Interactive REPL: read a line, dispatch slash-commands, otherwise send the
 * line as a chat message. Keeps session-local override state so /set, /system
 * and /save act on this session without touching the stored agent until the
 * user explicitly saves.
 */
async function runRepl(
deps: ChatCommandDeps,
agent: string,
initialThread: string | undefined,
initialOverrides: Overrides,
stream: boolean | undefined,
): Promise<void> {
// Copy so slash-command mutations never leak back into the caller's object.
const overrides: Overrides = { ...initialOverrides };
let threadId = initialThread;
const rl = readline.createInterface({ input: process.stdin, output: process.stdout });
const ask = (q: string): Promise<string> => new Promise((resolve) => rl.question(q, resolve));
process.stderr.write(`Chat with agent '${agent}'. Slash commands: /set /system /tools /clear /save /quit. Ctrl-D to exit.\n`);
if (threadId !== undefined) {
process.stderr.write(`(resuming thread ${threadId})\n`);
}
while (true) {
let line: string;
try {
line = await ask('> ');
} catch {
// NOTE(review): rl.question's callback never rejects, so this break looks
// like a guard for readline teardown (Ctrl-D closing stdin) — confirm the
// exit path before relying on it.
break;
}
if (line === '') continue;
if (line.startsWith('/')) {
// Slash-commands never hit the chat endpoint; /clear resets the thread id.
const handled = await handleSlash(line, deps, agent, overrides, () => { threadId = undefined; });
if (handled === 'quit') break;
continue;
}
try {
if (stream === false) {
// --no-stream: buffered POST, print the whole assistant reply at once.
const body: Record<string, unknown> = { message: line, ...overrides };
if (threadId !== undefined) body.threadId = threadId;
const res = await deps.client.post<{ assistant: string; threadId: string }>(
`/api/v1/agents/${encodeURIComponent(agent)}/chat`,
body,
);
// Carry the server-assigned thread id into the next turn.
threadId = res.threadId;
process.stdout.write(`${res.assistant}\n`);
} else {
// Default: SSE stream; streamOnce prints deltas and returns the thread id.
threadId = await streamOnce(deps, agent, line, threadId, overrides);
process.stdout.write('\n');
}
} catch (err) {
// A failed turn keeps the REPL alive; the user can retry or /quit.
process.stderr.write(`error: ${(err as Error).message}\n`);
}
}
rl.close();
}
/**
 * Dispatch one in-REPL slash-command.
 *
 * @param raw         the full input line, including the leading '/'
 * @param deps        injected client used by /tools and /save
 * @param agent       agent name for API paths
 * @param overrides   session override state, mutated in place by /set and /system
 * @param resetThread callback that clears the current thread id (/clear)
 * @returns 'quit' when the REPL should exit, otherwise 'continue'
 */
async function handleSlash(
  raw: string,
  deps: ChatCommandDeps,
  agent: string,
  overrides: Overrides,
  resetThread: () => void,
): Promise<'quit' | 'continue'> {
  const [cmd, ...rest] = raw.slice(1).split(/\s+/);
  switch (cmd) {
    case 'quit':
    case 'exit':
      return 'quit';
    case 'clear':
      resetThread();
      // Bug fix: the newline was inside the parenthesis ('(new thread\n)'),
      // which printed "(new thread" and a stray ")" on the next line.
      process.stderr.write('(new thread)\n');
      return 'continue';
    case 'system': {
      // /system with no text clears the session append; with text it replaces it.
      const text = rest.join(' ');
      if (text === '') {
        delete overrides.systemAppend;
        process.stderr.write('(systemAppend cleared)\n');
      } else {
        overrides.systemAppend = text;
        process.stderr.write('(systemAppend set)\n');
      }
      return 'continue';
    }
    case 'set': {
      const [key, ...vparts] = rest;
      if (key === undefined || vparts.length === 0) {
        process.stderr.write('usage: /set KEY VALUE\n');
        return 'continue';
      }
      applySetCommand(overrides, key, vparts.join(' '));
      process.stderr.write(`(${key}=${vparts.join(' ')})\n`);
      return 'continue';
    }
    case 'tools': {
      // Lists the MCP servers attached to the agent's project (server-level
      // granularity — individual tool names are not enumerated here).
      try {
        const a = await deps.client.get<{ project: { name: string } | null }>(
          `/api/v1/agents/${encodeURIComponent(agent)}`,
        );
        if (a.project === null) {
          process.stderr.write('(agent has no project — no tools available)\n');
          return 'continue';
        }
        const servers = await deps.client.get<Array<{ server: { name: string } }>>(
          `/api/v1/projects/${encodeURIComponent(a.project.name)}/servers`,
        );
        if (servers.length === 0) {
          process.stderr.write('(project has no MCP servers attached)\n');
        } else {
          for (const s of servers) {
            process.stderr.write(` ${s.server.name}\n`);
          }
        }
      } catch (err) {
        process.stderr.write(`error listing tools: ${(err as Error).message}\n`);
      }
      return 'continue';
    }
    case 'save': {
      // Persist sampling overrides (minus session-only system fields) back to
      // the stored agent as its new defaultParams.
      try {
        await deps.client.put(`/api/v1/agents/${encodeURIComponent(agent)}`, {
          defaultParams: stripSession(overrides),
        });
        process.stderr.write('(saved current overrides as agent.defaultParams)\n');
      } catch (err) {
        process.stderr.write(`error saving: ${(err as Error).message}\n`);
      }
      return 'continue';
    }
    default:
      process.stderr.write(`unknown command: /${cmd ?? ''}\n`);
      return 'continue';
  }
}
/**
 * Build the payload for /save: every sampling override, minus the
 * per-session system fields (systemOverride / systemAppend), which are
 * deliberately not persisted as agent defaults.
 */
function stripSession(o: Overrides): Record<string, unknown> {
  const sessionOnly = new Set(['systemOverride', 'systemAppend']);
  return Object.fromEntries(
    Object.entries(o).filter(([key]) => !sessionOnly.has(key)),
  );
}
/**
 * Handle `/set KEY VALUE`: known sampling keys are coerced to numbers
 * (integer keys truncated); both snake_case and kebab-case spellings are
 * accepted; anything else lands in `extra` so arbitrary knobs still work.
 */
function applySetCommand(o: Overrides, key: string, valueRaw: string): void {
  const asNumber = Number(valueRaw);
  if (key === 'temperature') {
    o.temperature = asNumber;
  } else if (key === 'top_p' || key === 'top-p') {
    o.top_p = asNumber;
  } else if (key === 'top_k' || key === 'top-k') {
    o.top_k = Math.trunc(asNumber);
  } else if (key === 'max_tokens' || key === 'max-tokens') {
    o.max_tokens = Math.trunc(asNumber);
  } else if (key === 'seed') {
    o.seed = Math.trunc(asNumber);
  } else if (key === 'stop') {
    o.stop = [valueRaw];
  } else {
    // Anything unknown drops into `extra` so the user can still pass it.
    o.extra = { ...(o.extra ?? {}), [key]: parseExtraValue(valueRaw) };
  }
}
/**
 * Stream a single chat call over SSE and return the resolved threadId.
 *
 * Uses node's raw http/https clients instead of ApiClient because ApiClient
 * buffers whole responses. Text deltas go to stdout as they arrive; tool and
 * error events go to stderr so stdout holds only the assistant text.
 */
async function streamOnce(
deps: ChatCommandDeps,
agent: string,
message: string,
threadId: string | undefined,
overrides: Overrides,
): Promise<string> {
const url = new URL(`${deps.baseUrl}/api/v1/agents/${encodeURIComponent(agent)}/chat`);
const body = JSON.stringify({ message, threadId, stream: true, ...overrides });
return new Promise<string>((resolve, reject) => {
const driver = url.protocol === 'https:' ? https : http;
const req = driver.request({
hostname: url.hostname,
port: url.port || (url.protocol === 'https:' ? 443 : 80),
path: url.pathname + url.search,
method: 'POST',
timeout: STREAM_TIMEOUT_MS,
headers: {
'Content-Type': 'application/json',
...(deps.token !== undefined ? { Authorization: `Bearer ${deps.token}` } : {}),
},
}, (res) => {
const status = res.statusCode ?? 0;
if (status >= 400) {
// Error responses are plain (non-SSE): buffer the body into the message.
const chunks: Buffer[] = [];
res.on('data', (c: Buffer) => chunks.push(c));
res.on('end', () => reject(new Error(`HTTP ${String(status)}: ${Buffer.concat(chunks).toString('utf-8')}`)));
return;
}
// SSE framing: events are separated by a blank line ("\n\n"); a frame may
// arrive split across chunks, so accumulate and cut on each delimiter.
let buf = '';
let resolvedThread = threadId ?? '';
res.setEncoding('utf-8');
res.on('data', (chunk: string) => {
buf += chunk;
let nl: number;
while ((nl = buf.indexOf('\n\n')) !== -1) {
const frame = buf.slice(0, nl);
buf = buf.slice(nl + 2);
for (const line of frame.split('\n')) {
if (!line.startsWith('data: ')) continue;
// Strip the 'data: ' prefix (6 chars) to get the JSON payload.
const data = line.slice(6);
if (data === '[DONE]') continue;
try {
const evt = JSON.parse(data) as ChatStreamFrame;
switch (evt.type) {
case 'text':
// Incremental assistant output — stdout, no trailing newline.
if (typeof evt.delta === 'string') process.stdout.write(evt.delta);
break;
case 'tool_call':
process.stderr.write(`\n[tool_call: ${evt.toolName ?? ''}]\n`);
break;
case 'tool_result':
process.stderr.write(`[tool_result: ${evt.toolName ?? ''} ${evt.ok === false ? 'FAIL' : 'ok'}]\n`);
break;
case 'final':
// The server reports the (possibly newly created) thread id here.
if (evt.threadId !== undefined) resolvedThread = evt.threadId;
break;
case 'error':
process.stderr.write(`\n[error: ${evt.message ?? ''}]\n`);
break;
}
} catch {
// ignore malformed frames
}
}
}
});
// NOTE(review): any unterminated frame left in `buf` at stream end is
// dropped silently — acceptable if the server always ends with "\n\n".
res.on('end', () => resolve(resolvedThread));
res.on('error', reject);
});
req.on('error', reject);
req.on('timeout', () => {
req.destroy();
reject(new Error('chat stream timed out'));
});
req.write(body);
req.end();
});
}
/** One parsed SSE event from the chat stream; optional fields vary by `type`. */
interface ChatStreamFrame {
type: 'text' | 'tool_call' | 'tool_result' | 'final' | 'error';
// 'text': incremental assistant output.
delta?: string;
// 'tool_call' / 'tool_result': which tool was invoked.
toolName?: string;
// 'tool_result': false indicates the tool call failed.
ok?: boolean;
// 'final': thread id resolved by the server for this conversation.
threadId?: string;
turnIndex?: number;
// 'error': human-readable error text.
message?: string;
}
/** Commander accumulator for repeatable flags: append each occurrence to a fresh list. */
function collect(value: string, prev: string[]): string[] {
  return prev.concat(value);
}
/**
 * Commander argument parser for integer-valued flags (--top-k, --seed, ...).
 *
 * Rejects non-numeric and fractional input. Note that `Number('')` and
 * `Number('   ')` are 0, so a blank value must be rejected explicitly —
 * otherwise `--seed ""` would silently parse as 0.
 *
 * @throws Error when the value is empty or not an integer.
 */
function parseFloatInt(value: string): number {
  if (value.trim() === '') throw new Error(`expected integer, got '${value}'`);
  const n = Number(value);
  if (!Number.isInteger(n)) throw new Error(`expected integer, got '${value}'`);
  return n;
}

View File

@@ -88,7 +88,7 @@ export function createCreateCommand(deps: CreateCommandDeps): Command {
const { client, log } = deps;
const cmd = new Command('create')
.description('Create a resource (server, secret, secretbackend, llm, project, user, group, rbac, serverattachment, prompt)');
.description('Create a resource (server, secret, secretbackend, llm, agent, project, user, group, rbac, serverattachment, prompt)');
// --- create server ---
cmd.command('server')
@@ -307,6 +307,81 @@ export function createCreateCommand(deps: CreateCommandDeps): Command {
}
});
// --- create agent ---
// `mcpctl create agent` — every yaml-applyable field has a matching flag, so
// anything expressible in an apply file is expressible here too.
cmd.command('agent')
.description('Create an Agent (LLM persona pinned to an Llm, optionally attached to a Project)')
.argument('<name>', 'Agent name (lowercase alphanumeric with hyphens)')
.requiredOption('--llm <name>', 'Pinned Llm (see `mcpctl get llms`)')
.option('--project <name>', 'Attach to this Project (optional)')
.option('--description <text>', 'Description (shown in MCP tools/list)')
.option('--system-prompt <text>', 'System prompt (persona)')
.option('--system-prompt-file <path>', 'Read system prompt from a file')
.option('--proxy-model <name>', 'Optional proxyModel name override (informational)')
.option('--default-temperature <n>', 'Default sampling temperature', parseFloat)
.option('--default-top-p <n>', 'Default top_p', parseFloat)
.option('--default-top-k <n>', 'Default top_k', (s: string) => parseInt(s, 10))
.option('--default-max-tokens <n>', 'Default max_tokens', (s: string) => parseInt(s, 10))
.option('--default-seed <n>', 'Default seed', (s: string) => parseInt(s, 10))
.option('--default-stop <text>', 'Default stop sequence (repeat for multiple)', collect, [])
.option('--default-extra <kv>', 'Default provider-specific knob k=v (repeat)', collect, [])
.option('--default-params-file <path>', 'Read defaultParams from a JSON file')
.option('--force', 'Update if already exists')
.action(async (name: string, opts) => {
const body: Record<string, unknown> = {
name,
llm: { name: opts.llm },
};
// NOTE(review): truthiness check — an empty-string --project is dropped; confirm intended.
if (opts.project) body.project = { name: opts.project };
if (opts.description !== undefined) body.description = opts.description;
// Inline --system-prompt wins over --system-prompt-file.
let systemPrompt = opts.systemPrompt as string | undefined;
if (systemPrompt === undefined && opts.systemPromptFile !== undefined) {
const fs = await import('node:fs/promises');
systemPrompt = (await fs.readFile(opts.systemPromptFile as string, 'utf-8')).trim();
}
if (systemPrompt !== undefined) body.systemPrompt = systemPrompt;
if (opts.proxyModel !== undefined) body.proxyModelName = opts.proxyModel;
// defaultParams: the JSON file (if any) is the base; individual flags override it.
let defaults: Record<string, unknown> = {};
if (opts.defaultParamsFile !== undefined) {
const fs = await import('node:fs/promises');
defaults = JSON.parse(await fs.readFile(opts.defaultParamsFile as string, 'utf-8')) as Record<string, unknown>;
}
if (opts.defaultTemperature !== undefined) defaults.temperature = opts.defaultTemperature;
if (opts.defaultTopP !== undefined) defaults.top_p = opts.defaultTopP;
if (opts.defaultTopK !== undefined) defaults.top_k = opts.defaultTopK;
if (opts.defaultMaxTokens !== undefined) defaults.max_tokens = opts.defaultMaxTokens;
if (opts.defaultSeed !== undefined) defaults.seed = opts.defaultSeed;
// A single stop sequence is sent as a string, multiple as an array
// (matches AgentChatParamsAppliedSchema's string|string[] union).
if (opts.defaultStop && (opts.defaultStop as string[]).length > 0) {
defaults.stop = (opts.defaultStop as string[]).length === 1 ? (opts.defaultStop as string[])[0] : opts.defaultStop;
}
if (opts.defaultExtra && (opts.defaultExtra as string[]).length > 0) {
const extra: Record<string, unknown> = (defaults.extra as Record<string, unknown> | undefined) ?? {};
for (const kv of opts.defaultExtra as string[]) {
const eq = (kv as string).indexOf('=');
if (eq < 1) throw new Error(`--default-extra '${kv}' must be key=value`);
// NOTE(review): values stay raw strings here, while chat.ts's --extra coerces
// via parseExtraValue (booleans/numbers) — confirm the API accepts strings.
extra[(kv as string).slice(0, eq)] = (kv as string).slice(eq + 1);
}
defaults.extra = extra;
}
if (Object.keys(defaults).length > 0) body.defaultParams = defaults;
try {
const row = await client.post<{ id: string; name: string }>('/api/v1/agents', body);
log(`agent '${row.name}' created (id: ${row.id})`);
} catch (err) {
// --force: on 409 conflict, look up the existing agent by name and PUT instead.
if (err instanceof ApiError && err.status === 409 && opts.force) {
const existing = (await client.get<Array<{ id: string; name: string }>>('/api/v1/agents')).find((a) => a.name === name);
if (!existing) throw err;
const { name: _n, ...updateBody } = body;
await client.put(`/api/v1/agents/${existing.id}`, updateBody);
log(`agent '${name}' updated (id: ${existing.id})`);
} else {
throw err;
}
}
});
// --- create secretbackend ---
cmd.command('secretbackend')
.alias('sb')

View File

@@ -143,6 +143,27 @@ const llmColumns: Column<LlmRow>[] = [
{ header: 'ID', key: 'id' },
];
/** Row shape for `get agents` output (fields consumed by agentColumns below). */
interface AgentRow {
id: string;
name: string;
description: string;
llm: { id: string; name: string };
// null when the agent is not attached to a project.
project: { id: string; name: string } | null;
}
// Table layout for `mcpctl get agents`; '-' marks a missing project/description.
const agentColumns: Column<AgentRow>[] = [
{ header: 'NAME', key: 'name' },
{ header: 'LLM', key: (r) => r.llm.name, width: 24 },
{ header: 'PROJECT', key: (r) => r.project?.name ?? '-', width: 20 },
{ header: 'DESCRIPTION', key: (r) => truncate(r.description, 50) || '-', width: 50 },
{ header: 'ID', key: 'id' },
];
/** Clamp s to at most max characters, replacing the overflow with a single ellipsis. */
function truncate(s: string, max: number): string {
  return s.length > max ? `${s.slice(0, max - 1)}…` : s;
}
interface SecretBackendRow {
id: string;
name: string;
@@ -322,6 +343,8 @@ function getColumnsForResource(resource: string): Column<Record<string, unknown>
return secretBackendColumns as unknown as Column<Record<string, unknown>>[];
case 'llms':
return llmColumns as unknown as Column<Record<string, unknown>>[];
case 'agents':
return agentColumns as unknown as Column<Record<string, unknown>>[];
default:
return [
{ header: 'ID', key: 'id' as keyof Record<string, unknown> },
@@ -346,6 +369,7 @@ const RESOURCE_KIND: Record<string, string> = {
mcptokens: 'mcptoken',
secretbackends: 'secretbackend',
llms: 'llm',
agents: 'agent',
};
/**

View File

@@ -36,6 +36,10 @@ export const RESOURCE_ALIASES: Record<string, string> = {
sb: 'secretbackends',
llm: 'llms',
llms: 'llms',
agent: 'agents',
agents: 'agents',
thread: 'threads',
threads: 'threads',
all: 'all',
};

View File

@@ -18,6 +18,7 @@ import { createMcpCommand } from './commands/mcp.js';
import { createPatchCommand } from './commands/patch.js';
import { createConsoleCommand } from './commands/console/index.js';
import { createCacheCommand } from './commands/cache.js';
import { createChatCommand } from './commands/chat.js';
import { createMigrateCommand } from './commands/migrate.js';
import { createRotateCommand } from './commands/rotate.js';
import { ApiClient, ApiError } from './api-client.js';
@@ -216,6 +217,13 @@ export function createProgram(): Command {
log: (...args) => console.log(...args),
}));
program.addCommand(createChatCommand({
client,
baseUrl,
...(creds?.token !== undefined ? { token: creds.token } : {}),
log: (...args) => console.log(...args),
}));
program.addCommand(createPatchCommand({
client,
log: (...args) => console.log(...args),

View File

@@ -183,3 +183,51 @@ describe('bash completions', () => {
expect(fnMatch, '_mcpctl_resource_names must not use grep on name').not.toMatch(/grep.*"name"/);
});
});
// Stage 5 coverage: the agents resource and the `chat` command in both shells.
describe('agent + chat completions', () => {
  it('fish lists agents as a resource type', () => {
    expect(fishFile).toMatch(/set -l resources [^\n]*\bagents\b/);
  });
  it('fish accepts both `agent` and `agents` aliases', () => {
    const aliasDefinition = fishFile
      .split('\n')
      .find((candidate) => candidate.startsWith(' set -l resource_aliases'));
    expect(aliasDefinition).toMatch(/\bagent\b/);
    expect(aliasDefinition).toMatch(/\bagents\b/);
  });
  it('fish offers `chat` as a top-level command', () => {
    expect(fishFile).toMatch(/set -l commands [^\n]*\bchat\b/);
  });
  it('fish offers `agent` under `mcpctl create`', () => {
    expect(fishFile).toMatch(/-a agent\b[^\n]*Create an Agent/);
  });
  it('fish wires --llm flag for create agent', () => {
    expect(fishFile).toMatch(/__mcpctl_subcmd_active create agent[^\n]*-l llm\b/);
  });
  it('bash lists agents in resources and resource_aliases', () => {
    expect(bashFile).toMatch(/local resources="[^"]*\bagents\b[^"]*"/);
    expect(bashFile).toMatch(/local resource_aliases="[^"]*\bagent\b[^"]*"/);
  });
  it('bash includes `chat` in the commands list', () => {
    expect(bashFile).toMatch(/local commands="[^"]*\bchat\b[^"]*"/);
  });
  it('bash dispatches a `chat)` case that completes with agent names + LiteLLM-style flags', () => {
    const matched = bashFile.match(/chat\)[\s\S]*?return ;;/);
    const chatCase = matched === null ? '' : matched[0];
    expect(chatCase, 'chat must call _mcpctl_resource_names with "agents"').toContain('"agents"');
    expect(chatCase, 'chat must offer --temperature').toContain('--temperature');
    expect(chatCase, 'chat must offer --thread').toContain('--thread');
    expect(chatCase, 'chat must offer --no-stream').toContain('--no-stream');
  });
  it('bash dispatches `create agent` with the correct flags', () => {
    const matched = bashFile.match(/agent\)[\s\S]*?;;/);
    const createCase = matched === null ? '' : matched[0];
    expect(createCase).toContain('--llm');
    expect(createCase).toContain('--system-prompt');
    expect(createCase).toContain('--default-temperature');
  });
});