feat(chat): print agent + system prompt banner at chat start
Some checks failed
CI/CD / typecheck (pull_request) Successful in 53s
CI/CD / test (pull_request) Successful in 1m5s
CI/CD / lint (pull_request) Successful in 2m29s
CI/CD / smoke (pull_request) Failing after 1m39s
CI/CD / build (pull_request) Successful in 5m30s
CI/CD / publish (pull_request) Has been skipped

When you launch \`mcpctl chat <agent>\` it's not always obvious which
agent, LLM, project, or system prompt you're actually wired to,
especially when --system / --system-append flags are layered on top
of the agent's defaults. The session would just start at \`> \` with
no confirmation of the configuration.

Now both REPL and one-shot modes print a banner to stderr listing:
  - agent name + description
  - LLM + project (if attached)
  - effective system prompt (or --system override) and any
    --system-append addendum, indented for readability
  - active sampling overrides (temperature, top_p, etc.)

Goes through stderr so \`mcpctl chat ... -m "hi" 2>/dev/null\` keeps
piping clean. Best-effort: a metadata fetch failure logs and lets
the chat proceed rather than blocking.

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
This commit is contained in:
Michal
2026-04-26 18:37:06 +01:00
parent ae54210a52
commit 21f406037a

View File

@@ -138,6 +138,7 @@ async function runOneShot(
overrides: Overrides, overrides: Overrides,
stream: boolean | undefined, stream: boolean | undefined,
): Promise<void> { ): Promise<void> {
await printChatHeader(deps, agent, overrides);
if (stream === false) { if (stream === false) {
const body: Record<string, unknown> = { message, ...overrides }; const body: Record<string, unknown> = { message, ...overrides };
if (threadId !== undefined) body.threadId = threadId; if (threadId !== undefined) body.threadId = threadId;
@@ -170,12 +171,14 @@ async function runRepl(
const rl = readline.createInterface({ input: process.stdin, output: process.stdout }); const rl = readline.createInterface({ input: process.stdin, output: process.stdout });
const ask = (q: string): Promise<string> => new Promise((resolve) => rl.question(q, resolve)); const ask = (q: string): Promise<string> => new Promise((resolve) => rl.question(q, resolve));
await printChatHeader(deps, agent, overrides);
// The status bar persists across turns inside a REPL — it shows the last // The status bar persists across turns inside a REPL — it shows the last
// response's final rate between messages, then refreshes live during the // response's final rate between messages, then refreshes live during the
// next stream. Only enabled for streaming mode (no rate to show otherwise). // next stream. Only enabled for streaming mode (no rate to show otherwise).
const bar = stream === false ? null : installStatusBar(); const bar = stream === false ? null : installStatusBar();
process.stderr.write(`Chat with agent '${agent}'. Slash commands: /set /system /tools /clear /save /quit. Ctrl-D to exit.\n`); process.stderr.write(`Slash commands: /set /system /tools /clear /save /quit. Ctrl-D to exit.\n`);
if (threadId !== undefined) { if (threadId !== undefined) {
process.stderr.write(`(resuming thread ${threadId})\n`); process.stderr.write(`(resuming thread ${threadId})\n`);
} }
@@ -660,6 +663,82 @@ function installStatusBar(): StatusBar | null {
return { update, teardown }; return { update, teardown };
} }
/** Shape of the agent-metadata payload fetched from GET /api/v1/agents/:name. */
interface AgentInfo {
  // Agent identifier as registered on the server.
  name: string;
  // Human-readable blurb; may be '' (the banner omits it when empty).
  description: string;
  // Agent's default system prompt; may be '' (banner prints "(empty)").
  systemPrompt: string;
  // LLM the agent is wired to; only the display name is used here.
  llm: { name: string };
  // Attached project, or null when the agent is standalone.
  project: { name: string } | null;
}
/**
 * Prints a startup banner showing what the chat session will be running with:
 * agent name, LLM, project, the assembled system prompt, and any session
 * overrides. Lets the user verify the wiring before they spend tokens.
 *
 * Written to stderr so piped stdout (e.g. `-m "hi" | jq`) stays clean.
 *
 * Best-effort: if the agent fetch fails we log and continue rather than
 * blocking the chat — the user might still want to send a message.
 */
async function printChatHeader(
  deps: ChatCommandDeps,
  agent: string,
  overrides: Overrides,
): Promise<void> {
  let info: AgentInfo;
  try {
    info = await deps.client.get<AgentInfo>(`/api/v1/agents/${encodeURIComponent(agent)}`);
  } catch (err) {
    // Narrow the unknown catch variable instead of asserting `as Error`:
    // a thrown non-Error must not crash the banner.
    const msg = err instanceof Error ? err.message : String(err);
    process.stderr.write(`(could not fetch agent metadata: ${msg})\n`);
    return;
  }
  const sep = '─'.repeat(60);
  const out = (s: string): void => { process.stderr.write(`${styleStats(s)}\n`); };
  // Indent multi-line prompt text one step so it reads as a quoted block.
  const indent = (text: string): string =>
    text.split('\n').map((l) => ` ${l}`).join('\n');
  out(sep);
  // Bug fix: name and description were concatenated with no delimiter
  // ("Agent: plannerHelps plan tasks"); separate them with an em dash.
  out(`Agent: ${info.name}${info.description !== '' ? ` — ${info.description}` : ''}`);
  const tail = info.project !== null ? ` Project: ${info.project.name}` : '';
  out(`LLM: ${info.llm.name}${tail}`);
  if (overrides.systemOverride !== undefined) {
    // --system replaces the agent's own prompt entirely; say so explicitly.
    out(`System prompt (--system replaces agent.systemPrompt):`);
    out(indent(overrides.systemOverride));
  } else {
    out(`System prompt:`);
    out(indent(info.systemPrompt !== '' ? info.systemPrompt : '(empty)'));
  }
  if (overrides.systemAppend !== undefined) {
    out(`System append (--system-append):`);
    out(indent(overrides.systemAppend));
  }
  if (info.project !== null) {
    out(`(project prompts auto-appended at chat time; /tools lists MCP servers)`);
  }
  const sessionOverrides = describeSessionOverrides(overrides);
  if (sessionOverrides !== '') {
    out(`Sampling overrides: ${sessionOverrides}`);
  }
  out(sep);
}
/**
 * Renders the active session-level sampling overrides as one space-separated
 * `key=value` string (e.g. "temperature=0.7 top_p=0.9"). Returns '' when no
 * overrides are set so callers can skip the banner line entirely.
 */
function describeSessionOverrides(o: Overrides): string {
  const parts: string[] = [];
  const add = (label: string, value: unknown): void => {
    parts.push(`${label}=${String(value)}`);
  };
  if (o.temperature !== undefined) add('temperature', o.temperature);
  if (o.top_p !== undefined) add('top_p', o.top_p);
  if (o.top_k !== undefined) add('top_k', o.top_k);
  if (o.max_tokens !== undefined) add('max_tokens', o.max_tokens);
  if (o.seed !== undefined) add('seed', o.seed);
  // List-valued overrides are comma-joined; an empty stop list counts as unset.
  if (o.stop !== undefined && o.stop.length > 0) add('stop', o.stop.join(','));
  if (o.tools_allowlist !== undefined) add('allow_tools', o.tools_allowlist.join(','));
  if (o.extra !== undefined) {
    // Pass-through extras: one key=value pair per entry, insertion order.
    for (const [key, value] of Object.entries(o.extra)) add(key, value);
  }
  return parts.join(' ');
}
/**
 * Commander-style accumulator for repeatable CLI flags: returns a new array
 * with `value` appended to `prev`. Never mutates `prev`.
 */
function collect(value: string, prev: string[]): string[] {
  return prev.concat(value);
}