From 21f406037af655a59873d0f477a9c6c71e9e44ab Mon Sep 17 00:00:00 2001
From: Michal
Date: Sun, 26 Apr 2026 18:37:06 +0100
Subject: [PATCH] feat(chat): print agent + system prompt banner at chat start

When you launch `mcpctl chat <agent>` it's not always obvious which
agent, LLM, project, or system prompt you're actually wired to,
especially when --system / --system-append flags are layered on top of
the agent's defaults. The session would just start at `> ` with no
confirmation of the configuration.

Now both REPL and one-shot modes print a banner to stderr listing:
- agent name + description
- LLM + project (if attached)
- effective system prompt (or --system override) and any
  --system-append addendum, indented for readability
- active sampling overrides (temperature, top_p, etc.)

Goes through stderr so `mcpctl chat ... -m "hi" 2>/dev/null` keeps
piping clean. Best-effort: a metadata fetch failure logs and lets the
chat proceed rather than blocking.

Co-Authored-By: Claude Opus 4.7 (1M context)
---
 src/cli/src/commands/chat.ts | 81 +++++++++++++++++++++++++++++++++++-
 1 file changed, 80 insertions(+), 1 deletion(-)

diff --git a/src/cli/src/commands/chat.ts b/src/cli/src/commands/chat.ts
index f7fb9da..6b3a184 100644
--- a/src/cli/src/commands/chat.ts
+++ b/src/cli/src/commands/chat.ts
@@ -138,6 +138,7 @@ async function runOneShot(
   overrides: Overrides,
   stream: boolean | undefined,
 ): Promise<void> {
+  await printChatHeader(deps, agent, overrides);
   if (stream === false) {
     const body: Record<string, unknown> = { message, ...overrides };
     if (threadId !== undefined) body.threadId = threadId;
@@ -170,12 +171,14 @@ async function runRepl(
   const rl = readline.createInterface({ input: process.stdin, output: process.stdout });
   const ask = (q: string): Promise<string> => new Promise((resolve) => rl.question(q, resolve));
 
+  await printChatHeader(deps, agent, overrides);
+
   // The status bar persists across turns inside a REPL — it shows the last
   // response's final rate between messages, then refreshes live during the
   // next stream. Only enabled for streaming mode (no rate to show otherwise).
   const bar = stream === false ? null : installStatusBar();
-  process.stderr.write(`Chat with agent '${agent}'. Slash commands: /set /system /tools /clear /save /quit. Ctrl-D to exit.\n`);
+  process.stderr.write(`Slash commands: /set /system /tools /clear /save /quit. Ctrl-D to exit.\n`);
   if (threadId !== undefined) {
     process.stderr.write(`(resuming thread ${threadId})\n`);
   }
 
@@ -660,6 +663,82 @@ function installStatusBar(): StatusBar | null {
   return { update, teardown };
 }
 
+interface AgentInfo {
+  name: string;
+  description: string;
+  systemPrompt: string;
+  llm: { name: string };
+  project: { name: string } | null;
+}
+
+/**
+ * Prints a startup banner showing what the chat session will be running with:
+ * agent name, LLM, project, the assembled system prompt, and any session
+ * overrides. Lets the user verify the wiring before they spend tokens.
+ *
+ * Best-effort: if the agent fetch fails we log and continue rather than
+ * blocking the chat — the user might still want to send a message.
+ */
+async function printChatHeader(
+  deps: ChatCommandDeps,
+  agent: string,
+  overrides: Overrides,
+): Promise<void> {
+  let info: AgentInfo;
+  try {
+    info = await deps.client.get(`/api/v1/agents/${encodeURIComponent(agent)}`);
+  } catch (err) {
+    process.stderr.write(`(could not fetch agent metadata: ${(err as Error).message})\n`);
+    return;
+  }
+
+  const sep = '─'.repeat(60);
+  const out = (s: string): void => { process.stderr.write(`${styleStats(s)}\n`); };
+  const indent = (text: string): string =>
+    text.split('\n').map((l) => ` ${l}`).join('\n');
+
+  out(sep);
+  out(`Agent: ${info.name}${info.description !== '' ? ` — ${info.description}` : ''}`);
+  const tail = info.project !== null ? ` Project: ${info.project.name}` : '';
+  out(`LLM: ${info.llm.name}${tail}`);
+
+  if (overrides.systemOverride !== undefined) {
+    out(`System prompt (--system replaces agent.systemPrompt):`);
+    out(indent(overrides.systemOverride));
+  } else {
+    out(`System prompt:`);
+    out(indent(info.systemPrompt !== '' ? info.systemPrompt : '(empty)'));
+  }
+  if (overrides.systemAppend !== undefined) {
+    out(`System append (--system-append):`);
+    out(indent(overrides.systemAppend));
+  }
+  if (info.project !== null) {
+    out(`(project prompts auto-appended at chat time; /tools lists MCP servers)`);
+  }
+
+  const sessionOverrides = describeSessionOverrides(overrides);
+  if (sessionOverrides !== '') {
+    out(`Sampling overrides: ${sessionOverrides}`);
+  }
+  out(sep);
+}
+
+function describeSessionOverrides(o: Overrides): string {
+  const parts: string[] = [];
+  if (o.temperature !== undefined) parts.push(`temperature=${String(o.temperature)}`);
+  if (o.top_p !== undefined) parts.push(`top_p=${String(o.top_p)}`);
+  if (o.top_k !== undefined) parts.push(`top_k=${String(o.top_k)}`);
+  if (o.max_tokens !== undefined) parts.push(`max_tokens=${String(o.max_tokens)}`);
+  if (o.seed !== undefined) parts.push(`seed=${String(o.seed)}`);
+  if (o.stop !== undefined && o.stop.length > 0) parts.push(`stop=${o.stop.join(',')}`);
+  if (o.tools_allowlist !== undefined) parts.push(`allow_tools=${o.tools_allowlist.join(',')}`);
+  if (o.extra !== undefined) {
+    for (const [k, v] of Object.entries(o.extra)) parts.push(`${k}=${String(v)}`);
+  }
+  return parts.join(' ');
+}
+
 function collect(value: string, prev: string[]): string[] {
   return [...prev, value];
 }