feat: completions update, create promptrequest, LLM flag rename, ACP content fix
Some checks failed
CI / lint (pull_request) Has been cancelled
CI / typecheck (pull_request) Has been cancelled
CI / test (pull_request) Has been cancelled
CI / build (pull_request) Has been cancelled
CI / package (pull_request) Has been cancelled

- Add prompts/promptrequests to shell completions (fish + bash)
- Add approve, setup, prompt, promptrequest commands to completions
- Add `create promptrequest` CLI command (POST /projects/:name/promptrequests)
- Rename --proxy-mode-llm-provider/model to --llm-provider/model
- Fix ACP client: handle single-object content format from real Gemini
- Add tests for single-object content and agent_thought_chunk filtering

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Michal
2026-02-25 00:21:31 +00:00
parent bc769c4eeb
commit 9481d394a1
7 changed files with 143 additions and 26 deletions

View File

@@ -196,8 +196,8 @@ export function createCreateCommand(deps: CreateCommandDeps): Command {
.argument('<name>', 'Project name')
.option('-d, --description <text>', 'Project description', '')
.option('--proxy-mode <mode>', 'Proxy mode (direct, filtered)')
.option('--proxy-mode-llm-provider <name>', 'LLM provider name (for filtered proxy mode)')
.option('--proxy-mode-llm-model <name>', 'LLM model name (for filtered proxy mode)')
.option('--llm-provider <name>', 'LLM provider name')
.option('--llm-model <name>', 'LLM model name')
.option('--prompt <text>', 'Project-level prompt / instructions for the LLM')
.option('--server <name>', 'Server name (repeat for multiple)', collect, [])
.option('--force', 'Update if already exists')
@@ -208,8 +208,8 @@ export function createCreateCommand(deps: CreateCommandDeps): Command {
proxyMode: opts.proxyMode ?? 'direct',
};
if (opts.prompt) body.prompt = opts.prompt;
if (opts.proxyModeLlmProvider) body.llmProvider = opts.proxyModeLlmProvider;
if (opts.proxyModeLlmModel) body.llmModel = opts.proxyModeLlmModel;
if (opts.llmProvider) body.llmProvider = opts.llmProvider;
if (opts.llmModel) body.llmModel = opts.llmModel;
if (opts.server.length > 0) body.servers = opts.server;
try {
@@ -379,5 +379,31 @@ export function createCreateCommand(deps: CreateCommandDeps): Command {
log(`prompt '${prompt.name}' created (id: ${prompt.id})`);
});
// --- create promptrequest ---
// Registers `create promptrequest <name>`: posts a pending prompt proposal
// to the project's promptrequests endpoint; it must be approved separately.
const promptRequestCmd = cmd.command('promptrequest');
promptRequestCmd
  .description('Create a prompt request (pending proposal that needs approval)')
  .argument('<name>', 'Prompt request name (lowercase alphanumeric with hyphens)')
  .requiredOption('--project <name>', 'Project name (required)')
  .option('--content <text>', 'Prompt content text')
  .option('--content-file <path>', 'Read prompt content from file')
  .action(async (name: string, opts) => {
    // Resolve the prompt body: a file, when given, overrides inline --content.
    let content = opts.content as string | undefined;
    if (opts.contentFile) {
      const fs = await import('node:fs/promises');
      content = await fs.readFile(opts.contentFile as string, 'utf-8');
    }
    if (!content) {
      throw new Error('--content or --content-file is required');
    }
    const projectName = opts.project as string;
    const path = `/api/v1/projects/${encodeURIComponent(projectName)}/promptrequests`;
    const pr = await client.post<{ id: string; name: string }>(path, { name, content });
    log(`prompt request '${pr.name}' created (id: ${pr.id})`);
    log(` approve with: mcpctl approve promptrequest ${pr.name}`);
  });
return cmd;
}

View File

@@ -30,8 +30,8 @@ describe('project with new fields', () => {
'project', 'smart-home',
'-d', 'Smart home project',
'--proxy-mode', 'filtered',
'--proxy-mode-llm-provider', 'gemini-cli',
'--proxy-mode-llm-model', 'gemini-2.0-flash',
'--llm-provider', 'gemini-cli',
'--llm-model', 'gemini-2.0-flash',
'--server', 'my-grafana',
'--server', 'my-ha',
], { from: 'user' });

View File

@@ -15,7 +15,7 @@ describe('fish completions', () => {
});
it('does not offer resource types without __mcpctl_needs_resource_type guard', () => {
const resourceTypes = ['servers', 'instances', 'secrets', 'templates', 'projects', 'users', 'groups', 'rbac'];
const resourceTypes = ['servers', 'instances', 'secrets', 'templates', 'projects', 'users', 'groups', 'rbac', 'prompts', 'promptrequests'];
const lines = fishFile.split('\n').filter((l) => l.startsWith('complete '));
for (const line of lines) {

View File

@@ -205,12 +205,16 @@ export class AcpClient {
// Collect text from agent_message_chunk
if (update.sessionUpdate === 'agent_message_chunk') {
const content = update.content as Array<{ type: string; text?: string }> | undefined;
if (content) {
for (const block of content) {
if (block.type === 'text' && block.text) {
this.activePromptChunks.push(block.text);
}
const content = update.content;
// Gemini ACP sends content as a single object {type, text} or an array [{type, text}]
const blocks: Array<{ type: string; text?: string }> = Array.isArray(content)
? content as Array<{ type: string; text?: string }>
: content && typeof content === 'object'
? [content as { type: string; text?: string }]
: [];
for (const block of blocks) {
if (block.type === 'text' && block.text) {
this.activePromptChunks.push(block.text);
}
}
}

View File

@@ -230,6 +230,77 @@ describe('AcpClient', () => {
expect(result).toBe('Part A Part B');
});
it('handles single-object content (real Gemini ACP format)', async () => {
  createClient();
  autoHandshake('sess-1');
  await client.ensureReady();
  mock.stdin.write.mockImplementation((data: string) => {
    const parsed = JSON.parse(data.trim()) as { id: number; method: string };
    if (parsed.method !== 'session/prompt') return;
    // Real Gemini ACP sends content as a single object, not an array
    const chunkNotification = {
      jsonrpc: '2.0',
      method: 'session/update',
      params: {
        sessionId: 'sess-1',
        update: {
          sessionUpdate: 'agent_message_chunk',
          content: { type: 'text', text: 'ok' },
        },
      },
    };
    setImmediate(() => {
      mock.sendLine(chunkNotification);
      mock.sendResponse(parsed.id, { stopReason: 'end_turn' });
    });
  });
  const result = await client.prompt('test');
  expect(result).toBe('ok');
});
it('ignores agent_thought_chunk notifications', async () => {
  createClient();
  autoHandshake('sess-1');
  await client.ensureReady();
  // Helper shape shared by both notifications: only the update payload varies.
  const updateOf = (sessionUpdate: string, text: string) => ({
    jsonrpc: '2.0',
    method: 'session/update',
    params: {
      sessionId: 'sess-1',
      update: { sessionUpdate, content: { type: 'text', text } },
    },
  });
  mock.stdin.write.mockImplementation((data: string) => {
    const parsed = JSON.parse(data.trim()) as { id: number; method: string };
    if (parsed.method !== 'session/prompt') return;
    setImmediate(() => {
      // Gemini sends thought chunks before message chunks
      mock.sendLine(updateOf('agent_thought_chunk', 'Thinking about it...'));
      mock.sendLine(updateOf('agent_message_chunk', 'ok'));
      mock.sendResponse(parsed.id, { stopReason: 'end_turn' });
    });
  });
  const result = await client.prompt('test');
  expect(result).toBe('ok');
});
it('calls ensureReady automatically (lazy init)', async () => {
createClient();
autoHandshake('sess-auto');