diff --git a/completions/mcpctl.bash b/completions/mcpctl.bash index 33a443c..152d3ad 100644 --- a/completions/mcpctl.bash +++ b/completions/mcpctl.bash @@ -191,7 +191,7 @@ _mcpctl() { COMPREPLY=($(compgen -W "--type --description --default --url --namespace --mount --path-prefix --token-secret --config --force -h --help" -- "$cur")) ;; project) - COMPREPLY=($(compgen -W "-d --description --proxy-model --prompt --gated --no-gated --server --force -h --help" -- "$cur")) + COMPREPLY=($(compgen -W "-d --description --proxy-model --prompt --llm --llm-model --gated --no-gated --server --force -h --help" -- "$cur")) ;; user) COMPREPLY=($(compgen -W "--password --name --force -h --help" -- "$cur")) diff --git a/completions/mcpctl.fish b/completions/mcpctl.fish index e5b1a14..6b6907b 100644 --- a/completions/mcpctl.fish +++ b/completions/mcpctl.fish @@ -344,6 +344,8 @@ complete -c mcpctl -n "__mcpctl_subcmd_active create secretbackend" -l force -d complete -c mcpctl -n "__mcpctl_subcmd_active create project" -s d -l description -d 'Project description' -x complete -c mcpctl -n "__mcpctl_subcmd_active create project" -l proxy-model -d 'Plugin name (default, content-pipeline, gate, none)' -x complete -c mcpctl -n "__mcpctl_subcmd_active create project" -l prompt -d 'Project-level prompt / instructions for the LLM' -x +complete -c mcpctl -n "__mcpctl_subcmd_active create project" -l llm -d 'Name of an Llm resource (see \'mcpctl get llms\'), or \'none\' to disable' -x +complete -c mcpctl -n "__mcpctl_subcmd_active create project" -l llm-model -d 'Override the model string for this project (defaults to the Llm\'s own model)' -x complete -c mcpctl -n "__mcpctl_subcmd_active create project" -l gated -d '[deprecated: use --proxy-model default]' complete -c mcpctl -n "__mcpctl_subcmd_active create project" -l no-gated -d '[deprecated: use --proxy-model content-pipeline]' complete -c mcpctl -n "__mcpctl_subcmd_active create project" -l server -d 'Server name (repeat for multiple)' 
-x diff --git a/src/cli/src/commands/apply.ts b/src/cli/src/commands/apply.ts index 65e4b62..a24ce44 100644 --- a/src/cli/src/commands/apply.ts +++ b/src/cli/src/commands/apply.ts @@ -149,7 +149,12 @@ const ProjectSpecSchema = z.object({ prompt: z.string().max(10000).default(''), proxyModel: z.string().optional(), gated: z.boolean().optional(), + // Name of an `Llm` resource (see `mcpctl get llms`), or the literal 'none' + // to disable LLM features for this project. Unknown names fall back to the + // consumer's registry default — `mcpctl describe project` will flag that. llmProvider: z.string().optional(), + // Override the model string for this project; defaults to the Llm's own + // model when unset. llmModel: z.string().optional(), servers: z.array(z.string()).default([]), }); diff --git a/src/cli/src/commands/create.ts b/src/cli/src/commands/create.ts index 9318c63..895ca9b 100644 --- a/src/cli/src/commands/create.ts +++ b/src/cli/src/commands/create.ts @@ -378,6 +378,8 @@ export function createCreateCommand(deps: CreateCommandDeps): Command { .option('-d, --description <desc>', 'Project description', '') .option('--proxy-model <name>', 'Plugin name (default, content-pipeline, gate, none)') .option('--prompt <text>', 'Project-level prompt / instructions for the LLM') + .option('--llm <name>', "Name of an Llm resource (see 'mcpctl get llms'), or 'none' to disable") + .option('--llm-model <model>', 'Override the model string for this project (defaults to the Llm\'s own model)') .option('--gated', '[deprecated: use --proxy-model default]') .option('--no-gated', '[deprecated: use --proxy-model content-pipeline]') .option('--server <name>', 'Server name (repeat for multiple)', collect, []) @@ -397,6 +399,8 @@ export function createCreateCommand(deps: CreateCommandDeps): Command { // Pass gated for backward compat with older mcpd if (opts.gated !== undefined) body.gated = opts.gated as boolean; if (opts.server.length > 0) body.servers = opts.server; + if (opts.llm) body.llmProvider = opts.llm; + if
(opts.llmModel) body.llmModel = opts.llmModel; try { const project = await client.post<{ id: string; name: string }>('/api/v1/projects', body); diff --git a/src/cli/src/commands/describe.ts b/src/cli/src/commands/describe.ts index 814472a..a986d35 100644 --- a/src/cli/src/commands/describe.ts +++ b/src/cli/src/commands/describe.ts @@ -137,6 +137,7 @@ function formatInstanceDetail(instance: Record<string, unknown>, inspect?: Record function formatProjectDetail( project: Record<string, unknown>, prompts: Array<{ name: string; priority: number; linkTarget: string | null }> = [], + knownLlmNames?: Set<string>, ): string { const lines: string[] = []; lines.push(`=== Project: ${project.name} ===`); @@ -151,8 +152,21 @@ function formatProjectDetail( lines.push(''); lines.push('Plugin Config:'); lines.push(` ${pad('Plugin:', 18)}${proxyModel}`); - if (llmProvider) lines.push(` ${pad('LLM Provider:', 18)}${llmProvider}`); - if (llmModel) lines.push(` ${pad('LLM Model:', 18)}${llmModel}`); + if (llmProvider) { + // As of Phase 4, llmProvider names a centralized Llm resource (see + // `mcpctl get llms`). A value like "none" disables LLM for the project; + // anything else that doesn't match a registered Llm falls back to the + // registry default on consumers — flag it so operators notice.
+ const resolvable = knownLlmNames === undefined + || llmProvider === 'none' + || knownLlmNames.has(llmProvider); + if (resolvable) { + lines.push(` ${pad('LLM:', 18)}${llmProvider}`); + } else { + lines.push(` ${pad('LLM:', 18)}${llmProvider} [warning: no Llm registered with this name — will fall back to registry default]`); + } + } + if (llmModel) lines.push(` ${pad('LLM Model:', 18)}${llmModel} (override)`); // Servers section const servers = project.servers as Array<{ server: { name: string } }> | undefined; @@ -887,10 +901,16 @@ export function createDescribeCommand(deps: DescribeCommandDeps): Command { deps.log(formatLlmDetail(item)); break; case 'projects': { - const projectPrompts = await deps.client - .get<Array<{ name: string; priority: number; linkTarget: string | null }>>(`/api/v1/prompts?projectId=${item.id as string}`) - .catch(() => []); - deps.log(formatProjectDetail(item, projectPrompts)); + const [projectPrompts, llms] = await Promise.all([ + deps.client + .get<Array<{ name: string; priority: number; linkTarget: string | null }>>(`/api/v1/prompts?projectId=${item.id as string}`) + .catch(() => []), + deps.client + .get<Array<{ name: string }>>('/api/v1/llms') + .catch(() => [] as Array<{ name: string }>), + ]); + const llmNames = new Set(llms.map((l) => l.name)); + deps.log(formatProjectDetail(item, projectPrompts, llmNames)); break; } case 'users': { diff --git a/src/cli/tests/commands/describe.test.ts b/src/cli/tests/commands/describe.test.ts index 979ddba..52a320d 100644 --- a/src/cli/tests/commands/describe.test.ts +++ b/src/cli/tests/commands/describe.test.ts @@ -108,6 +108,77 @@ describe('describe command', () => { expect(text).not.toContain('Gated:'); }); + it('shows project Llm reference without warning when the name matches a registered Llm', async () => { + const deps = makeDeps({ + id: 'proj-1', + name: 'with-llm', + description: '', + ownerId: 'user-1', + proxyModel: 'default', + llmProvider: 'claude', + llmModel: 'claude-3-opus', + createdAt: '2025-01-01', + }); + // /api/v1/llms returns a claude entry → no warning + deps.client = { + get: vi.fn(async (path: string) => { + if (path === 
'/api/v1/llms') return [{ name: 'claude' }]; + return []; + }), + } as unknown as typeof deps.client; + const cmd = createDescribeCommand(deps); + await cmd.parseAsync(['node', 'test', 'project', 'proj-1']); + const text = deps.output.join('\n'); + expect(text).toContain('LLM:'); + expect(text).toContain('claude'); + expect(text).not.toContain('warning:'); + }); + + it('warns on describe project when llmProvider does not resolve to any registered Llm', async () => { + const deps = makeDeps({ + id: 'proj-1', + name: 'orphan', + description: '', + ownerId: 'user-1', + proxyModel: 'default', + llmProvider: 'claude-ghost', + createdAt: '2025-01-01', + }); + deps.client = { + get: vi.fn(async (path: string) => { + if (path === '/api/v1/llms') return [{ name: 'claude' }, { name: 'gpt-4o' }]; + return []; + }), + } as unknown as typeof deps.client; + const cmd = createDescribeCommand(deps); + await cmd.parseAsync(['node', 'test', 'project', 'proj-1']); + const text = deps.output.join('\n'); + expect(text).toContain('claude-ghost'); + expect(text).toContain('warning:'); + expect(text).toContain('fall back to registry default'); + }); + + it('does not warn when llmProvider is "none" (explicit disable)', async () => { + const deps = makeDeps({ + id: 'proj-1', + name: 'no-llm', + description: '', + ownerId: 'user-1', + proxyModel: 'default', + llmProvider: 'none', + createdAt: '2025-01-01', + }); + deps.client = { + get: vi.fn(async () => []), + } as unknown as typeof deps.client; + const cmd = createDescribeCommand(deps); + await cmd.parseAsync(['node', 'test', 'project', 'proj-1']); + const text = deps.output.join('\n'); + expect(text).toContain('LLM:'); + expect(text).toContain('none'); + expect(text).not.toContain('warning:'); + }); + it('shows project Plugin Config defaulting to "default" when proxyModel is empty', async () => { const deps = makeDeps({ id: 'proj-1', diff --git a/src/mcplocal/src/discovery.ts b/src/mcplocal/src/discovery.ts index 89ee424..6ee8181 100644 
--- a/src/mcplocal/src/discovery.ts +++ b/src/mcplocal/src/discovery.ts @@ -57,9 +57,16 @@ export async function refreshProjectUpstreams( /** * Fetch a project's LLM config (llmProvider, llmModel) from mcpd. - * These are the project-level "recommendations" — local overrides take priority. + * + * Phase 4 redefines `llmProvider` semantically: it names a centralized `Llm` + * resource (see `mcpctl get llms`) — NOT a local provider. Consumers should + * resolve it through mcpd's inference proxy when reachable. The field remains + * a free-form string on the wire for backward compatibility; local overrides + * in `~/.mcpctl/config.json` still take priority, and unknown names fall + * through to the registry default. */ export interface ProjectLlmConfig { + /** Name of an `Llm` resource on mcpd, or 'none' to disable LLM features. */ llmProvider?: string; llmModel?: string; proxyModel?: string; @@ -67,6 +74,31 @@ export interface ProjectLlmConfig { serverOverrides?: Record; } +/** + * Resolve a project's `llmProvider` against mcpd's Llm registry. Returns: + * - 'registered' — an Llm with this name exists + * - 'disabled' — value is 'none' + * - 'unregistered'— no Llm matches (consumer should fall back to registry default) + * - 'unreachable' — mcpd couldn't be queried + */ +export type LlmReferenceStatus = 'registered' | 'disabled' | 'unregistered' | 'unreachable'; + +export async function resolveProjectLlmReference( + mcpdClient: McpdClient, + llmProvider: string | undefined, +): Promise<LlmReferenceStatus> { + if (llmProvider === undefined || llmProvider === '') return 'unregistered'; + if (llmProvider === 'none') return 'disabled'; + try { + await mcpdClient.get(`/api/v1/llms/${encodeURIComponent(llmProvider)}`); + return 'registered'; + } catch (err) { + const msg = err instanceof Error ? 
err.message : String(err); + if (msg.includes('404') || msg.toLowerCase().includes('not found')) return 'unregistered'; + return 'unreachable'; + } +} + export async function fetchProjectLlmConfig( mcpdClient: McpdClient, projectName: string, diff --git a/src/mcplocal/src/http/project-mcp-endpoint.ts b/src/mcplocal/src/http/project-mcp-endpoint.ts index 42ec24a..7ea821f 100644 --- a/src/mcplocal/src/http/project-mcp-endpoint.ts +++ b/src/mcplocal/src/http/project-mcp-endpoint.ts @@ -101,7 +101,16 @@ export function registerProjectMcpEndpoint(app: FastifyInstance, mcpdClient: Mcp complete: async () => '', available: () => false, }; - // Build cache namespace: provider--model--proxymodel + // Build cache namespace: provider--model--proxymodel. + // Resolution order: + // 1. local ~/.mcpctl override + // 2. mcpdConfig.llmProvider (Phase 4: name of a centralized Llm) + // 3. local registry default (fast tier → active provider) + // 4. literal 'none' + // If (2) names an Llm the HTTP-mode proxy-model pipeline can route + // through mcpd's /api/v1/llms/:name/infer (pivot lands when the client + // integrates that path); meanwhile the value is still usable as a cache + // key, and the describe-project warning flags stale configs. const llmProvider = localOverride?.provider ?? mcpdConfig.llmProvider ?? effectiveRegistry?.getTierProviders('fast')[0] ?? 
effectiveRegistry?.getActiveName() diff --git a/src/mcplocal/tests/llm-reference-resolver.test.ts b/src/mcplocal/tests/llm-reference-resolver.test.ts new file mode 100644 index 0000000..af02a9f --- /dev/null +++ b/src/mcplocal/tests/llm-reference-resolver.test.ts @@ -0,0 +1,45 @@ +import { describe, it, expect, vi } from 'vitest'; +import { resolveProjectLlmReference } from '../src/discovery.js'; +import type { McpdClient } from '../src/http/mcpd-client.js'; + +function mockClient(get: (path: string) => Promise<unknown>): McpdClient { + return { get } as unknown as McpdClient; +} + +describe('resolveProjectLlmReference', () => { + it('returns "disabled" for the literal string "none"', async () => { + const client = mockClient(async () => { throw new Error('should not be called'); }); + expect(await resolveProjectLlmReference(client, 'none')).toBe('disabled'); + }); + + it('returns "unregistered" when llmProvider is empty or undefined', async () => { + const client = mockClient(async () => { throw new Error('should not be called'); }); + expect(await resolveProjectLlmReference(client, undefined)).toBe('unregistered'); + expect(await resolveProjectLlmReference(client, '')).toBe('unregistered'); + }); + + it('returns "registered" when mcpd returns 200 for the name', async () => { + const get = vi.fn(async () => ({ name: 'claude' })); + expect(await resolveProjectLlmReference(mockClient(get), 'claude')).toBe('registered'); + expect(get).toHaveBeenCalledWith('/api/v1/llms/claude'); + }); + + it('returns "unregistered" on 404', async () => { + const client = mockClient(async () => { throw new Error('HTTP 404 not found'); }); + expect(await resolveProjectLlmReference(client, 'missing')).toBe('unregistered'); + }); + + it('returns "unreachable" on other errors (500, network)', async () => { + const client = mockClient(async () => { throw new Error('HTTP 500 internal error'); }); + expect(await resolveProjectLlmReference(client, 'x')).toBe('unreachable'); + + const client2 = 
mockClient(async () => { throw new Error('ECONNREFUSED'); }); + expect(await resolveProjectLlmReference(client2, 'x')).toBe('unreachable'); + }); + + it('URL-encodes names with special characters', async () => { + const get = vi.fn(async () => ({})); + await resolveProjectLlmReference(mockClient(get), 'weird name/with/slashes'); + expect(get).toHaveBeenCalledWith('/api/v1/llms/weird%20name%2Fwith%2Fslashes'); + }); +});