feat(project): Project.llmProvider as Llm reference #55

Merged
michal merged 2 commits from feat/project-llm-ref into main 2026-04-19 21:39:55 +00:00
9 changed files with 197 additions and 9 deletions
Showing only changes of commit de854b1944 - Show all commits

View File

@@ -191,7 +191,7 @@ _mcpctl() {
COMPREPLY=($(compgen -W "--type --description --default --url --namespace --mount --path-prefix --token-secret --config --force -h --help" -- "$cur")) COMPREPLY=($(compgen -W "--type --description --default --url --namespace --mount --path-prefix --token-secret --config --force -h --help" -- "$cur"))
;; ;;
project) project)
COMPREPLY=($(compgen -W "-d --description --proxy-model --prompt --gated --no-gated --server --force -h --help" -- "$cur")) COMPREPLY=($(compgen -W "-d --description --proxy-model --prompt --llm --llm-model --gated --no-gated --server --force -h --help" -- "$cur"))
;; ;;
user) user)
COMPREPLY=($(compgen -W "--password --name --force -h --help" -- "$cur")) COMPREPLY=($(compgen -W "--password --name --force -h --help" -- "$cur"))

View File

@@ -344,6 +344,8 @@ complete -c mcpctl -n "__mcpctl_subcmd_active create secretbackend" -l force -d
complete -c mcpctl -n "__mcpctl_subcmd_active create project" -s d -l description -d 'Project description' -x complete -c mcpctl -n "__mcpctl_subcmd_active create project" -s d -l description -d 'Project description' -x
complete -c mcpctl -n "__mcpctl_subcmd_active create project" -l proxy-model -d 'Plugin name (default, content-pipeline, gate, none)' -x complete -c mcpctl -n "__mcpctl_subcmd_active create project" -l proxy-model -d 'Plugin name (default, content-pipeline, gate, none)' -x
complete -c mcpctl -n "__mcpctl_subcmd_active create project" -l prompt -d 'Project-level prompt / instructions for the LLM' -x complete -c mcpctl -n "__mcpctl_subcmd_active create project" -l prompt -d 'Project-level prompt / instructions for the LLM' -x
complete -c mcpctl -n "__mcpctl_subcmd_active create project" -l llm -d 'Name of an Llm resource (see \'mcpctl get llms\'), or \'none\' to disable' -x
complete -c mcpctl -n "__mcpctl_subcmd_active create project" -l llm-model -d 'Override the model string for this project (defaults to the Llm\'s own model)' -x
complete -c mcpctl -n "__mcpctl_subcmd_active create project" -l gated -d '[deprecated: use --proxy-model default]' complete -c mcpctl -n "__mcpctl_subcmd_active create project" -l gated -d '[deprecated: use --proxy-model default]'
complete -c mcpctl -n "__mcpctl_subcmd_active create project" -l no-gated -d '[deprecated: use --proxy-model content-pipeline]' complete -c mcpctl -n "__mcpctl_subcmd_active create project" -l no-gated -d '[deprecated: use --proxy-model content-pipeline]'
complete -c mcpctl -n "__mcpctl_subcmd_active create project" -l server -d 'Server name (repeat for multiple)' -x complete -c mcpctl -n "__mcpctl_subcmd_active create project" -l server -d 'Server name (repeat for multiple)' -x

View File

@@ -149,7 +149,12 @@ const ProjectSpecSchema = z.object({
prompt: z.string().max(10000).default(''), prompt: z.string().max(10000).default(''),
proxyModel: z.string().optional(), proxyModel: z.string().optional(),
gated: z.boolean().optional(), gated: z.boolean().optional(),
// Name of an `Llm` resource (see `mcpctl get llms`), or the literal 'none'
// to disable LLM features for this project. Unknown names fall back to the
// consumer's registry default — `mcpctl describe project` will flag that.
llmProvider: z.string().optional(), llmProvider: z.string().optional(),
// Override the model string for this project; defaults to the Llm's own
// model when unset.
llmModel: z.string().optional(), llmModel: z.string().optional(),
servers: z.array(z.string()).default([]), servers: z.array(z.string()).default([]),
}); });

View File

@@ -378,6 +378,8 @@ export function createCreateCommand(deps: CreateCommandDeps): Command {
.option('-d, --description <text>', 'Project description', '') .option('-d, --description <text>', 'Project description', '')
.option('--proxy-model <name>', 'Plugin name (default, content-pipeline, gate, none)') .option('--proxy-model <name>', 'Plugin name (default, content-pipeline, gate, none)')
.option('--prompt <text>', 'Project-level prompt / instructions for the LLM') .option('--prompt <text>', 'Project-level prompt / instructions for the LLM')
.option('--llm <name>', "Name of an Llm resource (see 'mcpctl get llms'), or 'none' to disable")
.option('--llm-model <model>', 'Override the model string for this project (defaults to the Llm\'s own model)')
.option('--gated', '[deprecated: use --proxy-model default]') .option('--gated', '[deprecated: use --proxy-model default]')
.option('--no-gated', '[deprecated: use --proxy-model content-pipeline]') .option('--no-gated', '[deprecated: use --proxy-model content-pipeline]')
.option('--server <name>', 'Server name (repeat for multiple)', collect, []) .option('--server <name>', 'Server name (repeat for multiple)', collect, [])
@@ -397,6 +399,8 @@ export function createCreateCommand(deps: CreateCommandDeps): Command {
// Pass gated for backward compat with older mcpd // Pass gated for backward compat with older mcpd
if (opts.gated !== undefined) body.gated = opts.gated as boolean; if (opts.gated !== undefined) body.gated = opts.gated as boolean;
if (opts.server.length > 0) body.servers = opts.server; if (opts.server.length > 0) body.servers = opts.server;
if (opts.llm) body.llmProvider = opts.llm;
if (opts.llmModel) body.llmModel = opts.llmModel;
try { try {
const project = await client.post<{ id: string; name: string }>('/api/v1/projects', body); const project = await client.post<{ id: string; name: string }>('/api/v1/projects', body);

View File

@@ -137,6 +137,7 @@ function formatInstanceDetail(instance: Record<string, unknown>, inspect?: Recor
function formatProjectDetail( function formatProjectDetail(
project: Record<string, unknown>, project: Record<string, unknown>,
prompts: Array<{ name: string; priority: number; linkTarget: string | null }> = [], prompts: Array<{ name: string; priority: number; linkTarget: string | null }> = [],
knownLlmNames?: Set<string>,
): string { ): string {
const lines: string[] = []; const lines: string[] = [];
lines.push(`=== Project: ${project.name} ===`); lines.push(`=== Project: ${project.name} ===`);
@@ -151,8 +152,21 @@ function formatProjectDetail(
lines.push(''); lines.push('');
lines.push('Plugin Config:'); lines.push('Plugin Config:');
lines.push(` ${pad('Plugin:', 18)}${proxyModel}`); lines.push(` ${pad('Plugin:', 18)}${proxyModel}`);
if (llmProvider) lines.push(` ${pad('LLM Provider:', 18)}${llmProvider}`); if (llmProvider) {
if (llmModel) lines.push(` ${pad('LLM Model:', 18)}${llmModel}`); // As of Phase 4, llmProvider names a centralized Llm resource (see
// `mcpctl get llms`). A value like "none" disables LLM for the project;
// anything else that doesn't match a registered Llm falls back to the
// registry default on consumers — flag it so operators notice.
const resolvable = knownLlmNames === undefined
|| llmProvider === 'none'
|| knownLlmNames.has(llmProvider);
if (resolvable) {
lines.push(` ${pad('LLM:', 18)}${llmProvider}`);
} else {
lines.push(` ${pad('LLM:', 18)}${llmProvider} [warning: no Llm registered with this name — will fall back to registry default]`);
}
}
if (llmModel) lines.push(` ${pad('LLM Model:', 18)}${llmModel} (override)`);
// Servers section // Servers section
const servers = project.servers as Array<{ server: { name: string } }> | undefined; const servers = project.servers as Array<{ server: { name: string } }> | undefined;
@@ -887,10 +901,16 @@ export function createDescribeCommand(deps: DescribeCommandDeps): Command {
deps.log(formatLlmDetail(item)); deps.log(formatLlmDetail(item));
break; break;
case 'projects': { case 'projects': {
const projectPrompts = await deps.client const [projectPrompts, llms] = await Promise.all([
deps.client
.get<Array<{ name: string; priority: number; linkTarget: string | null }>>(`/api/v1/prompts?projectId=${item.id as string}`) .get<Array<{ name: string; priority: number; linkTarget: string | null }>>(`/api/v1/prompts?projectId=${item.id as string}`)
.catch(() => []); .catch(() => []),
deps.log(formatProjectDetail(item, projectPrompts)); deps.client
.get<Array<{ name: string }>>('/api/v1/llms')
.catch(() => [] as Array<{ name: string }>),
]);
const llmNames = new Set(llms.map((l) => l.name));
deps.log(formatProjectDetail(item, projectPrompts, llmNames));
break; break;
} }
case 'users': { case 'users': {

View File

@@ -108,6 +108,77 @@ describe('describe command', () => {
expect(text).not.toContain('Gated:'); expect(text).not.toContain('Gated:');
}); });
it('shows project Llm reference without warning when the name matches a registered Llm', async () => {
  const deps = makeDeps({
    id: 'proj-1',
    name: 'with-llm',
    description: '',
    ownerId: 'user-1',
    proxyModel: 'default',
    llmProvider: 'claude',
    llmModel: 'claude-3-opus',
    createdAt: '2025-01-01',
  });
  // The registry endpoint reports a "claude" entry, so the project's
  // reference resolves cleanly and no warning should be rendered.
  const get = vi.fn(async (path: string) =>
    path === '/api/v1/llms' ? [{ name: 'claude' }] : [],
  );
  deps.client = { get } as unknown as typeof deps.client;
  await createDescribeCommand(deps).parseAsync(['node', 'test', 'project', 'proj-1']);
  const rendered = deps.output.join('\n');
  expect(rendered).toContain('LLM:');
  expect(rendered).toContain('claude');
  expect(rendered).not.toContain('warning:');
});
it('warns on describe project when llmProvider does not resolve to any registered Llm', async () => {
  const deps = makeDeps({
    id: 'proj-1',
    name: 'orphan',
    description: '',
    ownerId: 'user-1',
    proxyModel: 'default',
    llmProvider: 'claude-ghost',
    createdAt: '2025-01-01',
  });
  // Registry only knows "claude" and "gpt-4o" — "claude-ghost" is dangling,
  // so describe must surface the fallback warning next to the name.
  const get = vi.fn(async (path: string) =>
    path === '/api/v1/llms' ? [{ name: 'claude' }, { name: 'gpt-4o' }] : [],
  );
  deps.client = { get } as unknown as typeof deps.client;
  await createDescribeCommand(deps).parseAsync(['node', 'test', 'project', 'proj-1']);
  const rendered = deps.output.join('\n');
  expect(rendered).toContain('claude-ghost');
  expect(rendered).toContain('warning:');
  expect(rendered).toContain('fall back to registry default');
});
it('does not warn when llmProvider is "none" (explicit disable)', async () => {
  const deps = makeDeps({
    id: 'proj-1',
    name: 'no-llm',
    description: '',
    ownerId: 'user-1',
    proxyModel: 'default',
    llmProvider: 'none',
    createdAt: '2025-01-01',
  });
  // Even with zero registered Llms, "none" is a deliberate opt-out rather
  // than a dangling reference — it must render without a warning.
  deps.client = { get: vi.fn(async () => []) } as unknown as typeof deps.client;
  await createDescribeCommand(deps).parseAsync(['node', 'test', 'project', 'proj-1']);
  const rendered = deps.output.join('\n');
  expect(rendered).toContain('LLM:');
  expect(rendered).toContain('none');
  expect(rendered).not.toContain('warning:');
});
it('shows project Plugin Config defaulting to "default" when proxyModel is empty', async () => { it('shows project Plugin Config defaulting to "default" when proxyModel is empty', async () => {
const deps = makeDeps({ const deps = makeDeps({
id: 'proj-1', id: 'proj-1',

View File

@@ -57,9 +57,16 @@ export async function refreshProjectUpstreams(
/** /**
* Fetch a project's LLM config (llmProvider, llmModel) from mcpd. * Fetch a project's LLM config (llmProvider, llmModel) from mcpd.
* These are the project-level "recommendations" — local overrides take priority. *
* Phase 4 redefines `llmProvider` semantically: it names a centralized `Llm`
* resource (see `mcpctl get llms`) — NOT a local provider. Consumers should
* resolve it through mcpd's inference proxy when reachable. The field remains
* a free-form string on the wire for backward compatibility; local overrides
* in `~/.mcpctl/config.json` still take priority, and unknown names fall
* through to the registry default.
*/ */
export interface ProjectLlmConfig { export interface ProjectLlmConfig {
/** Name of an `Llm` resource on mcpd, or 'none' to disable LLM features. */
llmProvider?: string; llmProvider?: string;
llmModel?: string; llmModel?: string;
proxyModel?: string; proxyModel?: string;
@@ -67,6 +74,31 @@ export interface ProjectLlmConfig {
serverOverrides?: Record<string, { proxyModel?: string }>; serverOverrides?: Record<string, { proxyModel?: string }>;
} }
/**
 * Resolve a project's `llmProvider` against mcpd's Llm registry.
 *
 * Possible outcomes:
 * - 'registered'   — an Llm with this name exists on mcpd
 * - 'disabled'     — the value is the literal 'none'
 * - 'unregistered' — no Llm matches; the consumer should fall back to the
 *                    registry default
 * - 'unreachable'  — mcpd could not be queried at all
 */
export type LlmReferenceStatus = 'registered' | 'disabled' | 'unregistered' | 'unreachable';

export async function resolveProjectLlmReference(
  mcpdClient: McpdClient,
  llmProvider: string | undefined,
): Promise<LlmReferenceStatus> {
  // A missing or empty reference can never name a registered Llm.
  if (llmProvider === undefined || llmProvider === '') return 'unregistered';
  // 'none' is a deliberate opt-out, not a lookup failure.
  if (llmProvider === 'none') return 'disabled';
  try {
    await mcpdClient.get(`/api/v1/llms/${encodeURIComponent(llmProvider)}`);
  } catch (err) {
    // The client surfaces HTTP failures as Error messages; distinguish a
    // definite miss (404 / "not found") from transport-level trouble.
    const detail = (err instanceof Error ? err.message : String(err)).toLowerCase();
    return detail.includes('404') || detail.includes('not found')
      ? 'unregistered'
      : 'unreachable';
  }
  return 'registered';
}
export async function fetchProjectLlmConfig( export async function fetchProjectLlmConfig(
mcpdClient: McpdClient, mcpdClient: McpdClient,
projectName: string, projectName: string,

View File

@@ -101,7 +101,16 @@ export function registerProjectMcpEndpoint(app: FastifyInstance, mcpdClient: Mcp
complete: async () => '', complete: async () => '',
available: () => false, available: () => false,
}; };
// Build cache namespace: provider--model--proxymodel // Build cache namespace: provider--model--proxymodel.
// Resolution order:
// 1. local ~/.mcpctl override
// 2. mcpdConfig.llmProvider (Phase 4: name of a centralized Llm)
// 3. local registry default (fast tier → active provider)
// 4. literal 'none'
// If (2) names an Llm the HTTP-mode proxy-model pipeline can route
// through mcpd's /api/v1/llms/:name/infer (pivot lands when the client
// integrates that path); meanwhile the value is still usable as a cache
// key, and the describe-project warning flags stale configs.
const llmProvider = localOverride?.provider ?? mcpdConfig.llmProvider const llmProvider = localOverride?.provider ?? mcpdConfig.llmProvider
?? effectiveRegistry?.getTierProviders('fast')[0] ?? effectiveRegistry?.getTierProviders('fast')[0]
?? effectiveRegistry?.getActiveName() ?? effectiveRegistry?.getActiveName()

View File

@@ -0,0 +1,45 @@
import { describe, it, expect, vi } from 'vitest';
import { resolveProjectLlmReference } from '../src/discovery.js';
import type { McpdClient } from '../src/http/mcpd-client.js';
// Minimal stand-in for McpdClient: only the `get` method is exercised here.
function mockClient(get: (path: string) => Promise<unknown>): McpdClient {
  const fake = { get };
  return fake as unknown as McpdClient;
}
describe('resolveProjectLlmReference', () => {
  // Client whose get() must never fire — used where resolution short-circuits
  // before touching mcpd.
  const neverCalled = () =>
    mockClient(async () => { throw new Error('should not be called'); });

  it('returns "disabled" for the literal string "none"', async () => {
    expect(await resolveProjectLlmReference(neverCalled(), 'none')).toBe('disabled');
  });

  it('returns "unregistered" when llmProvider is empty or undefined', async () => {
    expect(await resolveProjectLlmReference(neverCalled(), undefined)).toBe('unregistered');
    expect(await resolveProjectLlmReference(neverCalled(), '')).toBe('unregistered');
  });

  it('returns "registered" when mcpd returns 200 for the name', async () => {
    const get = vi.fn(async () => ({ name: 'claude' }));
    expect(await resolveProjectLlmReference(mockClient(get), 'claude')).toBe('registered');
    expect(get).toHaveBeenCalledWith('/api/v1/llms/claude');
  });

  it('returns "unregistered" on 404', async () => {
    const notFound = mockClient(async () => { throw new Error('HTTP 404 not found'); });
    expect(await resolveProjectLlmReference(notFound, 'missing')).toBe('unregistered');
  });

  it('returns "unreachable" on other errors (500, network)', async () => {
    for (const failure of ['HTTP 500 internal error', 'ECONNREFUSED']) {
      const broken = mockClient(async () => { throw new Error(failure); });
      expect(await resolveProjectLlmReference(broken, 'x')).toBe('unreachable');
    }
  });

  it('URL-encodes names with special characters', async () => {
    const get = vi.fn(async () => ({}));
    await resolveProjectLlmReference(mockClient(get), 'weird name/with/slashes');
    expect(get).toHaveBeenCalledWith('/api/v1/llms/weird%20name%2Fwith%2Fslashes');
  });
});