feat: eager vLLM warmup and smart page titles in paginate stage
- Add warmup() to LlmProvider interface for eager subprocess startup
- ManagedVllmProvider.warmup() starts vLLM in background on project load
- ProviderRegistry.warmupAll() triggers all managed providers
- NamedProvider proxies warmup() to inner provider
- paginate stage generates LLM-powered descriptive page titles when available, cached by content hash, falls back to generic "Page N"
- project-mcp-endpoint calls warmupAll() on router creation so vLLM is loading while the session initializes

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -43,6 +43,7 @@ function makeProject(overrides: Partial<Project> = {}): Project {
|
||||
description: '',
|
||||
prompt: '',
|
||||
proxyMode: 'direct',
|
||||
proxyModel: '',
|
||||
gated: true,
|
||||
llmProvider: null,
|
||||
llmModel: null,
|
||||
@@ -400,8 +401,8 @@ describe('PromptService', () => {
|
||||
const result = await service.getVisiblePrompts('proj-1', 'sess-1');
|
||||
|
||||
expect(result).toHaveLength(2);
|
||||
expect(result[0]).toEqual({ name: 'approved-1', content: 'A', type: 'prompt' });
|
||||
expect(result[1]).toEqual({ name: 'pending-1', content: 'B', type: 'promptrequest' });
|
||||
expect(result[0]).toMatchObject({ name: 'approved-1', content: 'A', type: 'prompt' });
|
||||
expect(result[1]).toMatchObject({ name: 'pending-1', content: 'B', type: 'promptrequest' });
|
||||
});
|
||||
|
||||
it('should not include pending requests without sessionId', async () => {
|
||||
|
||||
Reference in New Issue
Block a user