feat: Git-based backup system replacing JSON bundle backup/restore

DB is source of truth with git as downstream replica. SSH key generated
on first start, all resource mutations committed as apply-compatible YAML.
Supports manual commit import, conflict resolution (DB wins), disaster
recovery (empty DB restores from git), and timeline branches on restore.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Michal
2026-03-08 01:14:28 +00:00
parent 9fc31e5945
commit 7818cb2194
22 changed files with 2011 additions and 127 deletions

View File

@@ -1,5 +1,4 @@
import { Command } from 'commander';
import fs from 'node:fs';
import type { ApiClient } from '../api-client.js';
export interface BackupDeps {
@@ -7,24 +6,110 @@ export interface BackupDeps {
log: (...args: unknown[]) => void;
}
// Response shape of GET /api/v1/backup/status.
interface BackupStatus {
enabled: boolean; // false when the server has no backup repo configured
repoUrl: string | null; // configured git remote, or null when disabled
gitReachable: boolean; // last known reachability of the git remote
lastSyncAt: string | null; // ISO timestamp of last sync, if any
lastPushAt: string | null; // ISO timestamp of last push, if any
lastError: string | null; // most recent backup error message, if any
pendingCount: number; // queued changes not yet pushed to git
}
// One commit in the backup history (GET /api/v1/backup/log).
interface LogEntry {
hash: string; // full commit hash
date: string; // ISO date string
author: string; // "Name <email>" form; the <email> part is stripped for display
message: string; // commit subject line
manual: boolean; // true when the commit was not made by mcpd itself
}
export function createBackupCommand(deps: BackupDeps): Command {
const cmd = new Command('backup')
.description('Backup mcpctl configuration to a JSON file')
.option('-o, --output <path>', 'output file path', 'mcpctl-backup.json')
.option('-p, --password <password>', 'encrypt sensitive values with password')
.option('-r, --resources <types>', 'resource types to backup (comma-separated: servers,profiles,projects)')
.action(async (options: { output: string; password?: string; resources?: string }) => {
const body: Record<string, unknown> = {};
if (options.password) {
body.password = options.password;
}
if (options.resources) {
body.resources = options.resources.split(',').map((s) => s.trim());
.description('Git-based backup status and management')
.action(async () => {
const status = await deps.client.get<BackupStatus>('/api/v1/backup/status');
if (!status.enabled) {
deps.log('Backup: disabled (set MCPD_BACKUP_REPO to enable)');
return;
}
const bundle = await deps.client.post('/api/v1/backup', body);
fs.writeFileSync(options.output, JSON.stringify(bundle, null, 2), 'utf-8');
deps.log(`Backup saved to ${options.output}`);
deps.log(`Repo: ${status.repoUrl}`);
if (status.gitReachable) {
if (status.pendingCount === 0) {
deps.log('Status: synced');
} else {
deps.log(`Status: ${status.pendingCount} changes pending`);
}
} else {
deps.log('Status: disconnected');
}
if (status.lastSyncAt) {
const ago = timeAgo(status.lastSyncAt);
deps.log(`Last sync: ${ago}`);
}
if (status.lastPushAt) {
const ago = timeAgo(status.lastPushAt);
deps.log(`Last push: ${ago}`);
}
if (status.lastError) {
deps.log(`Error: ${status.lastError}`);
}
});
cmd
.command('log')
.description('Show backup commit history')
.option('-n, --limit <count>', 'number of commits to show', '20')
.action(async (opts: { limit: string }) => {
const { entries } = await deps.client.get<{ entries: LogEntry[] }>(
`/api/v1/backup/log?limit=${opts.limit}`,
);
if (entries.length === 0) {
deps.log('No backup history');
return;
}
// Header
const hashW = 9;
const dateW = 20;
const authorW = 15;
deps.log(
'COMMIT'.padEnd(hashW) +
'DATE'.padEnd(dateW) +
'AUTHOR'.padEnd(authorW) +
'MESSAGE',
);
for (const e of entries) {
const hash = e.hash.slice(0, 7);
const date = new Date(e.date).toLocaleString('en-GB', {
day: '2-digit', month: '2-digit', year: 'numeric',
hour: '2-digit', minute: '2-digit',
});
const author = e.author.replace(/<.*>/, '').trim();
const marker = e.manual ? ' [manual]' : '';
deps.log(
hash.padEnd(hashW) +
date.padEnd(dateW) +
author.slice(0, authorW - 1).padEnd(authorW) +
e.message + marker,
);
}
});
cmd
.command('key')
.description('Show SSH public key for deploy key setup')
.action(async () => {
const { publicKey } = await deps.client.get<{ publicKey: string }>('/api/v1/backup/key');
deps.log(publicKey);
deps.log('');
deps.log('Add this key as a deploy key (with write access) in your Git hosting provider.');
});
return cmd;
@@ -32,49 +117,135 @@ export function createBackupCommand(deps: BackupDeps): Command {
export function createRestoreCommand(deps: BackupDeps): Command {
const cmd = new Command('restore')
.description('Restore mcpctl configuration from a backup file')
.option('-i, --input <path>', 'backup file path', 'mcpctl-backup.json')
.option('-p, --password <password>', 'decryption password for encrypted backups')
.option('-c, --conflict <strategy>', 'conflict resolution: skip, overwrite, fail', 'skip')
.action(async (options: { input: string; password?: string; conflict: string }) => {
if (!fs.existsSync(options.input)) {
deps.log(`Error: File not found: ${options.input}`);
.description('Restore mcpctl state from backup history');
cmd
.command('list')
.description('List available restore points')
.option('-n, --limit <count>', 'number of entries', '30')
.action(async (opts: { limit: string }) => {
const { entries } = await deps.client.get<{ entries: LogEntry[] }>(
`/api/v1/backup/log?limit=${opts.limit}`,
);
if (entries.length === 0) {
deps.log('No restore points available');
return;
}
const raw = fs.readFileSync(options.input, 'utf-8');
const bundle = JSON.parse(raw) as unknown;
deps.log(
'COMMIT'.padEnd(9) +
'DATE'.padEnd(20) +
'USER'.padEnd(15) +
'MESSAGE',
);
const body: Record<string, unknown> = {
bundle,
conflictStrategy: options.conflict,
};
if (options.password) {
body.password = options.password;
for (const e of entries) {
const hash = e.hash.slice(0, 7);
const date = new Date(e.date).toLocaleString('en-GB', {
day: '2-digit', month: '2-digit', year: 'numeric',
hour: '2-digit', minute: '2-digit',
});
const author = e.author.replace(/<.*>/, '').trim();
deps.log(
hash.padEnd(9) +
date.padEnd(20) +
author.slice(0, 14).padEnd(15) +
e.message,
);
}
});
cmd
.command('diff <commit>')
.description('Preview what restoring to a commit would change')
.action(async (commit: string) => {
const preview = await deps.client.post<{
targetCommit: string;
targetDate: string;
targetMessage: string;
added: string[];
removed: string[];
modified: string[];
}>('/api/v1/backup/restore/preview', { commit });
deps.log(`Target: ${preview.targetCommit.slice(0, 7)}${preview.targetMessage}`);
deps.log(`Date: ${new Date(preview.targetDate).toLocaleString()}`);
deps.log('');
if (preview.added.length === 0 && preview.removed.length === 0 && preview.modified.length === 0) {
deps.log('No changes — already at this state.');
return;
}
for (const f of preview.added) deps.log(` + ${f}`);
for (const f of preview.modified) deps.log(` ~ ${f}`);
for (const f of preview.removed) deps.log(` - ${f}`);
deps.log('');
deps.log(`Total: ${preview.added.length} added, ${preview.modified.length} modified, ${preview.removed.length} removed`);
});
cmd
.command('to <commit>')
.description('Restore to a specific commit')
.option('--force', 'skip confirmation', false)
.action(async (commit: string, opts: { force: boolean }) => {
// Show preview first
const preview = await deps.client.post<{
targetCommit: string;
targetDate: string;
targetMessage: string;
added: string[];
removed: string[];
modified: string[];
}>('/api/v1/backup/restore/preview', { commit });
const totalChanges = preview.added.length + preview.removed.length + preview.modified.length;
if (totalChanges === 0) {
deps.log('No changes — already at this state.');
return;
}
deps.log(`Restoring to ${preview.targetCommit.slice(0, 7)}${preview.targetMessage}`);
deps.log(` ${preview.added.length} added, ${preview.modified.length} modified, ${preview.removed.length} removed`);
if (!opts.force) {
deps.log('');
deps.log('Use --force to proceed. Current state will be saved as a timeline branch.');
return;
}
const result = await deps.client.post<{
serversCreated: number;
serversSkipped: number;
profilesCreated: number;
profilesSkipped: number;
projectsCreated: number;
projectsSkipped: number;
branchName: string;
applied: number;
deleted: number;
errors: string[];
}>('/api/v1/restore', body);
}>('/api/v1/backup/restore', { commit });
deps.log('Restore complete:');
deps.log(` Servers: ${result.serversCreated} created, ${result.serversSkipped} skipped`);
deps.log(` Profiles: ${result.profilesCreated} created, ${result.profilesSkipped} skipped`);
deps.log(` Projects: ${result.projectsCreated} created, ${result.projectsSkipped} skipped`);
deps.log('');
deps.log(`Restored: ${result.applied} applied, ${result.deleted} deleted`);
deps.log(`Previous state saved as branch '${result.branchName}'`);
if (result.errors.length > 0) {
deps.log(` Errors:`);
deps.log('Errors:');
for (const err of result.errors) {
deps.log(` - ${err}`);
deps.log(` - ${err}`);
}
}
});
return cmd;
}
/**
 * Render an ISO-8601 timestamp as a coarse human-readable age relative to
 * now, e.g. "5s ago", "3m ago", "7h ago", "2d ago". Days is the largest
 * unit; values are floored, never rounded up.
 */
function timeAgo(iso: string): string {
  const elapsedSecs = Math.floor((Date.now() - new Date(iso).getTime()) / 1000);
  if (elapsedSecs < 60) return `${elapsedSecs}s ago`;
  const elapsedMins = Math.floor(elapsedSecs / 60);
  if (elapsedMins < 60) return `${elapsedMins}m ago`;
  const elapsedHours = Math.floor(elapsedMins / 60);
  if (elapsedHours < 24) return `${elapsedHours}h ago`;
  return `${Math.floor(elapsedHours / 24)}d ago`;
}

View File

@@ -1,5 +1,4 @@
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
import fs from 'node:fs';
import { describe, it, expect, vi, beforeEach } from 'vitest';
import { createBackupCommand, createRestoreCommand } from '../../src/commands/backup.js';
const mockClient = {
@@ -16,61 +15,97 @@ describe('backup command', () => {
vi.resetAllMocks();
});
afterEach(() => {
// Clean up any created files
try { fs.unlinkSync('test-backup.json'); } catch { /* ignore */ }
});
it('creates backup command', () => {
const cmd = createBackupCommand({ client: mockClient as never, log });
expect(cmd.name()).toBe('backup');
});
it('calls API and writes file', async () => {
const bundle = { version: '1', servers: [], profiles: [], projects: [] };
mockClient.post.mockResolvedValue(bundle);
const cmd = createBackupCommand({ client: mockClient as never, log });
await cmd.parseAsync(['-o', 'test-backup.json'], { from: 'user' });
expect(mockClient.post).toHaveBeenCalledWith('/api/v1/backup', {});
expect(fs.existsSync('test-backup.json')).toBe(true);
expect(log).toHaveBeenCalledWith(expect.stringContaining('test-backup.json'));
});
it('passes password when provided', async () => {
mockClient.post.mockResolvedValue({ version: '1', servers: [], profiles: [], projects: [] });
const cmd = createBackupCommand({ client: mockClient as never, log });
await cmd.parseAsync(['-o', 'test-backup.json', '-p', 'secret'], { from: 'user' });
expect(mockClient.post).toHaveBeenCalledWith('/api/v1/backup', { password: 'secret' });
});
it('passes resource filter', async () => {
mockClient.post.mockResolvedValue({ version: '1', servers: [], profiles: [], projects: [] });
const cmd = createBackupCommand({ client: mockClient as never, log });
await cmd.parseAsync(['-o', 'test-backup.json', '-r', 'servers,profiles'], { from: 'user' });
expect(mockClient.post).toHaveBeenCalledWith('/api/v1/backup', {
resources: ['servers', 'profiles'],
it('shows status when enabled', async () => {
mockClient.get.mockResolvedValue({
enabled: true,
repoUrl: 'ssh://git@10.0.0.194:2222/michal/mcp-backup.git',
gitReachable: true,
lastSyncAt: new Date().toISOString(),
lastPushAt: null,
lastError: null,
pendingCount: 0,
});
const cmd = createBackupCommand({ client: mockClient as never, log });
await cmd.parseAsync([], { from: 'user' });
expect(mockClient.get).toHaveBeenCalledWith('/api/v1/backup/status');
expect(log).toHaveBeenCalledWith(expect.stringContaining('ssh://git@10.0.0.194:2222/michal/mcp-backup.git'));
expect(log).toHaveBeenCalledWith(expect.stringContaining('synced'));
});
it('shows disabled when not configured', async () => {
mockClient.get.mockResolvedValue({
enabled: false,
repoUrl: null,
gitReachable: false,
lastSyncAt: null,
lastPushAt: null,
lastError: null,
pendingCount: 0,
});
const cmd = createBackupCommand({ client: mockClient as never, log });
await cmd.parseAsync([], { from: 'user' });
expect(log).toHaveBeenCalledWith(expect.stringContaining('disabled'));
});
it('shows pending count', async () => {
mockClient.get.mockResolvedValue({
enabled: true,
repoUrl: 'ssh://git@host/repo.git',
gitReachable: true,
lastSyncAt: null,
lastPushAt: null,
lastError: null,
pendingCount: 5,
});
const cmd = createBackupCommand({ client: mockClient as never, log });
await cmd.parseAsync([], { from: 'user' });
expect(log).toHaveBeenCalledWith(expect.stringContaining('5 changes pending'));
});
it('shows SSH public key', async () => {
mockClient.get.mockResolvedValue({ publicKey: 'ssh-ed25519 AAAA... mcpd@mcpctl.local' });
const cmd = createBackupCommand({ client: mockClient as never, log });
await cmd.parseAsync(['key'], { from: 'user' });
expect(mockClient.get).toHaveBeenCalledWith('/api/v1/backup/key');
expect(log).toHaveBeenCalledWith('ssh-ed25519 AAAA... mcpd@mcpctl.local');
});
it('shows commit log', async () => {
mockClient.get.mockResolvedValue({
entries: [
{ hash: 'abc1234567890', date: '2026-03-08T10:00:00Z', author: 'mcpd <mcpd@mcpctl.local>', message: 'Update server grafana', manual: false },
{ hash: 'def4567890123', date: '2026-03-07T09:00:00Z', author: 'Michal <michal@test.com>', message: 'Manual fix', manual: true },
],
});
const cmd = createBackupCommand({ client: mockClient as never, log });
await cmd.parseAsync(['log'], { from: 'user' });
expect(mockClient.get).toHaveBeenCalledWith('/api/v1/backup/log?limit=20');
// Header
expect(log).toHaveBeenCalledWith(expect.stringContaining('COMMIT'));
// Entries
expect(log).toHaveBeenCalledWith(expect.stringContaining('abc1234'));
expect(log).toHaveBeenCalledWith(expect.stringContaining('[manual]'));
});
});
describe('restore command', () => {
const testFile = 'test-restore-input.json';
beforeEach(() => {
vi.resetAllMocks();
fs.writeFileSync(testFile, JSON.stringify({
version: '1', servers: [], profiles: [], projects: [],
}));
});
afterEach(() => {
try { fs.unlinkSync(testFile); } catch { /* ignore */ }
});
it('creates restore command', () => {
@@ -78,43 +113,105 @@ describe('restore command', () => {
expect(cmd.name()).toBe('restore');
});
it('reads file and calls API', async () => {
mockClient.post.mockResolvedValue({
serversCreated: 1, serversSkipped: 0,
profilesCreated: 0, profilesSkipped: 0,
projectsCreated: 0, projectsSkipped: 0,
errors: [],
it('lists restore points', async () => {
mockClient.get.mockResolvedValue({
entries: [
{ hash: 'abc1234567890', date: '2026-03-08T10:00:00Z', author: 'mcpd <mcpd@mcpctl.local>', message: 'Sync' },
],
});
const cmd = createRestoreCommand({ client: mockClient as never, log });
await cmd.parseAsync(['-i', testFile], { from: 'user' });
await cmd.parseAsync(['list'], { from: 'user' });
expect(mockClient.post).toHaveBeenCalledWith('/api/v1/restore', expect.objectContaining({
bundle: expect.objectContaining({ version: '1' }),
conflictStrategy: 'skip',
}));
expect(log).toHaveBeenCalledWith('Restore complete:');
expect(mockClient.get).toHaveBeenCalledWith('/api/v1/backup/log?limit=30');
expect(log).toHaveBeenCalledWith(expect.stringContaining('abc1234'));
});
it('reports errors from restore', async () => {
it('shows restore diff preview', async () => {
mockClient.post.mockResolvedValue({
serversCreated: 0, serversSkipped: 0,
profilesCreated: 0, profilesSkipped: 0,
projectsCreated: 0, projectsSkipped: 0,
errors: ['Server "x" already exists'],
targetCommit: 'abc1234567890',
targetDate: '2026-03-08T10:00:00Z',
targetMessage: 'Snapshot',
added: ['servers/new.yaml'],
removed: ['servers/old.yaml'],
modified: ['projects/default.yaml'],
});
const cmd = createRestoreCommand({ client: mockClient as never, log });
await cmd.parseAsync(['-i', testFile], { from: 'user' });
await cmd.parseAsync(['diff', 'abc1234'], { from: 'user' });
expect(log).toHaveBeenCalledWith(expect.stringContaining('Errors'));
expect(mockClient.post).toHaveBeenCalledWith('/api/v1/backup/restore/preview', { commit: 'abc1234' });
expect(log).toHaveBeenCalledWith(expect.stringContaining('+ servers/new.yaml'));
expect(log).toHaveBeenCalledWith(expect.stringContaining('- servers/old.yaml'));
expect(log).toHaveBeenCalledWith(expect.stringContaining('~ projects/default.yaml'));
});
it('logs error for missing file', async () => {
const cmd = createRestoreCommand({ client: mockClient as never, log });
await cmd.parseAsync(['-i', 'nonexistent.json'], { from: 'user' });
it('requires --force for restore', async () => {
mockClient.post.mockResolvedValue({
targetCommit: 'abc1234567890',
targetDate: '2026-03-08T10:00:00Z',
targetMessage: 'Snapshot',
added: ['servers/new.yaml'],
removed: [],
modified: [],
});
expect(log).toHaveBeenCalledWith(expect.stringContaining('not found'));
expect(mockClient.post).not.toHaveBeenCalled();
const cmd = createRestoreCommand({ client: mockClient as never, log });
await cmd.parseAsync(['to', 'abc1234'], { from: 'user' });
// Should show preview but NOT call restore endpoint
expect(mockClient.post).toHaveBeenCalledWith('/api/v1/backup/restore/preview', { commit: 'abc1234' });
expect(mockClient.post).not.toHaveBeenCalledWith('/api/v1/backup/restore', expect.anything());
expect(log).toHaveBeenCalledWith(expect.stringContaining('--force'));
});
it('executes restore with --force', async () => {
// First call: preview, second call: restore
mockClient.post
.mockResolvedValueOnce({
targetCommit: 'abc1234567890',
targetDate: '2026-03-08T10:00:00Z',
targetMessage: 'Snapshot',
added: ['servers/new.yaml'],
removed: [],
modified: [],
})
.mockResolvedValueOnce({
branchName: 'timeline/20260308-100000',
applied: 1,
deleted: 0,
errors: [],
});
const cmd = createRestoreCommand({ client: mockClient as never, log });
await cmd.parseAsync(['to', 'abc1234', '--force'], { from: 'user' });
expect(mockClient.post).toHaveBeenCalledWith('/api/v1/backup/restore', { commit: 'abc1234' });
expect(log).toHaveBeenCalledWith(expect.stringContaining('1 applied'));
expect(log).toHaveBeenCalledWith(expect.stringContaining('timeline/20260308-100000'));
});
it('reports restore errors', async () => {
mockClient.post
.mockResolvedValueOnce({
targetCommit: 'abc1234567890',
targetDate: '2026-03-08T10:00:00Z',
targetMessage: 'Snapshot',
added: [],
removed: [],
modified: ['servers/broken.yaml'],
})
.mockResolvedValueOnce({
branchName: 'timeline/20260308-100000',
applied: 0,
deleted: 0,
errors: ['Failed to apply servers/broken.yaml: invalid YAML'],
});
const cmd = createRestoreCommand({ client: mockClient as never, log });
await cmd.parseAsync(['to', 'abc1234', '--force'], { from: 'user' });
expect(log).toHaveBeenCalledWith('Errors:');
expect(log).toHaveBeenCalledWith(expect.stringContaining('invalid YAML'));
});
});

View File

@@ -0,0 +1,14 @@
-- Backup pending queue for git-based backup sync
-- One row per DB mutation waiting to be committed/pushed to the backup repo.
-- "yamlContent" holds the serialized resource snapshot; NULL for deletes.
-- "action" is one of 'create' | 'update' | 'delete' (see Prisma model).
CREATE TABLE "BackupPending" (
"id" TEXT NOT NULL,
"resourceKind" TEXT NOT NULL,
"resourceName" TEXT NOT NULL,
"action" TEXT NOT NULL,
"userName" TEXT NOT NULL,
"yamlContent" TEXT,
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
CONSTRAINT "BackupPending_pkey" PRIMARY KEY ("id")
);
-- Index supports draining the queue in insertion order.
CREATE INDEX "BackupPending_createdAt_idx" ON "BackupPending"("createdAt");

View File

@@ -299,6 +299,20 @@ model AuditEvent {
@@index([userName])
}
// ── Backup Pending Queue ──
// One row per DB mutation awaiting commit/push to the git backup repo;
// rows are consumed by the backup sync (see GitBackupService).
model BackupPending {
id String @id @default(cuid())
resourceKind String // e.g. 'server' | 'secret' | 'project' | ... (BackupKind)
resourceName String // resource name; email for users
action String // 'create' | 'update' | 'delete'
userName String // user who made the change, or 'system'
yamlContent String? @db.Text // serialized resource snapshot; null for deletes
createdAt DateTime @default(now()) // used to drain the queue in order
@@index([createdAt])
}
// ── Audit Logs ──
model AuditLog {

View File

@@ -63,7 +63,10 @@ import {
registerAuditEventRoutes,
} from './routes/index.js';
import { registerPromptRoutes } from './routes/prompts.js';
import { registerGitBackupRoutes } from './routes/git-backup.js';
import { PromptService } from './services/prompt.service.js';
import { GitBackupService } from './services/backup/git-backup.service.js';
import type { BackupKind } from './services/backup/yaml-serializer.js';
import { ResourceRuleRegistry } from './validation/resource-rules.js';
import { systemPromptVarsRule } from './validation/rules/system-prompt-vars.js';
@@ -389,6 +392,84 @@ async function main(): Promise<void> {
registerGroupRoutes(app, groupService);
registerPromptRoutes(app, promptService, projectRepo);
// ── Git-based backup ──
const gitBackup = new GitBackupService(prisma);
// Hook: enqueue backup after successful mutations
if (gitBackup.enabled) {
// Maps the /api/v1/<segment> collection name to the backup resource kind.
// Segments not listed here (e.g. auth, backup itself) are never enqueued.
const kindFromSegment: Record<string, BackupKind | undefined> = {
servers: 'server', secrets: 'secret', projects: 'project',
templates: 'template', users: 'user', groups: 'group',
rbac: 'rbac', prompts: 'prompt',
};
app.addHook('onSend', async (request, reply, payload) => {
// Only successful, mutating requests are mirrored to git.
if (reply.statusCode >= 400) return payload;
const method = request.method;
if (method === 'GET' || method === 'HEAD') return payload;
const urlMatch = request.url.match(/^\/api\/v1\/([a-z-]+)(?:\/([^/?]+))?/);
if (!urlMatch) return payload;
const kind = kindFromSegment[urlMatch[1]!];
if (!kind) return payload;
let action: 'create' | 'update' | 'delete';
if (method === 'DELETE') action = 'delete';
else if (method === 'POST') action = 'create';
else action = 'update';
// Get resource name: from URL for update/delete, from response body for create
const nameField = kind === 'user' ? 'email' : 'name';
let resourceName = urlMatch[2];
if (!resourceName && typeof payload === 'string') {
try {
const body = JSON.parse(payload);
resourceName = body[nameField];
} catch { /* ignore parse errors */ }
}
if (!resourceName) return payload;
const userName = request.userId ?? 'system';
// Fire-and-forget: an enqueue failure is logged but never fails the request.
gitBackup.enqueue(kind, resourceName, action, userName).catch((err) => {
app.log.error({ err }, `Git backup enqueue failed for ${kind}/${resourceName}`);
});
return payload;
});
}
if (gitBackup.enabled) {
// Import callback: apply a parsed YAML doc to the DB via services
const importResource = async (kind: BackupKind, _name: string, doc: Record<string, unknown>) => {
const data = { ...doc };
delete data.kind; // strip the kind field before passing to service
switch (kind) {
case 'server': await serverService.upsertByName(data); break;
case 'secret': await secretService.upsertByName(data); break;
case 'project': await projectService.upsertByName(data, 'system'); break;
case 'user': await userService.upsertByEmail(data); break;
case 'group': await groupService.upsertByName(data); break;
case 'rbac': await rbacDefinitionService.upsertByName(data); break;
case 'prompt': await promptService.upsertByName(data); break;
case 'template': await templateService.upsertByName(data); break;
}
};
// Delete callback: remove a resource by its backup name (email for users).
const deleteResource = async (kind: BackupKind, name: string) => {
switch (kind) {
case 'server': await serverService.deleteByName(name); break;
case 'secret': await secretService.deleteByName(name); break;
case 'project': await projectService.deleteByName(name); break;
case 'user': await userService.deleteByEmail(name); break;
case 'group': await groupService.deleteByName(name); break;
case 'rbac': await rbacDefinitionService.deleteByName(name); break;
case 'prompt': await promptService.deleteByName(name); break;
case 'template': await templateService.deleteByName(name); break;
}
};
gitBackup.setCallbacks(importResource, deleteResource);
registerGitBackupRoutes(app, gitBackup);
// Init async — don't block server startup
gitBackup.init().catch((err) => app.log.error({ err }, 'Git backup init failed'));
}
// ── RBAC list filtering hook ──
// Filters array responses to only include resources the user is allowed to see.
app.addHook('preSerialization', async (request, _reply, payload) => {
@@ -428,6 +509,7 @@ async function main(): Promise<void> {
disconnectDb: async () => {
clearInterval(syncTimer);
healthProbeRunner.stop();
gitBackup.stop();
await prisma.$disconnect();
},
});

View File

@@ -8,6 +8,7 @@ export interface IUserRepository {
findById(id: string): Promise<SafeUser | null>;
findByEmail(email: string, includeHash?: boolean): Promise<SafeUser | null> | Promise<User | null>;
create(data: { email: string; passwordHash: string; name?: string; role?: string }): Promise<SafeUser>;
update(id: string, data: { name?: string; role?: string }): Promise<SafeUser>;
delete(id: string): Promise<void>;
count(): Promise<number>;
}
@@ -66,6 +67,17 @@ export class UserRepository implements IUserRepository {
});
}
/**
 * Patch a user's mutable fields. Only fields the caller explicitly supplied
 * are written, so an omitted field never clobbers the stored value.
 * Returns the updated row with the safe (hash-free) projection.
 */
async update(id: string, data: { name?: string; role?: string }): Promise<SafeUser> {
  const patch: Record<string, unknown> = {};
  for (const field of ['name', 'role'] as const) {
    const value = data[field];
    if (value !== undefined) patch[field] = value;
  }
  return this.prisma.user.update({ where: { id }, data: patch, select: safeSelect });
}
// Hard-delete the user row by id.
// NOTE(review): prisma.user.delete rejects when no row matches — presumably
// callers translate that into a 404; verify at call sites.
async delete(id: string): Promise<void> {
await this.prisma.user.delete({ where: { id } });
}

View File

@@ -0,0 +1,53 @@
import type { FastifyInstance } from 'fastify';
import type { GitBackupService } from '../services/backup/git-backup.service.js';
/**
 * Register the git-backup HTTP API on the given Fastify instance.
 * All routes delegate to GitBackupService; no state is kept here.
 */
export function registerGitBackupRoutes(app: FastifyInstance, gitBackup: GitBackupService): void {
  // GET /api/v1/backup/status — sync status (enabled flag, repo URL,
  // reachability, timestamps, pending-change count).
  app.get('/api/v1/backup/status', async () => {
    return gitBackup.getStatus();
  });

  // GET /api/v1/backup/key — SSH public key for deploy-key setup.
  app.get('/api/v1/backup/key', async (_req, reply) => {
    const key = await gitBackup.getPublicKey();
    if (!key) {
      return reply.code(404).send({ error: 'SSH key not generated yet' });
    }
    return { publicKey: key };
  });

  // GET /api/v1/backup/log — commit history, newest first.
  app.get<{ Querystring: { limit?: string } }>('/api/v1/backup/log', async (request) => {
    // Guard against NaN / non-positive values from the query string: fall
    // back to the default of 50 instead of passing garbage on to git.
    const parsed = parseInt(request.query.limit ?? '50', 10);
    const limit = Number.isFinite(parsed) && parsed > 0 ? parsed : 50;
    const entries = await gitBackup.getLog(limit);
    return { entries };
  });

  // POST /api/v1/backup/restore/preview — dry-run: what restoring to the
  // given commit would add/remove/modify.
  app.post<{ Body: { commit: string } }>('/api/v1/backup/restore/preview', async (request, reply) => {
    const { commit } = request.body ?? {};
    if (!commit) {
      return reply.code(400).send({ error: 'commit is required' });
    }
    try {
      const preview = await gitBackup.previewRestore(commit);
      return preview;
    } catch (err) {
      // An unknown/invalid ref is a client error, not a server fault.
      return reply.code(400).send({ error: `Invalid commit: ${err}` });
    }
  });

  // POST /api/v1/backup/restore — restore DB state to the given commit.
  app.post<{ Body: { commit: string } }>('/api/v1/backup/restore', async (request, reply) => {
    const { commit } = request.body ?? {};
    if (!commit) {
      return reply.code(400).send({ error: 'commit is required' });
    }
    try {
      const result = await gitBackup.restoreTo(commit);
      return result;
    } catch (err) {
      return reply.code(500).send({ error: `Restore failed: ${err}` });
    }
  });
}

View File

@@ -0,0 +1,723 @@
/**
* Git-based backup service.
*
* DB is always source of truth. Git is a downstream replica.
* The ONLY path from git → DB is explicit restore or importing manual commits.
*
* Manual commits (not by mcpd) are detected and imported if they don't conflict
* with pending DB changes. Conflicts are resolved in favor of the DB.
*/
import { execFile as execFileCb } from 'child_process';
import { promisify } from 'util';
import { mkdir, readFile, writeFile, unlink, readdir, access } from 'fs/promises';
import { join, dirname } from 'path';
import yaml from 'js-yaml';
import type { PrismaClient } from '@prisma/client';
import {
serializeAll, resourceToYaml, resourcePath, parseResourcePath,
BACKUP_KINDS, APPLY_ORDER, type BackupKind,
} from './yaml-serializer.js';
const execFile = promisify(execFileCb);
// Root of on-disk backup state (SSH key + repo checkout); overridable via env.
const BACKUP_DIR = process.env.MCPD_BACKUP_DIR ?? '/data/backup';
// Local working clone of the backup repository.
const REPO_DIR = join(BACKUP_DIR, 'repo');
// Dedicated ed25519 key used for git-over-SSH; public half served via the API.
const SSH_KEY_PATH = join(BACKUP_DIR, 'id_ed25519');
// Committer email identifying mcpd's own commits; anything else is "manual".
const MCPD_EMAIL = 'mcpd@mcpctl.local';
// Period of the background sync loop.
const SYNC_INTERVAL_MS = 30_000;
// Snapshot of backup health, served by GET /api/v1/backup/status.
export interface BackupStatus {
enabled: boolean; // false when no MCPD_BACKUP_REPO is configured
repoUrl: string | null; // configured remote, or null when disabled
gitReachable: boolean; // last known reachability of the remote
lastSyncAt: string | null; // ISO timestamp of the last sync, if any
lastPushAt: string | null; // ISO timestamp of the last push, if any
lastError: string | null; // most recent error message, if any
pendingCount: number; // rows waiting in the BackupPending queue
}
// One entry of the backup repo's commit history (see getLog()).
export interface BackupLogEntry {
hash: string; // full commit hash
date: string; // ISO author date
author: string; // "Name <email>" form
message: string; // commit subject line
manual: boolean; // true if not committed by mcpd
}
// Result of previewRestore(): file-level changes a restore would make,
// expressed from the restore's perspective (relative to current HEAD).
export interface RestorePreview {
targetCommit: string; // hash of the commit being restored to
targetDate: string; // ISO author date of the target commit
targetMessage: string; // subject line of the target commit
added: string[]; // repo paths a restore would (re)create
removed: string[]; // repo paths a restore would delete
modified: string[]; // repo paths a restore would change
}
/** Callback to apply a parsed YAML resource to the DB. */
export type ImportResourceFn = (kind: BackupKind, name: string, doc: Record<string, unknown>) => Promise<void>;
/** Callback to delete a resource from the DB. */
export type DeleteResourceFn = (kind: BackupKind, name: string) => Promise<void>;
export class GitBackupService {
// Remote repo URL; null disables the whole subsystem (see `enabled`).
private repoUrl: string | null;
// True once init() completed key/repo setup successfully.
private initialized = false;
private gitReachable = false;
private lastSyncAt: Date | null = null;
private lastPushAt: Date | null = null;
private lastError: string | null = null;
// Handle for the periodic sync loop (interval = SYNC_INTERVAL_MS).
private syncTimer: ReturnType<typeof setInterval> | null = null;
// NOTE(review): presumably a guard against overlapping sync ticks; it is
// set/cleared in the sync loop, which is not visible here — confirm.
private syncing = false;
// DB apply/delete callbacks, injected from main.ts once services exist.
private importResource: ImportResourceFn | null = null;
private deleteResource: DeleteResourceFn | null = null;
constructor(
private readonly prisma: PrismaClient,
repoUrl?: string,
) {
// An explicit repoUrl (e.g. in tests) wins over the env var.
this.repoUrl = repoUrl ?? process.env.MCPD_BACKUP_REPO ?? null;
}
// The subsystem is active only when a remote repo is configured.
get enabled(): boolean {
return this.repoUrl !== null;
}
/** Set callbacks for importing/deleting resources (called from main.ts after services are ready). */
setCallbacks(importFn: ImportResourceFn, deleteFn: DeleteResourceFn): void {
this.importResource = importFn;
this.deleteResource = deleteFn;
}
/**
 * Initialize: generate SSH key, clone/init repo, initial sync.
 * Never throws — failures are recorded in lastError so mcpd can still start
 * when git is unavailable; the sync loop is started regardless so a
 * later-reachable remote can recover.
 */
async init(): Promise<void> {
if (!this.enabled) {
console.log('[git-backup] Disabled (no MCPD_BACKUP_REPO configured)');
return;
}
console.log(`[git-backup] Initializing with repo: ${this.repoUrl}`);
try {
await mkdir(BACKUP_DIR, { recursive: true });
await this.ensureSshKey();
await this.initRepo();
await this.initialSync();
this.initialized = true;
console.log('[git-backup] Initialized successfully');
} catch (err) {
this.lastError = String(err);
console.error(`[git-backup] Init failed (will retry in sync loop): ${err}`);
// Don't throw — mcpd should still start even if git is unavailable
}
this.startSyncLoop();
}
/** Stop the background sync loop. */
stop(): void {
if (this.syncTimer) {
clearInterval(this.syncTimer);
this.syncTimer = null;
}
}
// ── Public API ──
/**
 * Enqueue a resource change for git sync. Called by service layer after DB mutations.
 * The YAML snapshot is captured NOW so the queued row stays self-contained
 * even if the resource changes again before the sync loop drains it.
 */
async enqueue(kind: BackupKind, name: string, action: 'create' | 'update' | 'delete', userName: string): Promise<void> {
if (!this.enabled) return;
let yamlContent: string | null = null;
if (action !== 'delete') {
try {
yamlContent = await this.serializeResource(kind, name);
} catch (err) {
// Serialization failure drops this change (logged) rather than
// queueing a broken row; the next successful mutation re-snapshots.
console.error(`[git-backup] Failed to serialize ${kind}/${name}: ${err}`);
return;
}
}
await this.prisma.backupPending.create({
data: { resourceKind: kind, resourceName: name, action, userName, yamlContent },
});
}
/** Get current backup status. */
async getStatus(): Promise<BackupStatus> {
const pendingCount = this.enabled
? await this.prisma.backupPending.count()
: 0;
return {
enabled: this.enabled,
repoUrl: this.repoUrl,
gitReachable: this.gitReachable,
lastSyncAt: this.lastSyncAt?.toISOString() ?? null,
lastPushAt: this.lastPushAt?.toISOString() ?? null,
lastError: this.lastError,
pendingCount,
};
}
/** Read the SSH public key from disk; null when it has not been generated. */
async getPublicKey(): Promise<string | null> {
  const publicKeyPath = `${SSH_KEY_PATH}.pub`;
  try {
    const contents = await readFile(publicKeyPath, 'utf-8');
    return contents.trim();
  } catch {
    // Missing/unreadable key file is an expected pre-init state, not an error.
    return null;
  }
}
/**
 * Get commit history (newest first, up to `limit` entries).
 *
 * Fix: the subject (%s) is placed LAST in the --format string and everything
 * after the fourth separator is re-joined, so a commit message containing
 * '|' no longer shifts the fields (which previously corrupted the `manual`
 * flag and truncated the message).
 */
async getLog(limit = 50): Promise<BackupLogEntry[]> {
  if (!this.initialized) return [];
  try {
    const raw = await this.git('log', `--max-count=${limit}`, '--format=%H|%aI|%ce|%an <%ae>|%s');
    if (!raw) return [];
    return raw.split('\n').map((line) => {
      const parts = line.split('|');
      const [hash, date, committerEmail, author] = parts;
      // Re-join the tail: the subject itself may legitimately contain '|'.
      const message = parts.slice(4).join('|');
      return {
        hash: hash!,
        date: date!,
        author: author!,
        message,
        manual: committerEmail !== MCPD_EMAIL,
      };
    });
  } catch {
    // A broken/empty repo yields an empty history rather than an error.
    return [];
  }
}
/**
 * Preview what a restore to a specific commit would change, as repo file
 * paths grouped into added/removed/modified from the restore's perspective.
 *
 * Fixes: split the log info only on the FIRST '|' so a subject containing
 * '|' is not truncated; pass --no-renames so git emits only A/D/M rows
 * (rename rows carry two paths and would be misparsed or dropped).
 */
async previewRestore(commitHash: string): Promise<RestorePreview> {
  const info = await this.git('log', '-1', '--format=%aI|%s', commitHash);
  const sep = info.indexOf('|');
  const date = sep === -1 ? info : info.slice(0, sep);
  const message = sep === -1 ? '' : info.slice(sep + 1);
  const diff = await this.git('diff', '--name-status', '--no-renames', `${commitHash}..HEAD`);
  const added: string[] = [];
  const removed: string[] = [];
  const modified: string[] = [];
  for (const line of diff.split('\n')) {
    if (!line) continue;
    const [status, file] = line.split('\t');
    // Ignore non-resource files (README, etc.) that don't match the layout.
    if (!file || !parseResourcePath(file)) continue;
    // Diff direction is commitHash→HEAD, so statuses invert for a restore:
    // a file added since the target must be removed, and vice versa.
    if (status === 'A') removed.push(file);
    else if (status === 'D') added.push(file);
    else if (status === 'M') modified.push(file);
  }
  return { targetCommit: commitHash, targetDate: date, targetMessage: message, added, removed, modified };
}
/**
 * Restore the database (and checkout) to the state captured at `commitHash`.
 * Destructive on the DB side: resources present now but absent at the target
 * commit are deleted.
 *
 * Sequence: save current history on a timeline branch → hard-reset the
 * checkout → re-import every resource file in APPLY_ORDER (dependencies
 * first) → delete DB resources with no corresponding file → clear the
 * pending queue → commit and push. Per-resource failures are collected in
 * `errors` and do not abort the restore.
 *
 * @param commitHash commit to restore to
 * @returns the timeline branch name, applied/deleted counts, and errors
 * @throws Error when the import/delete callbacks were never registered
 */
async restoreTo(commitHash: string): Promise<{ branchName: string; applied: number; deleted: number; errors: string[] }> {
  if (!this.importResource || !this.deleteResource) {
    throw new Error('Import/delete callbacks not set');
  }
  // 1. Save current timeline as a branch (so the pre-restore history is never lost)
  const branchName = `timeline-${new Date().toISOString().replace(/[:.]/g, '-').slice(0, 19)}`;
  await this.git('branch', branchName);
  console.log(`[git-backup] Saved current state as branch '${branchName}'`);
  // 2. Reset to target commit
  await this.git('reset', '--hard', commitHash);
  // 3. Read all YAML files from the checkout
  const files = await this.readRepoFiles();
  const errors: string[] = [];
  // 4. Collect what exists in DB now (keyed by repo-relative path for set comparison)
  const dbFiles = await serializeAll(this.prisma);
  const dbResources = new Set<string>();
  for (const path of dbFiles.keys()) {
    dbResources.add(path);
  }
  // 5. Apply all files from the target commit (in dependency order)
  let applied = 0;
  const repoResources = new Set<string>();
  for (const kind of APPLY_ORDER) {
    for (const [filePath, content] of files) {
      const parsed = parseResourcePath(filePath);
      if (!parsed || parsed.kind !== kind) continue;
      repoResources.add(filePath);
      try {
        const doc = yaml.load(content) as Record<string, unknown>;
        if (!doc || typeof doc !== 'object') continue;
        await this.importResource(kind, parsed.name, doc);
        applied++;
      } catch (err) {
        errors.push(`${filePath}: ${err}`);
      }
    }
  }
  // 6. Delete resources not in the target commit
  let deleted = 0;
  for (const path of dbResources) {
    if (!repoResources.has(path)) {
      const parsed = parseResourcePath(path);
      if (!parsed) continue;
      try {
        await this.deleteResource(parsed.kind, parsed.name);
        deleted++;
      } catch (err) {
        errors.push(`delete ${path}: ${err}`);
      }
    }
  }
  // 7. Clear pending queue (we just wrote everything)
  await this.prisma.backupPending.deleteMany();
  // 8. Commit the restore action
  await this.git('add', '-A');
  const hasChanges = await this.hasUncommittedChanges();
  if (hasChanges) {
    await this.gitCommit(`restore to ${commitHash.slice(0, 7)} (from branch ${branchName})`, 'mcpd');
  }
  // 9. Push
  await this.tryPush();
  return { branchName, applied, deleted, errors };
}
// ── Git Operations ──
/** Run a git command in the backup repo; returns trimmed stdout. 30s timeout. */
private async git(...args: string[]): Promise<string> {
  const result = await execFile('git', args, { cwd: REPO_DIR, env: this.gitEnv(), timeout: 30_000 });
  return result.stdout.trim();
}
/** Commit staged changes, attributing authorship to the given user. */
private async gitCommit(message: string, userName: string): Promise<void> {
  await execFile('git', ['commit', '-m', message], {
    cwd: REPO_DIR,
    env: this.gitEnv(userName),
    timeout: 10_000,
  });
}
/**
 * Environment for git child processes: pins the SSH identity and sets git
 * identities. The committer is always mcpd — that is how getLog and
 * importManualCommits distinguish our commits from manual ones — while the
 * author reflects the acting user when one is supplied.
 */
private gitEnv(authorName?: string): NodeJS.ProcessEnv {
  const authorEmail =
    authorName && authorName !== 'mcpd' ? `${authorName}@mcpctl.local` : MCPD_EMAIL;
  return {
    ...process.env,
    GIT_SSH_COMMAND: `ssh -i ${SSH_KEY_PATH} -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -o BatchMode=yes`,
    GIT_AUTHOR_NAME: authorName ?? 'mcpd',
    GIT_AUTHOR_EMAIL: authorEmail,
    GIT_COMMITTER_NAME: 'mcpd',
    GIT_COMMITTER_EMAIL: MCPD_EMAIL,
  };
}
/** True when `git status --porcelain` reports any staged or unstaged change. */
private async hasUncommittedChanges(): Promise<boolean> {
  const porcelain = await this.git('status', '--porcelain');
  return porcelain !== '';
}
// ── SSH Key ──
/** Generate an ed25519 SSH keypair on first start; reuse the existing one otherwise. */
private async ensureSshKey(): Promise<void> {
  let keyExists = true;
  try {
    await access(SSH_KEY_PATH);
  } catch {
    keyExists = false;
  }
  if (keyExists) {
    console.log('[git-backup] SSH key exists');
    return;
  }
  console.log('[git-backup] Generating SSH key...');
  // -N '' → no passphrase, so the daemon can use the key unattended.
  await execFile('ssh-keygen', ['-t', 'ed25519', '-f', SSH_KEY_PATH, '-N', '', '-C', MCPD_EMAIL], {
    timeout: 10_000,
  });
  console.log('[git-backup] SSH key generated');
}
// ── Repo Init ──
/**
 * Ensure REPO_DIR contains a git checkout of the backup repo.
 * Prefers cloning the remote; when the clone fails (empty remote, network,
 * auth), falls back to a local `git init` with origin configured so pushes
 * can succeed once the remote becomes reachable. The fallback creates a
 * `.gitkeep` initial commit so a branch exists.
 *
 * NOTE(review): the local-init fallback commits on git's configured default
 * branch (often 'master'), while syncCycle merges 'origin/main' — confirm
 * the default branch is forced to 'main' (e.g. `git init -b main` or global
 * init.defaultBranch), otherwise the first merge after reconnect may fail.
 */
private async initRepo(): Promise<void> {
  try {
    await access(join(REPO_DIR, '.git'));
    console.log('[git-backup] Repo already cloned');
    return;
  } catch {
    // Not cloned yet
  }
  await mkdir(REPO_DIR, { recursive: true });
  try {
    // Try to clone
    const env = this.gitEnv();
    await execFile('git', ['clone', this.repoUrl!, REPO_DIR], { env, timeout: 60_000 });
    this.gitReachable = true;
    console.log('[git-backup] Cloned repo');
  } catch (cloneErr) {
    // Clone failed — maybe empty repo or network issue
    // Init locally, set remote
    console.log(`[git-backup] Clone failed (${cloneErr}), initializing locally`);
    await execFile('git', ['init'], { cwd: REPO_DIR });
    await execFile('git', ['remote', 'add', 'origin', this.repoUrl!], { cwd: REPO_DIR });
    // Create initial commit so we have a branch
    const env = this.gitEnv();
    await writeFile(join(REPO_DIR, '.gitkeep'), '');
    await execFile('git', ['add', '.gitkeep'], { cwd: REPO_DIR, env });
    await execFile('git', ['commit', '-m', 'init'], { cwd: REPO_DIR, env });
    this.gitReachable = false;
  }
}
// ── Initial Sync ──
/**
 * One-time sync on startup. A fresh DB (zero servers and at most the
 * auto-created system project) is treated as disaster recovery: restore
 * everything from the git checkout. Otherwise the DB is authoritative and
 * is dumped to git via fullResync.
 */
private async initialSync(): Promise<void> {
  const serverCount = await this.prisma.mcpServer.count();
  const projectCount = await this.prisma.project.count();
  // <= 1 project allows for the system project that exists on a clean install.
  const freshDb = serverCount === 0 && projectCount <= 1;
  if (freshDb) {
    const files = await this.readRepoFiles();
    if (files.size > 0 && this.importResource) {
      console.log(`[git-backup] Fresh DB, restoring ${files.size} files from git...`);
      await this.importFromFiles(files);
      return;
    }
  }
  // Existing (or empty-repo) case: DB wins — push its state into git.
  await this.fullResync();
}
/**
 * Dump every DB resource into the repo working tree and commit the delta.
 *
 * Files whose on-disk content already matches (including the trailing
 * newline appended on write) are left untouched so no-op resyncs produce no
 * commit. Files in the backup directories with no DB counterpart are pruned.
 * Updates lastSyncAt even when nothing changed.
 */
private async fullResync(): Promise<void> {
  const files = await serializeAll(this.prisma);
  let changed = false;
  // Write all files
  for (const [filePath, content] of files) {
    const fullPath = join(REPO_DIR, filePath);
    await mkdir(dirname(fullPath), { recursive: true });
    let existing: string | null = null;
    try {
      existing = await readFile(fullPath, 'utf-8');
    } catch { /* doesn't exist */ }
    // Compare against content + '\n' — the exact bytes written below.
    if (existing !== content + '\n') {
      await writeFile(fullPath, content + '\n');
      changed = true;
    }
  }
  // Remove files not in DB
  for (const kind of BACKUP_KINDS) {
    // Directory layout mirrors resourcePath(): plural kind name, except 'rbac'.
    const dir = kind === 'rbac' ? 'rbac' : `${kind}s`;
    const dirPath = join(REPO_DIR, dir);
    try {
      const entries = await readdir(dirPath);
      for (const entry of entries) {
        if (!entry.endsWith('.yaml')) continue;
        const filePath = `${dir}/${entry}`;
        if (!files.has(filePath)) {
          await unlink(join(REPO_DIR, filePath));
          changed = true;
        }
      }
    } catch { /* dir doesn't exist */ }
  }
  if (changed) {
    await this.git('add', '-A');
    if (await this.hasUncommittedChanges()) {
      await this.gitCommit('sync: full resync from database', 'mcpd');
    }
  }
  this.lastSyncAt = new Date();
}
// ── Sync Loop ──
/** Start the periodic sync timer; a cycle is skipped while one is still running. */
private startSyncLoop(): void {
  this.syncTimer = setInterval(() => {
    if (this.syncing) return; // previous cycle still in flight
    this.syncCycle().catch((err) => {
      console.error(`[git-backup] Sync cycle error: ${err}`);
    });
  }, SYNC_INTERVAL_MS);
}
/**
 * One sync cycle: fetch → import manual commits → merge remote → process
 * pending queue → push.
 *
 * Conflict policy: the DB is the source of truth, so merge conflicts are
 * resolved with `checkout --ours` (DB wins). When the remote is unreachable
 * the pending queue is still drained into local commits, which accumulate
 * and are pushed once connectivity returns. The `syncing` flag is managed
 * here (set/cleared in try/finally) so startSyncLoop can skip overlap.
 */
private async syncCycle(): Promise<void> {
  this.syncing = true;
  try {
    // 1. Fetch remote (detect connectivity)
    const canFetch = await this.tryFetch();
    // 2. Import manual commits (if remote is reachable)
    if (canFetch) {
      await this.importManualCommits();
      // Merge remote into local
      try {
        await this.git('merge', 'origin/main', '--no-edit');
      } catch {
        // Merge conflict — resolve in favor of ours
        try {
          await this.git('checkout', '--ours', '.');
          await this.git('add', '-A');
          await this.gitCommit('merge: resolve conflict (DB wins)', 'mcpd');
        } catch { /* no conflict files */ }
      }
    }
    // 3. Process pending queue
    await this.processPendingQueue();
    // 4. Push
    if (canFetch) {
      await this.tryPush();
    }
    this.lastSyncAt = new Date();
    // Clear a stale error only after a cycle with working connectivity.
    if (this.lastError && canFetch) {
      console.log('[git-backup] Reconnected, sync restored');
      this.lastError = null;
    }
  } finally {
    this.syncing = false;
  }
}
/** Fetch from origin; records reachability and returns whether it succeeded. */
private async tryFetch(): Promise<boolean> {
  try {
    await this.git('fetch', 'origin');
  } catch (err) {
    this.gitReachable = false;
    this.lastError = `fetch failed: ${err}`;
    return false;
  }
  this.gitReachable = true;
  return true;
}
/** Push HEAD to origin; records the push time and returns whether it succeeded. */
private async tryPush(): Promise<boolean> {
  try {
    await this.git('push', 'origin', 'HEAD');
  } catch (err) {
    this.lastError = `push failed: ${err}`;
    return false;
  }
  this.lastPushAt = new Date();
  this.gitReachable = true;
  return true;
}
/**
 * Find commits on origin/main not yet merged locally whose committer is not
 * mcpd (i.e. pushed manually by a human) and apply their file changes to the
 * DB via the import/delete callbacks.
 *
 * Conflict policy: a resource with an entry in the pending queue has a DB
 * change in flight — DB wins, so the manual change for that resource is
 * skipped. Commits are processed oldest-first (--reverse) so later manual
 * edits override earlier ones. Errors are logged per resource and never
 * abort the scan.
 */
private async importManualCommits(): Promise<void> {
  if (!this.importResource) return;
  try {
    // Find commits on remote not yet merged locally
    const raw = await this.git('log', 'HEAD..origin/main', '--format=%H|%ce', '--reverse');
    if (!raw) return;
    // Get pending resource keys for conflict detection
    const pending = await this.prisma.backupPending.findMany({
      select: { resourceKind: true, resourceName: true },
    });
    const pendingKeys = new Set(pending.map((p) => `${p.resourceKind}/${p.resourceName}`));
    for (const line of raw.split('\n')) {
      if (!line) continue;
      const [hash, committerEmail] = line.split('|');
      if (committerEmail === MCPD_EMAIL) continue; // Skip mcpd's own commits
      console.log(`[git-backup] Detected manual commit: ${hash!.slice(0, 7)}`);
      // Get files changed in this commit
      const diff = await this.git('diff-tree', '--no-commit-id', '-r', '--name-status', hash!);
      for (const diffLine of diff.split('\n')) {
        if (!diffLine) continue;
        const parts = diffLine.split('\t');
        const statusChar = parts[0]!;
        const filePath = parts[parts.length - 1]!; // Handle renames: last element is the target
        const parsed = parseResourcePath(filePath);
        if (!parsed) continue;
        const key = `${parsed.kind}/${parsed.name}`;
        if (pendingKeys.has(key)) {
          console.log(`[git-backup] Conflict for ${key} — DB wins, skipping manual change`);
          continue;
        }
        if (statusChar === 'D') {
          // Manual deletion
          try {
            await this.deleteResource!(parsed.kind, parsed.name);
            console.log(`[git-backup] Imported manual delete: ${key}`);
          } catch (err) {
            console.error(`[git-backup] Failed to import delete ${key}: ${err}`);
          }
        } else {
          // Manual add/modify — read file content from that commit
          try {
            const content = await this.git('show', `${hash}:${filePath}`);
            const doc = yaml.load(content) as Record<string, unknown>;
            if (doc && typeof doc === 'object') {
              await this.importResource!(parsed.kind, parsed.name, doc);
              console.log(`[git-backup] Imported manual change: ${key}`);
            }
          } catch (err) {
            console.error(`[git-backup] Failed to import ${key}: ${err}`);
          }
        }
      }
    }
  } catch (err) {
    console.error(`[git-backup] Error importing manual commits: ${err}`);
  }
}
/**
 * Drain the pending-change queue into git: for each entry (oldest first)
 * write or delete the resource's YAML file, commit it attributed to the
 * acting user, then remove the queue row.
 *
 * On any failure the entry is kept and processing stops immediately (break),
 * so per-resource ordering is preserved and the remaining entries retry on
 * the next cycle.
 */
private async processPendingQueue(): Promise<void> {
  const entries = await this.prisma.backupPending.findMany({
    orderBy: { createdAt: 'asc' },
  });
  if (entries.length === 0) return;
  for (const entry of entries) {
    const filePath = resourcePath(entry.resourceKind as BackupKind, entry.resourceName);
    const fullPath = join(REPO_DIR, filePath);
    try {
      if (entry.action === 'delete') {
        try {
          await unlink(fullPath);
        } catch { /* file may not exist */ }
      } else {
        await mkdir(dirname(fullPath), { recursive: true });
        // Trailing newline matches fullResync's on-disk format.
        await writeFile(fullPath, (entry.yamlContent ?? '') + '\n');
      }
      await this.git('add', '-A');
      if (await this.hasUncommittedChanges()) {
        const message = `${entry.action} ${entry.resourceKind}/${entry.resourceName} (user: ${entry.userName})`;
        await this.gitCommit(message, entry.userName);
      }
      // Remove processed entry
      await this.prisma.backupPending.delete({ where: { id: entry.id } });
    } catch (err) {
      console.error(`[git-backup] Failed to process pending ${entry.resourceKind}/${entry.resourceName}: ${err}`);
      // Don't delete — will retry next cycle
      break; // Stop processing to maintain order
    }
  }
}
// ── Helpers ──
/**
 * Serialize a single resource to apply-compatible YAML by re-reading it from
 * the DB. The lookup key is the resource name, except users (keyed by email).
 * Relations needed by the YAML transform are eagerly included (project
 * servers, group members, prompt's project).
 *
 * NOTE(review): prompts use findFirst by name, so with duplicate prompt
 * names across projects the first match wins — confirm acceptable for backup.
 *
 * @throws Error when the resource no longer exists or the kind is unknown
 */
private async serializeResource(kind: BackupKind, name: string): Promise<string> {
  switch (kind) {
    case 'server': {
      const r = await this.prisma.mcpServer.findUnique({ where: { name } });
      if (!r) throw new Error(`Server not found: ${name}`);
      return resourceToYaml('server', r as unknown as Record<string, unknown>);
    }
    case 'secret': {
      const r = await this.prisma.secret.findUnique({ where: { name } });
      if (!r) throw new Error(`Secret not found: ${name}`);
      return resourceToYaml('secret', r as unknown as Record<string, unknown>);
    }
    case 'project': {
      const r = await this.prisma.project.findUnique({
        where: { name },
        include: { servers: { include: { server: { select: { name: true } } } } },
      });
      if (!r) throw new Error(`Project not found: ${name}`);
      return resourceToYaml('project', r as unknown as Record<string, unknown>);
    }
    case 'user': {
      const r = await this.prisma.user.findUnique({ where: { email: name } });
      if (!r) throw new Error(`User not found: ${name}`);
      return resourceToYaml('user', r as unknown as Record<string, unknown>);
    }
    case 'group': {
      const r = await this.prisma.group.findUnique({
        where: { name },
        include: { members: { include: { user: { select: { email: true } } } } },
      });
      if (!r) throw new Error(`Group not found: ${name}`);
      return resourceToYaml('group', r as unknown as Record<string, unknown>);
    }
    case 'rbac': {
      const r = await this.prisma.rbacDefinition.findUnique({ where: { name } });
      if (!r) throw new Error(`RBAC definition not found: ${name}`);
      return resourceToYaml('rbac', r as unknown as Record<string, unknown>);
    }
    case 'prompt': {
      const r = await this.prisma.prompt.findFirst({
        where: { name },
        include: { project: { select: { name: true } } },
      });
      if (!r) throw new Error(`Prompt not found: ${name}`);
      return resourceToYaml('prompt', r as unknown as Record<string, unknown>);
    }
    case 'template': {
      const r = await this.prisma.mcpTemplate.findUnique({ where: { name } });
      if (!r) throw new Error(`Template not found: ${name}`);
      return resourceToYaml('template', r as unknown as Record<string, unknown>);
    }
    default:
      throw new Error(`Unknown resource kind: ${kind}`);
  }
}
/** Collect every backed-up YAML file from the checkout, keyed by repo-relative path. */
private async readRepoFiles(): Promise<Map<string, string>> {
  const result = new Map<string, string>();
  for (const kind of BACKUP_KINDS) {
    // Directory layout mirrors resourcePath(): plural kind name, except 'rbac'.
    const dirName = kind === 'rbac' ? 'rbac' : `${kind}s`;
    try {
      const names = await readdir(join(REPO_DIR, dirName));
      const yamlNames = names.filter((n) => n.endsWith('.yaml'));
      for (const fileName of yamlNames) {
        const relPath = `${dirName}/${fileName}`;
        result.set(relPath, await readFile(join(REPO_DIR, relPath), 'utf-8'));
      }
    } catch { /* directory absent — nothing of this kind backed up yet */ }
  }
  return result;
}
/** Import all files from the repo into the DB. */
private async importFromFiles(files: Map<string, string>): Promise<void> {
if (!this.importResource) return;
for (const kind of APPLY_ORDER) {
for (const [filePath, content] of files) {
const parsed = parseResourcePath(filePath);
if (!parsed || parsed.kind !== kind) continue;
try {
const doc = yaml.load(content) as Record<string, unknown>;
if (doc && typeof doc === 'object') {
await this.importResource(kind, parsed.name, doc);
}
} catch (err) {
console.error(`[git-backup] Failed to import ${filePath}: ${err}`);
}
}
}
}
}

View File

@@ -0,0 +1,198 @@
/**
* Converts Prisma DB models to mcpctl-apply-compatible YAML.
* Produces output identical to `mcpctl get <resource> <name> -o yaml`.
*/
import yaml from 'js-yaml';
import type { PrismaClient } from '@prisma/client';
// DB bookkeeping and derived fields that must never appear in the YAML
// documents. passwordHash is also stripped so credentials never land in git.
const INTERNAL_FIELDS = new Set([
  'id', 'createdAt', 'updatedAt', 'version', 'ownerId', 'summary',
  'chapters', 'linkStatus', 'serverId', 'passwordHash',
]);
// Key-ordering hints for reorderKeys(): 'kind' first, long text fields last.
const FIRST_KEYS = ['kind'];
const LAST_KEYS = ['link', 'content', 'prompt', 'data'];
/**
 * Strip internal DB fields and transform relations so the document matches
 * what `mcpctl apply` accepts — same logic as the CLI's stripInternalFields.
 *
 * Transformations, in order of the branches below:
 *  - drops INTERNAL_FIELDS keys and null/undefined values;
 *  - project↔server join rows → string[] of server names;
 *  - linkTarget → link; content omitted for linked prompts (it is derived);
 *  - gated dropped; proxyModel backfilled from gated when empty;
 *  - project relation object → its name; projectId and owner dropped;
 *  - group member join rows → string[] of member emails;
 *  - serverOverrides kept only when it is a non-empty object.
 */
function toApplyDoc(kind: string, raw: Record<string, unknown>): Record<string, unknown> {
  const result: Record<string, unknown> = { kind };
  const isLinkedPrompt = !!raw.linkTarget;
  for (const [key, value] of Object.entries(raw)) {
    if (INTERNAL_FIELDS.has(key)) continue;
    if (value === null || value === undefined) continue;
    // Servers join array → string[] of names
    if (key === 'servers' && Array.isArray(value)) {
      const entries = value as Array<{ server?: { name: string } }>;
      if (entries.length > 0 && entries[0]?.server) {
        result.servers = entries.map((e) => e.server!.name);
      } else {
        // Empty join list → []; rows without a .server shape pass through as-is.
        result.servers = entries.length === 0 ? [] : value;
      }
      continue;
    }
    // linkTarget → link, strip content for linked prompts
    if (key === 'linkTarget') {
      if (value) {
        result.link = value;
      }
      continue;
    }
    // Content is fetched from link source — don't include in YAML for linked prompts
    if (key === 'content' && isLinkedPrompt) continue;
    // Normalize proxyModel from gated
    if (key === 'gated') continue; // handled with proxyModel
    if (key === 'proxyModel') {
      // Empty proxyModel falls back to the legacy gated flag: gated === false
      // means the content pipeline, anything else means the default model.
      const pm = value as string;
      result.proxyModel = pm || (raw.gated === false ? 'content-pipeline' : 'default');
      continue;
    }
    // Project relation → project name
    if (key === 'project' && typeof value === 'object' && value !== null) {
      result.project = (value as { name: string }).name;
      continue;
    }
    if (key === 'projectId') continue; // stripped, use project name
    // Owner relation → strip
    if (key === 'owner' && typeof value === 'object') continue;
    // Group members → email array
    if (key === 'members' && Array.isArray(value)) {
      result.members = (value as Array<{ user?: { email: string } }>)
        .map((m) => m.user?.email)
        .filter(Boolean);
      continue;
    }
    // ServerOverrides: keep as-is if not empty
    if (key === 'serverOverrides') {
      if (value && typeof value === 'object' && Object.keys(value as object).length > 0) {
        result[key] = value;
      }
      continue;
    }
    result[key] = value;
  }
  return result;
}
/**
 * Return a copy of obj with keys arranged for readable YAML: FIRST_KEYS
 * ('kind') up front, LAST_KEYS (long text fields) at the end, and everything
 * else in its original insertion order between them — matches CLI output.
 */
function reorderKeys(obj: Record<string, unknown>): Record<string, unknown> {
  const front = FIRST_KEYS.filter((k) => k in obj);
  const middle = Object.keys(obj).filter((k) => !FIRST_KEYS.includes(k) && !LAST_KEYS.includes(k));
  const back = LAST_KEYS.filter((k) => k in obj);
  const ordered: Record<string, unknown> = {};
  for (const k of [...front, ...middle, ...back]) {
    ordered[k] = obj[k];
  }
  return ordered;
}
/** Convert a single resource to YAML string (apply-compatible). */
export function resourceToYaml(kind: string, resource: Record<string, unknown>): string {
const doc = toApplyDoc(kind, resource);
const ordered = reorderKeys(doc);
return yaml.dump(ordered, { lineWidth: 120, noRefs: true }).trimEnd();
}
/**
 * Repo-relative file path for a resource: `<kind>s/<name>.yaml` ('rbac'
 * stays singular). Characters unsafe in file names are replaced with '_'.
 * Note: the replacement is lossy, so two distinct names can map to one path.
 */
export function resourcePath(kind: string, name: string): string {
  const folder = kind === 'rbac' ? 'rbac' : `${kind}s`;
  const fileName = name.replace(/[/\\:*?"<>|]/g, '_');
  return `${folder}/${fileName}.yaml`;
}
/** Resource kinds that are backed up (one directory per kind in the repo). */
export const BACKUP_KINDS = ['server', 'secret', 'project', 'user', 'group', 'rbac', 'prompt', 'template'] as const;
export type BackupKind = (typeof BACKUP_KINDS)[number];
/** Apply order: dependencies before dependents (e.g. secrets/servers before projects, projects before prompts). */
export const APPLY_ORDER: BackupKind[] = ['secret', 'server', 'template', 'user', 'group', 'project', 'rbac', 'prompt'];
/**
 * Inverse of resourcePath: extract kind and name from a repo-relative path.
 * Returns null for paths outside the backup layout (unknown directory,
 * nested paths, non-.yaml files). Note: '_' substitutions made by
 * resourcePath are not undone — the name is taken verbatim from the file.
 */
export function parseResourcePath(filePath: string): { kind: BackupKind; name: string } | null {
  const dirToKind: Record<string, BackupKind> = {
    servers: 'server', secrets: 'secret', projects: 'project',
    users: 'user', groups: 'group', rbac: 'rbac',
    prompts: 'prompt', templates: 'template',
  };
  const match = /^(\w+)\/(.+)\.yaml$/.exec(filePath);
  if (match === null) return null;
  const kind = dirToKind[match[1]!];
  if (kind === undefined) return null;
  return { kind, name: match[2]! };
}
/**
 * Dump every backed-up resource in the DB to a map of repo-relative
 * filePath → YAML content. Relations required by the YAML transform are
 * eagerly included (project server names, group member emails, prompt's
 * project name). Users are keyed by email; everything else by name.
 */
export async function serializeAll(prisma: PrismaClient): Promise<Map<string, string>> {
  const out = new Map<string, string>();
  const put = (kind: string, name: string, row: unknown): void => {
    out.set(resourcePath(kind, name), resourceToYaml(kind, row as Record<string, unknown>));
  };
  for (const s of await prisma.mcpServer.findMany()) put('server', s.name, s);
  for (const s of await prisma.secret.findMany()) put('secret', s.name, s);
  const projects = await prisma.project.findMany({
    include: { servers: { include: { server: { select: { name: true } } } } },
  });
  for (const p of projects) put('project', p.name, p);
  // Users never include passwordHash in output (stripped by the serializer).
  for (const u of await prisma.user.findMany()) put('user', u.email, u);
  const groups = await prisma.group.findMany({
    include: { members: { include: { user: { select: { email: true } } } } },
  });
  for (const g of groups) put('group', g.name, g);
  for (const r of await prisma.rbacDefinition.findMany()) put('rbac', r.name, r);
  const prompts = await prisma.prompt.findMany({
    include: { project: { select: { name: true } } },
  });
  for (const p of prompts) put('prompt', p.name, p);
  for (const t of await prisma.mcpTemplate.findMany()) put('template', t.name, t);
  return out;
}

View File

@@ -86,4 +86,41 @@ export class GroupService {
}
return userIds;
}
// ── Backup/restore helpers ──
/**
 * Create or update a group from a backup/apply document, keyed by name.
 *
 * Fix: membership is now applied whenever the document contains a `members`
 * key — including an empty list — so a restore can clear a group's members.
 * The previous `length > 0` guard silently kept stale members when the
 * backed-up YAML listed none.
 */
async upsertByName(data: Record<string, unknown>): Promise<GroupWithMembers> {
  const name = data['name'] as string;
  const members = data['members'] as string[] | undefined;
  const existing = await this.groupRepo.findByName(name);
  if (existing !== null) {
    if (data['description'] !== undefined) {
      await this.groupRepo.update(existing.id, { description: data['description'] as string });
    }
    if (members !== undefined) {
      // An empty array clears membership (resolveEmails([]) resolves to []).
      const userIds = await this.resolveEmails(members);
      await this.groupRepo.setMembers(existing.id, userIds);
    }
    return this.getById(existing.id);
  }
  const createData: { name: string; description?: string } = { name };
  if (data['description'] !== undefined) createData.description = data['description'] as string;
  const group = await this.groupRepo.create(createData);
  // A freshly created group has no members, so an empty list needs no call.
  if (members !== undefined && members.length > 0) {
    const userIds = await this.resolveEmails(members);
    await this.groupRepo.setMembers(group.id, userIds);
  }
  const result = await this.groupRepo.findById(group.id);
  return result!;
}
/** Idempotently delete a group by name (no-op when absent). */
async deleteByName(name: string): Promise<void> {
  const group = await this.groupRepo.findByName(name);
  if (group === null) return;
  await this.groupRepo.delete(group.id);
}
}

View File

@@ -62,6 +62,27 @@ export class McpServerService {
}
await this.repo.delete(id);
}
// ── Backup/restore helpers ──
/** Create the server if missing, otherwise update it in place (backup import path). */
async upsertByName(data: Record<string, unknown>): Promise<McpServer> {
  const name = data['name'] as string;
  const found = await this.repo.findByName(name);
  if (found === null) {
    return this.repo.create(data as Parameters<IMcpServerRepository['create']>[0]);
  }
  // Name is the lookup key — never part of the update payload.
  const { name: _ignored, ...updateFields } = data;
  return this.repo.update(found.id, updateFields as Parameters<IMcpServerRepository['update']>[1]);
}
/** Idempotently delete a server by name, tearing down its instances first. */
async deleteByName(name: string): Promise<void> {
  const server = await this.repo.findByName(name);
  if (server === null) return;
  // Stop any running instances before removing the record.
  if (this.instanceService) {
    await this.instanceService.removeAllForServer(server.id);
  }
  await this.repo.delete(server.id);
}
}
export class NotFoundError extends Error {

View File

@@ -137,4 +137,52 @@ export class ProjectService {
return server.id;
}));
}
// ── Backup/restore helpers ──
/**
 * Create or update a project from a backup/apply document, keyed by name.
 * Only known scalar fields are copied from the document; `servers` is a list
 * of server names resolved to IDs.
 *
 * Fix: the server list is now applied whenever the document contains a
 * `servers` key — including an empty list — so a restore can detach all
 * servers. The previous `length > 0` guard silently kept stale links.
 *
 * @param ownerId owner assigned when the project has to be created
 */
async upsertByName(data: Record<string, unknown>, ownerId: string): Promise<ProjectWithRelations> {
  const name = data['name'] as string;
  const servers = data['servers'] as string[] | undefined;
  const existing = await this.projectRepo.findByName(name);
  // Whitelist of scalar fields the document may set; unknown keys are ignored.
  const scalarFields: Record<string, unknown> = {};
  if (data['description'] !== undefined) scalarFields['description'] = data['description'];
  if (data['prompt'] !== undefined) scalarFields['prompt'] = data['prompt'];
  if (data['proxyModel'] !== undefined) scalarFields['proxyModel'] = data['proxyModel'];
  if (data['gated'] !== undefined) scalarFields['gated'] = data['gated'];
  if (data['llmProvider'] !== undefined) scalarFields['llmProvider'] = data['llmProvider'];
  if (data['llmModel'] !== undefined) scalarFields['llmModel'] = data['llmModel'];
  if (data['serverOverrides'] !== undefined) scalarFields['serverOverrides'] = data['serverOverrides'];
  if (existing !== null) {
    if (Object.keys(scalarFields).length > 0) {
      await this.projectRepo.update(existing.id, scalarFields);
    }
    if (servers !== undefined) {
      // Empty list detaches every server (resolveServerNames([]) → []).
      const serverIds = await this.resolveServerNames(servers);
      await this.projectRepo.setServers(existing.id, serverIds);
    }
    return this.getById(existing.id);
  }
  const project = await this.projectRepo.create({
    name,
    description: (data['description'] as string) ?? '',
    ownerId,
    ...scalarFields,
  } as Parameters<IProjectRepository['create']>[0]);
  // A new project starts with no servers, so an empty list needs no call.
  if (servers !== undefined && servers.length > 0) {
    const serverIds = await this.resolveServerNames(servers);
    await this.projectRepo.setServers(project.id, serverIds);
  }
  return this.getById(project.id);
}
/** Idempotently delete a project by name (no-op when absent). */
async deleteByName(name: string): Promise<void> {
  const project = await this.projectRepo.findByName(name);
  if (project === null) return;
  await this.projectRepo.delete(project.id);
}
}

View File

@@ -218,6 +218,52 @@ export class PromptService {
return prompt;
}
// ── Backup/restore helpers ──
/**
 * Create or update a prompt from a backup/apply document.
 * Identity is (name, projectId): a `project` key (a name) is resolved to an
 * ID, a raw `projectId` is taken verbatim, otherwise the prompt is global
 * (projectId null).
 *
 * NOTE(review): the update path only writes content/priority — a changed
 * `linkTarget` on an existing prompt is silently ignored (it is only set on
 * create); confirm that is intended for restores.
 *
 * @throws NotFoundError when a referenced project name does not exist
 */
async upsertByName(data: Record<string, unknown>): Promise<Prompt> {
  const name = data['name'] as string;
  let projectId: string | null = null;
  // Resolve project name to ID if provided
  if (data['project'] !== undefined) {
    const project = await this.projectRepo.findByName(data['project'] as string);
    if (project === null) throw new NotFoundError(`Project not found: ${data['project']}`);
    projectId = project.id;
  } else if (data['projectId'] !== undefined) {
    projectId = data['projectId'] as string;
  }
  const existing = await this.promptRepo.findByNameAndProject(name, projectId);
  if (existing !== null) {
    const updateData: { content?: string; priority?: number } = {};
    if (data['content'] !== undefined) updateData.content = data['content'] as string;
    if (data['priority'] !== undefined) updateData.priority = data['priority'] as number;
    if (Object.keys(updateData).length > 0) {
      return this.promptRepo.update(existing.id, updateData);
    }
    return existing;
  }
  const createData: { name: string; content: string; projectId?: string; priority?: number; linkTarget?: string } = {
    name,
    content: (data['content'] as string) ?? '',
  };
  if (projectId !== null) createData.projectId = projectId;
  if (data['priority'] !== undefined) createData.priority = data['priority'] as number;
  if (data['linkTarget'] !== undefined) createData.linkTarget = data['linkTarget'] as string;
  return this.promptRepo.create(createData);
}
/**
 * Delete the first prompt matching `name`, searching across all projects.
 * Idempotent: a missing name is a no-op. NOTE: prompt names are only unique
 * per project, so with duplicates an arbitrary match is removed.
 */
async deleteByName(name: string): Promise<void> {
  const prompts = await this.promptRepo.findAll();
  const target = prompts.find((p) => p.name === name);
  if (target === undefined) return;
  await this.promptRepo.delete(target.id);
}
// ── Visibility for MCP (approved prompts + session's pending requests) ──
async getVisiblePrompts(

View File

@@ -51,4 +51,22 @@ export class RbacDefinitionService {
await this.getById(id);
await this.repo.delete(id);
}
// ── Backup/restore helpers ──
/** Create the RBAC definition if missing, otherwise update it (backup import path). */
async upsertByName(data: Record<string, unknown>): Promise<RbacDefinition> {
  const name = data['name'] as string;
  const found = await this.repo.findByName(name);
  if (found === null) {
    return this.repo.create(data as Parameters<IRbacDefinitionRepository['create']>[0]);
  }
  // Name is the lookup key — never part of the update payload.
  const { name: _ignored, ...updateFields } = data;
  return this.repo.update(found.id, updateFields as Parameters<IRbacDefinitionRepository['update']>[1]);
}
/** Idempotently delete an RBAC definition by name (no-op when absent). */
async deleteByName(name: string): Promise<void> {
  const found = await this.repo.findByName(name);
  if (found === null) return;
  await this.repo.delete(found.id);
}
}

View File

@@ -51,4 +51,22 @@ export class SecretService {
await this.getById(id);
await this.repo.delete(id);
}
// ── Backup/restore helpers ──
/** Create the secret if missing, otherwise update it (backup import path). */
async upsertByName(data: Record<string, unknown>): Promise<Secret> {
  const name = data['name'] as string;
  const found = await this.repo.findByName(name);
  if (found === null) {
    return this.repo.create(data as Parameters<ISecretRepository['create']>[0]);
  }
  // Name is the lookup key — never part of the update payload.
  const { name: _ignored, ...updateFields } = data;
  return this.repo.update(found.id, updateFields as Parameters<ISecretRepository['update']>[1]);
}
/** Idempotently delete a secret by name (no-op when absent). */
async deleteByName(name: string): Promise<void> {
  const found = await this.repo.findByName(name);
  if (found === null) return;
  await this.repo.delete(found.id);
}
}

View File

@@ -50,4 +50,22 @@ export class TemplateService {
await this.getById(id);
await this.repo.delete(id);
}
// ── Backup/restore helpers ──
/** Create the template if missing, otherwise update it (backup import path). */
async upsertByName(data: Record<string, unknown>): Promise<McpTemplate> {
  const name = data['name'] as string;
  const found = await this.repo.findByName(name);
  if (found === null) {
    return this.repo.create(data as Parameters<ITemplateRepository['create']>[0]);
  }
  // Name is the lookup key — never part of the update payload.
  const { name: _ignored, ...updateFields } = data;
  return this.repo.update(found.id, updateFields as Parameters<ITemplateRepository['update']>[1]);
}
/** Idempotently delete a template by name (no-op when absent). */
async deleteByName(name: string): Promise<void> {
  const found = await this.repo.findByName(name);
  if (found === null) return;
  await this.repo.delete(found.id);
}
}

View File

@@ -57,4 +57,38 @@ export class UserService {
async count(): Promise<number> {
return this.userRepo.count();
}
// ── Backup/restore helpers ──
/**
 * Upsert a user from backup data, keyed by email.
 * Existing users keep their passwordHash — backups never carry credentials.
 * Newly restored users get the '__RESTORED__' placeholder hash (presumably
 * rejected by the login hash comparison, forcing a reset — TODO confirm).
 */
async upsertByEmail(data: Record<string, unknown>): Promise<SafeUser> {
  const email = data['email'] as string;
  const current = await this.userRepo.findByEmail(email);
  if (current === null) {
    const createData: { email: string; passwordHash: string; name?: string; role?: string } = {
      email,
      passwordHash: '__RESTORED__',
    };
    if (data['name'] !== undefined) createData.name = data['name'] as string;
    if (data['role'] !== undefined) createData.role = data['role'] as string;
    return this.userRepo.create(createData);
  }
  const patch: { name?: string; role?: string } = {};
  if (data['name'] !== undefined) patch.name = data['name'] as string;
  if (data['role'] !== undefined) patch.role = data['role'] as string;
  if (Object.keys(patch).length === 0) {
    return current;
  }
  return this.userRepo.update(current.id, patch);
}
/** Idempotently delete a user by email (no-op when absent). */
async deleteByEmail(email: string): Promise<void> {
  const user = await this.userRepo.findByEmail(email);
  if (user === null) return;
  await this.userRepo.delete(user.id);
}
}

View File

@@ -0,0 +1,233 @@
import { describe, it, expect } from 'vitest';
import { resourceToYaml, resourcePath, parseResourcePath, APPLY_ORDER } from '../src/services/backup/yaml-serializer.js';
describe('resourceToYaml', () => {
  // Shared assertion helper: every fragment in `present` must appear in the
  // serialized YAML; every fragment in `absent` must not.
  const checkYaml = (out: string, present: string[], absent: string[] = []): void => {
    for (const fragment of present) expect(out).toContain(fragment);
    for (const fragment of absent) expect(out).not.toContain(fragment);
  };

  it('serializes a server', () => {
    const out = resourceToYaml('server', {
      id: 'srv-1',
      name: 'grafana',
      description: 'Grafana MCP',
      dockerImage: 'mcp/grafana:latest',
      transport: 'STDIO',
      env: [{ name: 'API_KEY', value: 'secret' }],
      version: 1,
      createdAt: new Date(),
      updatedAt: new Date(),
      packageName: null,
      repositoryUrl: null,
    });
    checkYaml(
      out,
      [
        'kind: server',
        'name: grafana',
        'description: Grafana MCP',
        'dockerImage: mcp/grafana:latest',
        'transport: STDIO',
      ],
      // DB bookkeeping fields and null-valued fields are stripped.
      ['id:', 'createdAt:', 'version:', 'packageName:'],
    );
  });

  it('serializes a project with server names', () => {
    const out = resourceToYaml('project', {
      id: 'p-1',
      name: 'my-project',
      description: 'Test project',
      proxyModel: 'default',
      gated: true,
      ownerId: 'user-1',
      servers: [
        { id: 'ps-1', server: { name: 'grafana' } },
        { id: 'ps-2', server: { name: 'node-red' } },
      ],
      llmProvider: 'openai',
      llmModel: null,
      version: 1,
      createdAt: new Date(),
      updatedAt: new Date(),
    });
    checkYaml(
      out,
      [
        'kind: project',
        'name: my-project',
        'proxyModel: default',
        '- grafana',
        '- node-red',
        'llmProvider: openai',
      ],
      ['gated:', 'ownerId:', 'llmModel:'],
    );
  });

  it('normalizes proxyModel from gated boolean', () => {
    // gated=false maps to 'content-pipeline', gated=true to 'default'.
    const cases: Array<{ name: string; gated: boolean; expected: string }> = [
      { name: 'p1', gated: false, expected: 'proxyModel: content-pipeline' },
      { name: 'p2', gated: true, expected: 'proxyModel: default' },
    ];
    for (const { name, gated, expected } of cases) {
      const out = resourceToYaml('project', { name, proxyModel: '', gated, servers: [] });
      expect(out).toContain(expected);
    }
  });

  it('serializes a secret', () => {
    const out = resourceToYaml('secret', {
      id: 's-1',
      name: 'my-secret',
      data: { TOKEN: 'abc123', KEY: 'xyz' },
      version: 1,
      createdAt: new Date(),
      updatedAt: new Date(),
    });
    checkYaml(out, ['kind: secret', 'name: my-secret', 'TOKEN: abc123', 'KEY: xyz']);
  });

  it('serializes a user without passwordHash', () => {
    const out = resourceToYaml('user', {
      id: 'u-1',
      email: 'michal@test.com',
      name: 'Michal',
      role: 'ADMIN',
      passwordHash: '$2b$10$secret',
      version: 1,
      createdAt: new Date(),
      updatedAt: new Date(),
    });
    checkYaml(
      out,
      ['kind: user', 'email: michal@test.com', 'name: Michal', 'role: ADMIN'],
      // Credentials must never appear in backup output.
      ['passwordHash'],
    );
  });

  it('serializes a group with member emails', () => {
    const out = resourceToYaml('group', {
      id: 'g-1',
      name: 'dev-team',
      description: 'Developers',
      members: [
        { user: { email: 'alice@test.com' } },
        { user: { email: 'bob@test.com' } },
      ],
      version: 1,
      createdAt: new Date(),
      updatedAt: new Date(),
    });
    checkYaml(out, ['kind: group', 'name: dev-team', '- alice@test.com', '- bob@test.com']);
  });

  it('serializes a prompt with project name', () => {
    const out = resourceToYaml('prompt', {
      id: 'pr-1',
      name: 'system-instructions',
      content: 'You are a helpful assistant.',
      priority: 5,
      project: { name: 'my-project' },
      projectId: 'p-1',
      summary: 'Summary text',
      chapters: ['ch1'],
      linkTarget: null,
      version: 1,
      createdAt: new Date(),
      updatedAt: new Date(),
    });
    checkYaml(
      out,
      [
        'kind: prompt',
        'name: system-instructions',
        'project: my-project',
        'priority: 5',
        'content: You are a helpful assistant.',
      ],
      ['projectId:', 'summary:', 'chapters:'],
    );
  });

  it('serializes a linked prompt with link field', () => {
    const out = resourceToYaml('prompt', {
      id: 'pr-2',
      name: 'linked-prompt',
      content: 'Fetched content',
      linkTarget: 'my-project/grafana:resource://docs',
      project: { name: 'my-project' },
      projectId: 'p-1',
      priority: 3,
      version: 1,
      createdAt: new Date(),
      updatedAt: new Date(),
    });
    checkYaml(
      out,
      ['link: my-project/grafana:resource://docs'],
      // Linked prompts emit `link:` only; inline content is stripped.
      ['content:', 'linkTarget:'],
    );
  });

  it('puts kind first and content/data last', () => {
    const out = resourceToYaml('secret', {
      name: 'test',
      data: { KEY: 'val' },
    });
    const rows = out.split('\n');
    expect(rows[0]).toBe('kind: secret');
    // `data:` must be emitted after `name:` in the document.
    const nameAt = rows.findIndex((row) => row.startsWith('name:'));
    const dataAt = rows.findIndex((row) => row.startsWith('data:'));
    expect(dataAt).toBeGreaterThan(nameAt);
  });
});
describe('resourcePath', () => {
it('maps kinds to directories', () => {
expect(resourcePath('server', 'grafana')).toBe('servers/grafana.yaml');
expect(resourcePath('secret', 'my-token')).toBe('secrets/my-token.yaml');
expect(resourcePath('project', 'default')).toBe('projects/default.yaml');
expect(resourcePath('rbac', 'admins')).toBe('rbac/admins.yaml');
expect(resourcePath('user', 'michal@test.com')).toBe('users/michal@test.com.yaml');
});
it('sanitizes unsafe characters', () => {
expect(resourcePath('server', 'my/server')).toBe('servers/my_server.yaml');
});
});
describe('parseResourcePath', () => {
  it('parses valid paths', () => {
    const valid = [
      ['servers/grafana.yaml', { kind: 'server', name: 'grafana' }],
      ['secrets/my-token.yaml', { kind: 'secret', name: 'my-token' }],
      ['rbac/admins.yaml', { kind: 'rbac', name: 'admins' }],
    ] as const;
    for (const [path, parsed] of valid) {
      expect(parseResourcePath(path)).toEqual(parsed);
    }
  });

  it('returns null for invalid paths', () => {
    // Files outside known resource directories must be ignored.
    for (const path of ['README.md', '.gitkeep', 'unknown/file.yaml']) {
      expect(parseResourcePath(path)).toBeNull();
    }
  });
});
describe('APPLY_ORDER', () => {
  it('has secrets before servers before projects', () => {
    // Required ordering: secret < server < project.
    const secretAt = APPLY_ORDER.indexOf('secret');
    const serverAt = APPLY_ORDER.indexOf('server');
    const projectAt = APPLY_ORDER.indexOf('project');
    expect(secretAt).toBeLessThan(serverAt);
    expect(serverAt).toBeLessThan(projectAt);
  });

  it('has all backup kinds', () => {
    expect(APPLY_ORDER).toHaveLength(8);
  });
});