fix: rate limiting breaking smoke tests and backup routes 404 when disabled
- Exempt /healthz and /health from rate limiter
- Increase rate limit from 500 to 2000 req/min
- Register backup routes even when disabled (status shows disabled)
- Guard restore endpoints with 503 when backup not configured
- Add retry with backoff on 429 in audit smoke tests

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -465,11 +465,13 @@ async function main(): Promise<void> {
|
||||
}
|
||||
};
|
||||
gitBackup.setCallbacks(importResource, deleteResource);
|
||||
registerGitBackupRoutes(app, gitBackup);
|
||||
// Init async — don't block server startup
|
||||
gitBackup.init().catch((err) => app.log.error({ err }, 'Git backup init failed'));
|
||||
}
|
||||
|
||||
// Always register backup routes (status shows disabled when no repo configured)
|
||||
registerGitBackupRoutes(app, gitBackup);
|
||||
|
||||
// ── RBAC list filtering hook ──
|
||||
// Filters array responses to only include resources the user is allowed to see.
|
||||
app.addHook('preSerialization', async (request, _reply, payload) => {
|
||||
|
||||
@@ -20,5 +20,10 @@ export async function registerSecurityPlugins(
|
||||
await app.register(rateLimit, {
|
||||
max: config.rateLimitMax,
|
||||
timeWindow: config.rateLimitWindowMs,
|
||||
allowList: (req) => {
|
||||
// Exempt health probes and internal monitoring from rate limiting
|
||||
const url = req.url;
|
||||
return url === '/healthz' || url === '/health';
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
@@ -25,6 +25,9 @@ export function registerGitBackupRoutes(app: FastifyInstance, gitBackup: GitBack
|
||||
|
||||
// POST /api/v1/backup/restore/preview — preview restore
|
||||
app.post<{ Body: { commit: string } }>('/api/v1/backup/restore/preview', async (request, reply) => {
|
||||
if (!gitBackup.enabled) {
|
||||
return reply.code(503).send({ error: 'Backup is not configured' });
|
||||
}
|
||||
const { commit } = request.body ?? {};
|
||||
if (!commit) {
|
||||
return reply.code(400).send({ error: 'commit is required' });
|
||||
@@ -39,6 +42,9 @@ export function registerGitBackupRoutes(app: FastifyInstance, gitBackup: GitBack
|
||||
|
||||
// POST /api/v1/backup/restore — restore to a commit
|
||||
app.post<{ Body: { commit: string } }>('/api/v1/backup/restore', async (request, reply) => {
|
||||
if (!gitBackup.enabled) {
|
||||
return reply.code(503).send({ error: 'Backup is not configured' });
|
||||
}
|
||||
const { commit } = request.body ?? {};
|
||||
if (!commit) {
|
||||
return reply.code(400).send({ error: 'commit is required' });
|
||||
|
||||
@@ -65,24 +65,32 @@ interface AuditSessionResult {
|
||||
total: number;
|
||||
}
|
||||
|
||||
/** Fetch JSON from mcpd REST API (with auth from credentials). */
|
||||
function mcpdGet<T>(path: string): Promise<T> {
|
||||
return new Promise((resolve, reject) => {
|
||||
const url = new URL(path, MCPD_EFFECTIVE_URL);
|
||||
const headers: Record<string, string> = { 'Accept': 'application/json' };
|
||||
if (MCPD_CREDS.token) headers['Authorization'] = `Bearer ${MCPD_CREDS.token}`;
|
||||
http.get(url, { timeout: 10_000, headers }, (res) => {
|
||||
const chunks: Buffer[] = [];
|
||||
res.on('data', (chunk: Buffer) => chunks.push(chunk));
|
||||
res.on('end', () => {
|
||||
try {
|
||||
resolve(JSON.parse(Buffer.concat(chunks).toString('utf-8')) as T);
|
||||
} catch (err) {
|
||||
reject(err);
|
||||
}
|
||||
});
|
||||
}).on('error', reject);
|
||||
});
|
||||
/** Fetch JSON from mcpd REST API (with auth from credentials). Retries on rate limit. */
|
||||
async function mcpdGet<T>(path: string, retries = 3): Promise<T> {
|
||||
for (let attempt = 0; attempt <= retries; attempt++) {
|
||||
const result = await new Promise<{ status: number; body: T }>((resolve, reject) => {
|
||||
const url = new URL(path, MCPD_EFFECTIVE_URL);
|
||||
const headers: Record<string, string> = { 'Accept': 'application/json' };
|
||||
if (MCPD_CREDS.token) headers['Authorization'] = `Bearer ${MCPD_CREDS.token}`;
|
||||
http.get(url, { timeout: 10_000, headers }, (res) => {
|
||||
const chunks: Buffer[] = [];
|
||||
res.on('data', (chunk: Buffer) => chunks.push(chunk));
|
||||
res.on('end', () => {
|
||||
try {
|
||||
resolve({ status: res.statusCode ?? 0, body: JSON.parse(Buffer.concat(chunks).toString('utf-8')) as T });
|
||||
} catch (err) {
|
||||
reject(err);
|
||||
}
|
||||
});
|
||||
}).on('error', reject);
|
||||
});
|
||||
if (result.status === 429 && attempt < retries) {
|
||||
await new Promise((r) => setTimeout(r, 2_000 * (attempt + 1)));
|
||||
continue;
|
||||
}
|
||||
return result.body;
|
||||
}
|
||||
throw new Error('mcpdGet: max retries exceeded');
|
||||
}
|
||||
|
||||
/** Query audit events from mcpd. */
|
||||
|
||||
@@ -30,7 +30,7 @@ services:
|
||||
MCPD_LOG_LEVEL: ${MCPD_LOG_LEVEL:-info}
|
||||
MCPD_NODE_RUNNER_IMAGE: mysources.co.uk/michal/mcpctl-node-runner:latest
|
||||
MCPD_PYTHON_RUNNER_IMAGE: mysources.co.uk/michal/mcpctl-python-runner:latest
|
||||
MCPD_RATE_LIMIT_MAX: "500"
|
||||
MCPD_RATE_LIMIT_MAX: "2000"
|
||||
MCPD_MCP_NETWORK: mcp-servers
|
||||
MCPD_BACKUP_REPO: ${MCPD_BACKUP_REPO:-}
|
||||
depends_on:
|
||||
|
||||
Reference in New Issue
Block a user