audit: production safety fixes, cleanup, and documentation overhaul

CRITICAL fixes:
- Fix SQL injection in geoportal search (template literal in $queryRaw)
- Preserve enrichment data during GIS re-sync (upsert now updates only explicitly listed fields)
- Fix ePay version race condition (advisory lock in transaction)
- Add requireAuth() to compress-pdf and unlock routes (were unauthenticated)
- Remove hardcoded Stirling PDF API key (env vars now required)

IMPORTANT fixes:
- Add admin role check on registratura debug-sequences endpoint
- Fix reserved slot race condition with advisory lock in transaction
- Use SSO identity in close-guard-dialog instead of hardcoded "Utilizator"
- Storage DELETE catches only P2025 (not found), re-throws real errors
- Add onDelete: SetNull for GisFeature → GisSyncRun relation
- Move portal-only users to PORTAL_ONLY_USERS env var
- Add security headers (X-Frame-Options, X-Content-Type-Options, Referrer-Policy)
- Add periodic cleanup for eTerra/ePay session caches and progress store
- Log warning when ePay dataDocument is missing (expiry fallback)

Cleanup:
- Delete orphaned rgi-test page (1086 lines, unregistered, inaccessible)
- Delete legacy/ folder (5 files, unreferenced from src/)
- Remove unused ensureBucketExists() from minio-client.ts

Documentation:
- Optimize CLAUDE.md: 464 → 197 lines (moved per-module details to docs/)
- Create docs/ARCHITECTURE-QUICK.md (80 lines: data flow, deps, env vars)
- Create docs/MODULE-MAP.md (140 lines: entry points, API routes, cross-deps)

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
AI Assistant
2026-03-26 06:40:34 +02:00
parent c012adaa77
commit 0c4b91707f
25 changed files with 579 additions and 3405 deletions
@@ -54,11 +54,24 @@ type SessionEntry = {
const globalStore = globalThis as {
__epaySessionCache?: Map<string, SessionEntry>;
__epayCleanupTimer?: ReturnType<typeof setInterval>;
};
const sessionCache =
globalStore.__epaySessionCache ?? new Map<string, SessionEntry>();
globalStore.__epaySessionCache = sessionCache;
// Periodic cleanup of expired sessions (every 5 minutes, 9-min TTL)
if (!globalStore.__epayCleanupTimer) {
globalStore.__epayCleanupTimer = setInterval(() => {
const now = Date.now();
for (const [key, entry] of sessionCache.entries()) {
if (now - entry.lastUsed > 9 * 60_000) {
sessionCache.delete(key);
}
}
}, 5 * 60_000);
}
const makeCacheKey = (u: string, p: string) =>
crypto.createHash("sha256").update(`epay:${u}:${p}`).digest("hex");
+27 -22
View File
@@ -117,27 +117,29 @@ export async function enqueueBatch(
const items: QueueItem[] = [];
for (const input of inputs) {
// Create DB record in "queued" status
const record = await prisma.cfExtract.create({
data: {
nrCadastral: input.nrCadastral,
nrCF: input.nrCF ?? input.nrCadastral,
siruta: input.siruta,
judetIndex: input.judetIndex,
judetName: input.judetName,
uatId: input.uatId,
uatName: input.uatName,
gisFeatureId: input.gisFeatureId,
prodId: input.prodId ?? 14200,
status: "queued",
version:
((
await prisma.cfExtract.aggregate({
where: { nrCadastral: input.nrCadastral },
_max: { version: true },
})
)._max.version ?? 0) + 1,
},
// Create DB record in "queued" status — use transaction + advisory lock
// to prevent duplicate version numbers from concurrent requests
const record = await prisma.$transaction(async (tx) => {
await tx.$executeRaw`SELECT pg_advisory_xact_lock(hashtext(${'cfextract:' + input.nrCadastral}))`;
const agg = await tx.cfExtract.aggregate({
where: { nrCadastral: input.nrCadastral },
_max: { version: true },
});
return tx.cfExtract.create({
data: {
nrCadastral: input.nrCadastral,
nrCF: input.nrCF ?? input.nrCadastral,
siruta: input.siruta,
judetIndex: input.judetIndex,
judetName: input.judetName,
uatId: input.uatId,
uatName: input.uatName,
gisFeatureId: input.gisFeatureId,
prodId: input.prodId ?? 14200,
status: "queued",
version: (agg._max.version ?? 0) + 1,
},
});
});
items.push({ extractId: record.id, input });
@@ -418,7 +420,10 @@ async function processBatch(
},
);
// Complete
// Complete — require document date from ANCPI for accurate expiry
if (!doc.dataDocument) {
console.warn(`[epay-queue] Missing dataDocument for extract ${item.extractId}, using download date`);
}
const documentDate = doc.dataDocument
? new Date(doc.dataDocument)
: new Date();
@@ -79,11 +79,24 @@ type SessionEntry = {
const globalStore = globalThis as {
__eterraSessionStore?: Map<string, SessionEntry>;
__eterraCleanupTimer?: ReturnType<typeof setInterval>;
};
const sessionStore =
globalStore.__eterraSessionStore ?? new Map<string, SessionEntry>();
globalStore.__eterraSessionStore = sessionStore;
// Periodic cleanup of expired sessions (every 5 minutes, 9-min TTL)
if (!globalStore.__eterraCleanupTimer) {
globalStore.__eterraCleanupTimer = setInterval(() => {
const now = Date.now();
for (const [key, entry] of sessionStore.entries()) {
if (now - entry.lastUsed > 9 * 60_000) {
sessionStore.delete(key);
}
}
}, 5 * 60_000);
}
const makeCacheKey = (u: string, p: string) =>
crypto.createHash("sha256").update(`${u}:${p}`).digest("hex");
@@ -16,10 +16,24 @@ export type SyncProgress = {
type ProgressStore = Map<string, SyncProgress>;
const g = globalThis as { __parcelSyncProgressStore?: ProgressStore };
const g = globalThis as {
__parcelSyncProgressStore?: ProgressStore;
__progressCleanupTimer?: ReturnType<typeof setInterval>;
};
const store: ProgressStore = g.__parcelSyncProgressStore ?? new Map();
g.__parcelSyncProgressStore = store;
// Periodic cleanup of stale progress entries (every 30 minutes)
if (!g.__progressCleanupTimer) {
g.__progressCleanupTimer = setInterval(() => {
for (const [jobId, p] of store.entries()) {
if (p.status === "done" || p.status === "error") {
store.delete(jobId);
}
}
}, 30 * 60_000);
}
export const setProgress = (p: SyncProgress) => store.set(p.jobId, p);
export const getProgress = (jobId: string) => store.get(jobId);
export const clearProgress = (jobId: string) => store.delete(jobId);
@@ -237,8 +237,16 @@ export async function syncLayer(
},
create: item,
update: {
...item,
siruta: item.siruta,
inspireId: item.inspireId,
cadastralRef: item.cadastralRef,
areaValue: item.areaValue,
isActive: item.isActive,
attributes: item.attributes,
geometry: item.geometry,
syncRunId: item.syncRunId,
updatedAt: new Date(),
// enrichment + enrichedAt preserved — not overwritten
},
});
}