perf(enrich): rolling doc check resolves changes in-place, always returns early
Instead of marking features enrichedAt=null and falling through to the full enrichment flow (which downloads the entire immovable list, taking ~5 min), the rolling doc check now merges updated PROPRIETARI/DATA_CERERE directly into the existing enrichment and returns immediately. It also touches enrichedAt on all checked features to rotate the batch, ensuring a different set of features is checked on each daily run. Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
@@ -217,6 +217,7 @@ export async function enrichFeatures(
|
||||
attributes: true,
|
||||
cadastralRef: true,
|
||||
enrichedAt: true,
|
||||
enrichment: true,
|
||||
},
|
||||
orderBy: { enrichedAt: "asc" },
|
||||
take: ROLLING_BATCH,
|
||||
@@ -245,25 +246,32 @@ export async function enrichFeatures(
|
||||
} catch { /* ignore */ }
|
||||
}
|
||||
|
||||
let rollingMarked = 0;
|
||||
let rollingUpdated = 0;
|
||||
if (rollingWsPk) {
|
||||
// Collect immovable PKs for the batch + map immPk → feature IDs
|
||||
// Collect immovable PKs for the batch + map immPk → feature data
|
||||
const rollingPks: string[] = [];
|
||||
const enrichedAtMap = new Map<string, Date>();
|
||||
const immPkToFeatureIds = new Map<string, string[]>();
|
||||
const immPkToFeatures = new Map<
|
||||
string,
|
||||
Array<{ id: string; enrichment: Record<string, unknown> | null }>
|
||||
>();
|
||||
for (const f of oldestEnriched) {
|
||||
const a = f.attributes as Record<string, unknown>;
|
||||
const immId = normalizeId(a.IMMOVABLE_ID);
|
||||
if (immId && f.enrichedAt) {
|
||||
rollingPks.push(immId);
|
||||
enrichedAtMap.set(immId, f.enrichedAt);
|
||||
const existing = immPkToFeatureIds.get(immId) ?? [];
|
||||
existing.push(f.id);
|
||||
immPkToFeatureIds.set(immId, existing);
|
||||
const existing = immPkToFeatures.get(immId) ?? [];
|
||||
existing.push({
|
||||
id: f.id,
|
||||
enrichment: (f as { enrichment?: Record<string, unknown> | null })
|
||||
.enrichment ?? null,
|
||||
});
|
||||
immPkToFeatures.set(immId, existing);
|
||||
}
|
||||
}
|
||||
|
||||
// Fetch documentation in batches of 50
|
||||
// Fetch documentation in batches of 50 — detect AND resolve changes in-place
|
||||
const DOC_BATCH = 50;
|
||||
for (let i = 0; i < rollingPks.length; i += DOC_BATCH) {
|
||||
const batch = rollingPks.slice(i, i + DOC_BATCH);
|
||||
@@ -272,51 +280,92 @@ export async function enrichFeatures(
|
||||
rollingWsPk,
|
||||
batch,
|
||||
);
|
||||
// Check each registration's appDate against enrichedAt
|
||||
const regs: Array<{
|
||||
landbookIE?: number;
|
||||
nodeType?: string;
|
||||
nodeName?: string;
|
||||
nodeStatus?: number;
|
||||
application?: { appDate?: number };
|
||||
immovablePk?: number;
|
||||
}> = docResp?.partTwoRegs ?? [];
|
||||
|
||||
// Map immovablePk → latest appDate from registrations
|
||||
const immToMaxApp = new Map<string, number>();
|
||||
// Build immovablePk from doc response immovables
|
||||
const docImmovables: Array<{
|
||||
immovablePk?: number;
|
||||
landbookIE?: number;
|
||||
}> = docResp?.immovables ?? [];
|
||||
|
||||
// Map landbookIE → immovablePk
|
||||
const lbToImm = new Map<string, string>();
|
||||
for (const di of docImmovables) {
|
||||
if (di.landbookIE && di.immovablePk) {
|
||||
lbToImm.set(String(di.landbookIE), normalizeId(di.immovablePk));
|
||||
}
|
||||
if (di.landbookIE && di.immovablePk)
|
||||
lbToImm.set(
|
||||
String(di.landbookIE),
|
||||
normalizeId(di.immovablePk),
|
||||
);
|
||||
}
|
||||
|
||||
// Collect max appDate + owner names per immovablePk
|
||||
const immToMaxApp = new Map<string, number>();
|
||||
const ownersByImm = new Map<string, string[]>();
|
||||
for (const reg of regs) {
|
||||
const appDate = reg.application?.appDate;
|
||||
if (typeof appDate !== "number" || appDate <= 0) continue;
|
||||
// Resolve to immovablePk via landbookIE
|
||||
const lb = reg.landbookIE ? String(reg.landbookIE) : "";
|
||||
const immPk = lb ? lbToImm.get(lb) : undefined;
|
||||
if (!immPk) continue;
|
||||
const current = immToMaxApp.get(immPk) ?? 0;
|
||||
if (appDate > current) immToMaxApp.set(immPk, appDate);
|
||||
const appDate = reg.application?.appDate;
|
||||
if (typeof appDate === "number" && appDate > 0) {
|
||||
const c = immToMaxApp.get(immPk) ?? 0;
|
||||
if (appDate > c) immToMaxApp.set(immPk, appDate);
|
||||
}
|
||||
// Collect current owner names (nodeType=P, not radiated)
|
||||
if (
|
||||
String(reg.nodeType ?? "").toUpperCase() === "P" &&
|
||||
reg.nodeName &&
|
||||
(reg.nodeStatus ?? 0) >= 0
|
||||
) {
|
||||
const owners = ownersByImm.get(immPk) ?? [];
|
||||
const name = String(reg.nodeName).trim();
|
||||
if (name && !owners.includes(name)) owners.push(name);
|
||||
ownersByImm.set(immPk, owners);
|
||||
}
|
||||
}
|
||||
|
||||
// Mark features where latest appDate > enrichedAt
|
||||
// Update features where appDate > enrichedAt — merge into existing enrichment
|
||||
const now = new Date();
|
||||
for (const [immPk, maxApp] of immToMaxApp) {
|
||||
const enrichedAt = enrichedAtMap.get(immPk);
|
||||
if (enrichedAt && maxApp > enrichedAt.getTime()) {
|
||||
const featureIds = immPkToFeatureIds.get(immPk) ?? [];
|
||||
if (featureIds.length > 0) {
|
||||
await prisma.gisFeature.updateMany({
|
||||
where: { id: { in: featureIds } },
|
||||
data: { enrichedAt: null },
|
||||
if (!enrichedAt || maxApp <= enrichedAt.getTime()) continue;
|
||||
const features = immPkToFeatures.get(immPk) ?? [];
|
||||
const owners = ownersByImm.get(immPk) ?? [];
|
||||
const ownerStr = owners.join("; ") || "-";
|
||||
const appDateIso = new Date(maxApp)
|
||||
.toISOString()
|
||||
.slice(0, 10);
|
||||
for (const feat of features) {
|
||||
// Merge: keep existing enrichment, update doc-based fields
|
||||
const existing = feat.enrichment ?? {};
|
||||
const merged = {
|
||||
...existing,
|
||||
PROPRIETARI: ownerStr,
|
||||
DATA_CERERE: appDateIso,
|
||||
};
|
||||
await prisma.gisFeature.update({
|
||||
where: { id: feat.id },
|
||||
data: {
|
||||
enrichment:
|
||||
merged as unknown as Prisma.InputJsonValue,
|
||||
enrichedAt: now,
|
||||
},
|
||||
});
|
||||
rollingMarked += featureIds.length;
|
||||
rollingUpdated++;
|
||||
}
|
||||
}
|
||||
|
||||
// Touch enrichedAt on checked features (even if unchanged) to rotate the batch
|
||||
const checkedIds = batch
|
||||
.flatMap((pk) => (immPkToFeatures.get(pk) ?? []).map((f) => f.id));
|
||||
if (checkedIds.length > 0) {
|
||||
await prisma.gisFeature.updateMany({
|
||||
where: { id: { in: checkedIds }, enrichedAt: { not: null } },
|
||||
data: { enrichedAt: now },
|
||||
});
|
||||
}
|
||||
} catch (err) {
|
||||
console.warn(
|
||||
@@ -327,29 +376,26 @@ export async function enrichFeatures(
|
||||
}
|
||||
}
|
||||
|
||||
if (rollingMarked > 0) {
|
||||
// Always return early — rolling check is self-contained
|
||||
const rollingNote = rollingUpdated > 0
|
||||
? `Rolling: ${rollingUpdated} parcele actualizate`
|
||||
: "Date deja complete";
|
||||
console.log(
|
||||
`[enrich] siruta=${siruta}: rolling check found ${rollingMarked} features with new documentation — will re-enrich`,
|
||||
);
|
||||
// Don't return early — fall through to normal enrichment
|
||||
} else {
|
||||
console.log(
|
||||
`[enrich] siruta=${siruta}: rolling check OK — all ${_totalCount} features up to date`,
|
||||
`[enrich] siruta=${siruta}: ${rollingNote} (checked ${oldestEnriched.length})`,
|
||||
);
|
||||
options?.onProgress?.(
|
||||
_totalCount,
|
||||
_totalCount,
|
||||
"Îmbogățire — date deja complete",
|
||||
`Îmbogățire — ${rollingNote}`,
|
||||
);
|
||||
return {
|
||||
siruta,
|
||||
enrichedCount: _totalCount,
|
||||
totalFeatures: _totalCount,
|
||||
unenrichedCount: 0,
|
||||
buildingCrossRefs: 0,
|
||||
buildingCrossRefs: rollingUpdated,
|
||||
status: "done",
|
||||
};
|
||||
}
|
||||
} else {
|
||||
// No enriched features to check — early bailout
|
||||
options?.onProgress?.(
|
||||
|
||||
Reference in New Issue
Block a user