perf: separate blob storage for registratura attachments

Root cause: even with SQL-level stripping, PostgreSQL must TOAST-decompress
entire multi-MB JSONB values from disk before any processing. For 5 entries
with PDF attachments (25-50MB total), this takes several seconds.

Fix: store base64 attachment data in separate namespace 'registratura-blobs'.
Main entries are inherently small (~1-2KB). List queries never touch heavy data.

Changes:
- registry-service.ts: extractBlobs/mergeBlobs split base64 on save/load,
  migrateEntryBlobs() one-time migration for existing entries
- use-registry.ts: dual namespace (registratura + registratura-blobs),
  migration runs on first mount
- registratura-module.tsx: removed useContacts/useTags hooks that triggered
  2 unnecessary API fetches on page load (write-only ops use direct storage)

Before: 3 API calls on mount, one reading 25-50MB from PostgreSQL
After: 1 API call on mount, reading ~5-10KB total
This commit is contained in:
AI Assistant
2026-02-27 23:35:04 +02:00
parent 8385041bb0
commit f8c19bb5b4
3 changed files with 227 additions and 53 deletions
@@ -24,8 +24,8 @@ import {
DialogFooter, DialogFooter,
} from "@/shared/components/ui/dialog"; } from "@/shared/components/ui/dialog";
import { useRegistry } from "../hooks/use-registry"; import { useRegistry } from "../hooks/use-registry";
import { useContacts } from "@/modules/address-book/hooks/use-contacts"; import { useStorage, useStorageService } from "@/core/storage";
import { useTags } from "@/core/tagging"; import { v4 as uuid } from "uuid";
import { RegistryFilters } from "./registry-filters"; import { RegistryFilters } from "./registry-filters";
import { RegistryTable } from "./registry-table"; import { RegistryTable } from "./registry-table";
import { RegistryEntryForm } from "./registry-entry-form"; import { RegistryEntryForm } from "./registry-entry-form";
@@ -55,15 +55,16 @@ export function RegistraturaModule() {
removeDeadline, removeDeadline,
} = useRegistry(); } = useRegistry();
const { addContact } = useContacts(); // Direct storage for write-only operations — avoids loading all contacts/tags on mount
const { createTag } = useTags("document-type"); const contactStorage = useStorage("address-book");
const tagStorageService = useStorageService();
const [viewMode, setViewMode] = useState<ViewMode>("list"); const [viewMode, setViewMode] = useState<ViewMode>("list");
const [editingEntry, setEditingEntry] = useState<RegistryEntry | null>(null); const [editingEntry, setEditingEntry] = useState<RegistryEntry | null>(null);
const [closingId, setClosingId] = useState<string | null>(null); const [closingId, setClosingId] = useState<string | null>(null);
const [linkCheckId, setLinkCheckId] = useState<string | null>(null); const [linkCheckId, setLinkCheckId] = useState<string | null>(null);
// ── Bidirectional Address Book integration ── // ── Bidirectional Address Book integration (write-only, no eager fetch) ──
const handleCreateContact = useCallback( const handleCreateContact = useCallback(
async (data: { async (data: {
name: string; name: string;
@@ -71,7 +72,9 @@ export function RegistraturaModule() {
email: string; email: string;
}): Promise<AddressContact | undefined> => { }): Promise<AddressContact | undefined> => {
try { try {
const contact = await addContact({ const now = new Date().toISOString();
const contact: AddressContact = {
id: uuid(),
name: data.name, name: data.name,
company: "", company: "",
type: "collaborator", type: "collaborator",
@@ -88,30 +91,36 @@ export function RegistraturaModule() {
tags: [], tags: [],
notes: "Creat automat din Registratură", notes: "Creat automat din Registratură",
visibility: "all", visibility: "all",
}); createdAt: now,
updatedAt: now,
};
await contactStorage.set(`contact:${contact.id}`, contact);
return contact; return contact;
} catch { } catch {
return undefined; return undefined;
} }
}, },
[addContact], [contactStorage],
); );
// ── Bidirectional Tag Manager integration ── // ── Bidirectional Tag Manager integration (write-only, no eager fetch) ──
const handleCreateDocType = useCallback( const handleCreateDocType = useCallback(
async (label: string) => { async (label: string) => {
try { try {
await createTag({ const tagId = uuid();
await tagStorageService.set("tags", tagId, {
id: tagId,
label, label,
category: "document-type", category: "document-type",
scope: "global", scope: "global",
color: "#64748b", color: "#64748b",
createdAt: new Date().toISOString(),
}); });
} catch { } catch {
// tag may already exist — ignore // tag may already exist — ignore
} }
}, },
[createTag], [tagStorageService],
); );
const handleAdd = async ( const handleAdd = async (
+35 -34
View File
@@ -1,6 +1,6 @@
"use client"; "use client";
import { useState, useEffect, useCallback } from "react"; import { useState, useEffect, useCallback, useRef } from "react";
import { useStorage } from "@/core/storage"; import { useStorage } from "@/core/storage";
import { v4 as uuid } from "uuid"; import { v4 as uuid } from "uuid";
import type { import type {
@@ -17,6 +17,7 @@ import {
saveEntry, saveEntry,
deleteEntry, deleteEntry,
generateRegistryNumber, generateRegistryNumber,
migrateEntryBlobs,
} from "../services/registry-service"; } from "../services/registry-service";
import { import {
createTrackedDeadline, createTrackedDeadline,
@@ -34,6 +35,7 @@ export interface RegistryFilters {
export function useRegistry() { export function useRegistry() {
const storage = useStorage("registratura"); const storage = useStorage("registratura");
const blobStorage = useStorage("registratura-blobs");
const [entries, setEntries] = useState<RegistryEntry[]>([]); const [entries, setEntries] = useState<RegistryEntry[]>([]);
const [loading, setLoading] = useState(true); const [loading, setLoading] = useState(true);
const [filters, setFilters] = useState<RegistryFilters>({ const [filters, setFilters] = useState<RegistryFilters>({
@@ -43,6 +45,7 @@ export function useRegistry() {
documentType: "all", documentType: "all",
company: "all", company: "all",
}); });
const migrationRan = useRef(false);
const refresh = useCallback(async () => { const refresh = useCallback(async () => {
setLoading(true); setLoading(true);
@@ -51,17 +54,23 @@ export function useRegistry() {
setLoading(false); setLoading(false);
}, [storage]); }, [storage]);
// On mount: run migration (once), then load entries
// eslint-disable-next-line react-hooks/set-state-in-effect // eslint-disable-next-line react-hooks/set-state-in-effect
useEffect(() => { useEffect(() => {
refresh(); const init = async () => {
}, [refresh]); if (!migrationRan.current) {
migrationRan.current = true;
await migrateEntryBlobs(storage, blobStorage);
}
await refresh();
};
init();
}, [refresh, storage, blobStorage]);
const addEntry = useCallback( const addEntry = useCallback(
async ( async (
data: Omit<RegistryEntry, "id" | "number" | "createdAt" | "updatedAt">, data: Omit<RegistryEntry, "id" | "number" | "createdAt" | "updatedAt">,
) => { ) => {
// Fetch fresh entries to prevent duplicate numbers from stale state.
// This single call replaces what was previously list() + N×get().
const freshEntries = await getAllEntries(storage); const freshEntries = await getAllEntries(storage);
const now = new Date().toISOString(); const now = new Date().toISOString();
const number = generateRegistryNumber( const number = generateRegistryNumber(
@@ -77,12 +86,11 @@ export function useRegistry() {
createdAt: now, createdAt: now,
updatedAt: now, updatedAt: now,
}; };
await saveEntry(storage, entry); await saveEntry(storage, blobStorage, entry);
// Update local state directly to avoid a second full fetch
setEntries((prev) => [entry, ...prev]); setEntries((prev) => [entry, ...prev]);
return entry; return entry;
}, },
[storage], [storage, blobStorage],
); );
const updateEntry = useCallback( const updateEntry = useCallback(
@@ -97,50 +105,48 @@ export function useRegistry() {
createdAt: existing.createdAt, createdAt: existing.createdAt,
updatedAt: new Date().toISOString(), updatedAt: new Date().toISOString(),
}; };
await saveEntry(storage, updated); await saveEntry(storage, blobStorage, updated);
await refresh(); await refresh();
}, },
[storage, refresh, entries], [storage, blobStorage, refresh, entries],
); );
const removeEntry = useCallback( const removeEntry = useCallback(
async (id: string) => { async (id: string) => {
await deleteEntry(storage, id); await deleteEntry(storage, blobStorage, id);
await refresh(); await refresh();
}, },
[storage, refresh], [storage, blobStorage, refresh],
); );
/** Close an entry and optionally its linked entries.
* Batches all saves, then does a single refresh at the end.
*/
const closeEntry = useCallback( const closeEntry = useCallback(
async (id: string, closeLinked: boolean) => { async (id: string, closeLinked: boolean) => {
const entry = entries.find((e) => e.id === id); const entry = entries.find((e) => e.id === id);
if (!entry) return; if (!entry) return;
// Save main entry as closed
const now = new Date().toISOString(); const now = new Date().toISOString();
const closedMain: RegistryEntry = { const closedMain: RegistryEntry = {
...entry, ...entry,
status: "inchis", status: "inchis",
updatedAt: now, updatedAt: now,
}; };
await saveEntry(storage, closedMain); await saveEntry(storage, blobStorage, closedMain);
// Close linked entries in parallel
const linked = entry.linkedEntryIds ?? []; const linked = entry.linkedEntryIds ?? [];
if (closeLinked && linked.length > 0) { if (closeLinked && linked.length > 0) {
const saves = linked const saves = linked
.map((linkedId) => entries.find((e) => e.id === linkedId)) .map((linkedId) => entries.find((e) => e.id === linkedId))
.filter((e): e is RegistryEntry => !!e && e.status !== "inchis") .filter((e): e is RegistryEntry => !!e && e.status !== "inchis")
.map((e) => .map((e) =>
saveEntry(storage, { ...e, status: "inchis", updatedAt: now }), saveEntry(storage, blobStorage, {
...e,
status: "inchis",
updatedAt: now,
}),
); );
await Promise.all(saves); await Promise.all(saves);
} }
// Single refresh at the end
await refresh(); await refresh();
}, },
[entries, storage, refresh], [entries, storage, blobStorage, refresh],
); );
const updateFilter = useCallback( const updateFilter = useCallback(
@@ -169,11 +175,11 @@ export function useRegistry() {
trackedDeadlines: [...existing, tracked], trackedDeadlines: [...existing, tracked],
updatedAt: new Date().toISOString(), updatedAt: new Date().toISOString(),
}; };
await saveEntry(storage, updated); await saveEntry(storage, blobStorage, updated);
await refresh(); await refresh();
return tracked; return tracked;
}, },
[entries, storage, refresh], [entries, storage, blobStorage, refresh],
); );
const resolveDeadline = useCallback( const resolveDeadline = useCallback(
@@ -198,9 +204,8 @@ export function useRegistry() {
trackedDeadlines: updatedDeadlines, trackedDeadlines: updatedDeadlines,
updatedAt: new Date().toISOString(), updatedAt: new Date().toISOString(),
}; };
await saveEntry(storage, updated); await saveEntry(storage, blobStorage, updated);
// If the resolved deadline has a chain, automatically check for the next type
const def = getDeadlineType(dl.typeId); const def = getDeadlineType(dl.typeId);
await refresh(); await refresh();
@@ -213,7 +218,7 @@ export function useRegistry() {
return resolved; return resolved;
}, },
[entries, storage, refresh], [entries, storage, blobStorage, refresh],
); );
const removeDeadline = useCallback( const removeDeadline = useCallback(
@@ -226,10 +231,10 @@ export function useRegistry() {
trackedDeadlines: deadlines.filter((d) => d.id !== deadlineId), trackedDeadlines: deadlines.filter((d) => d.id !== deadlineId),
updatedAt: new Date().toISOString(), updatedAt: new Date().toISOString(),
}; };
await saveEntry(storage, updated); await saveEntry(storage, blobStorage, updated);
await refresh(); await refresh();
}, },
[entries, storage, refresh], [entries, storage, blobStorage, refresh],
); );
const filteredEntries = entries.filter((entry) => { const filteredEntries = entries.filter((entry) => {
@@ -256,15 +261,11 @@ export function useRegistry() {
return true; return true;
}); });
/**
* Load a single entry WITH full attachment data (for editing).
* The list uses lightweight mode that strips base64 data.
*/
const loadFullEntry = useCallback( const loadFullEntry = useCallback(
async (id: string): Promise<RegistryEntry | null> => { async (id: string): Promise<RegistryEntry | null> => {
return getFullEntry(storage, id); return getFullEntry(storage, blobStorage, id);
}, },
[storage], [storage, blobStorage],
); );
return { return {
@@ -1,5 +1,5 @@
import type { CompanyId } from "@/core/auth/types"; import type { CompanyId } from "@/core/auth/types";
import type { RegistryEntry } from "../types"; import type { RegistryEntry, RegistryAttachment } from "../types";
const STORAGE_PREFIX = "entry:"; const STORAGE_PREFIX = "entry:";
@@ -13,15 +13,108 @@ export interface RegistryStorage {
}): Promise<Record<string, unknown>>; }): Promise<Record<string, unknown>>;
} }
// ── Blob separation ──
// Base64 attachment data is stored in a SEPARATE namespace ("registratura-blobs")
// so the main entries are always small (~1-2KB each). PostgreSQL never needs to
// decompress multi-MB TOAST chunks for list queries.
/**
 * Blob record stored per entry in the separate "registratura-blobs"
 * namespace, keyed by the entry id (see saveEntry/getFullEntry).
 * Holds only the heavy base64 payloads stripped out of the main entry,
 * so list queries on the "registratura" namespace stay small.
 */
interface EntryBlobs {
  /** attachmentId → base64 data for that attachment */
  attachments?: Record<string, string>;
  /** base64 data of the closure attachment, if present */
  closureAttachment?: string;
}
/**
 * Split an entry into a lightweight copy and its heavy base64 payloads.
 *
 * Attachment data is moved into an EntryBlobs record only when it is
 * large (> 1024 chars) and not the legacy "__stripped__" placeholder;
 * metadata (id, name, type, size, addedAt) always stays on the entry.
 *
 * @returns the stripped entry plus the extracted blobs, or `blobs: null`
 *          when nothing heavy was found.
 */
function extractBlobs(entry: RegistryEntry): {
  stripped: RegistryEntry;
  blobs: EntryBlobs | null;
} {
  const extracted: EntryBlobs = {};
  let found = false;

  // Walk regular attachments, replacing heavy payloads with "".
  const lightAttachments: RegistryAttachment[] = [];
  for (const att of entry.attachments ?? []) {
    if (att.data && att.data.length > 1024 && att.data !== "__stripped__") {
      if (extracted.attachments === undefined) extracted.attachments = {};
      extracted.attachments[att.id] = att.data;
      found = true;
      lightAttachments.push({ ...att, data: "" });
    } else {
      // Small / empty / already-stripped data stays inline untouched.
      lightAttachments.push(att);
    }
  }

  let stripped: RegistryEntry = { ...entry, attachments: lightAttachments };

  // The closure attachment gets the same treatment.
  const closure = entry.closureInfo;
  if (
    closure?.attachment?.data &&
    closure.attachment.data.length > 1024 &&
    closure.attachment.data !== "__stripped__"
  ) {
    extracted.closureAttachment = closure.attachment.data;
    found = true;
    stripped = {
      ...stripped,
      closureInfo: {
        ...closure,
        attachment: { ...closure.attachment, data: "" },
      },
    };
  }

  return { stripped, blobs: found ? extracted : null };
}
/**
 * Re-attach previously extracted base64 payloads to a stripped entry.
 *
 * Only fills attachments whose data slot is empty or holds the legacy
 * "__stripped__" placeholder — an attachment that already carries data
 * is never overwritten. Returns the entry unchanged when blobs is null.
 */
function mergeBlobs(
  entry: RegistryEntry,
  blobs: EntryBlobs | null,
): RegistryEntry {
  if (blobs === null) return entry;

  let result = entry;

  const attachmentBlobs = blobs.attachments;
  if (attachmentBlobs) {
    result = {
      ...result,
      attachments: (result.attachments ?? []).map((att) => {
        const payload = attachmentBlobs[att.id];
        if (!payload) return att;
        // "" is falsy, so !att.data also covers the empty-string slot.
        const needsRestore = !att.data || att.data === "__stripped__";
        return needsRestore ? { ...att, data: payload } : att;
      }),
    };
  }

  const closure = result.closureInfo;
  if (blobs.closureAttachment && closure?.attachment) {
    result = {
      ...result,
      closureInfo: {
        ...closure,
        attachment: { ...closure.attachment, data: blobs.closureAttachment },
      },
    };
  }

  return result;
}
/** /**
* Load all registry entries in a SINGLE lightweight request. * Load all registry entries. Entries are inherently lightweight because
* Uses exportAll({ lightweight: true }) which strips base64 attachment data * base64 blobs are stored in a separate namespace. No SQL stripping needed.
* server-side, reducing payload from potentially 30-60MB to <100KB.
*/ */
export async function getAllEntries( export async function getAllEntries(
storage: RegistryStorage, storage: RegistryStorage,
): Promise<RegistryEntry[]> { ): Promise<RegistryEntry[]> {
const all = await storage.exportAll({ lightweight: true }); const all = await storage.exportAll();
const entries: RegistryEntry[] = []; const entries: RegistryEntry[] = [];
for (const [key, value] of Object.entries(all)) { for (const [key, value] of Object.entries(all)) {
if (key.startsWith(STORAGE_PREFIX) && value) { if (key.startsWith(STORAGE_PREFIX) && value) {
@@ -33,27 +126,98 @@ export async function getAllEntries(
} }
/** /**
* Load a single full entry (with attachment data) for editing. * Load a single full entry WITH attachment data (for editing).
* Loads the lightweight entry + its blob record and merges them.
*/ */
export async function getFullEntry( export async function getFullEntry(
storage: RegistryStorage, storage: RegistryStorage,
blobStorage: RegistryStorage,
id: string, id: string,
): Promise<RegistryEntry | null> { ): Promise<RegistryEntry | null> {
return storage.get<RegistryEntry>(`${STORAGE_PREFIX}${id}`); const entry = await storage.get<RegistryEntry>(`${STORAGE_PREFIX}${id}`);
if (!entry) return null;
const blobs = await blobStorage.get<EntryBlobs>(id);
return mergeBlobs(entry, blobs);
} }
/**
* Save an entry. Strips base64 from the main entry and stores blobs
* in the separate blob namespace so list queries stay fast.
*/
export async function saveEntry( export async function saveEntry(
storage: RegistryStorage, storage: RegistryStorage,
blobStorage: RegistryStorage,
entry: RegistryEntry, entry: RegistryEntry,
): Promise<void> { ): Promise<void> {
await storage.set(`${STORAGE_PREFIX}${entry.id}`, entry); const { stripped, blobs } = extractBlobs(entry);
await storage.set(`${STORAGE_PREFIX}${entry.id}`, stripped);
if (blobs) {
await blobStorage.set(entry.id, blobs);
}
} }
/**
* Delete an entry and its blob data.
*/
export async function deleteEntry( export async function deleteEntry(
storage: RegistryStorage, storage: RegistryStorage,
blobStorage: RegistryStorage,
id: string, id: string,
): Promise<void> { ): Promise<void> {
await storage.delete(`${STORAGE_PREFIX}${id}`); await storage.delete(`${STORAGE_PREFIX}${id}`);
// Clean up blob data (ignore errors if no blobs exist)
await blobStorage.delete(id).catch(() => {});
}
/**
* One-time migration: move base64 data from entries to blob namespace.
* Runs on first load if unmigrated entries exist. After migration,
* entries are inherently lightweight.
*/
export async function migrateEntryBlobs(
storage: RegistryStorage,
blobStorage: RegistryStorage,
): Promise<number> {
// Use lightweight=true so PostgreSQL SQL strips data (works for old entries)
const all = await storage.exportAll({ lightweight: true });
// Check if we already migrated (marker key)
const migrated = await storage.get<boolean>("__blobs_migrated__");
if (migrated) return 0;
// Load full data for entries that need migration
let count = 0;
for (const [key, value] of Object.entries(all)) {
if (!key.startsWith(STORAGE_PREFIX) || !value) continue;
const entry = value as RegistryEntry;
// Check if this entry might have attachments that need migrating
const hasAttachments =
(entry.attachments ?? []).length > 0 || entry.closureInfo?.attachment;
if (!hasAttachments) continue;
// Check if blobs already exist for this entry
const existingBlobs = await blobStorage.get<EntryBlobs>(entry.id);
if (existingBlobs) continue;
// Load the full entry (with base64 data) from DB
const full = await storage.get<RegistryEntry>(
`${STORAGE_PREFIX}${entry.id}`,
);
if (!full) continue;
// Check if there's actually heavy data to extract
const { stripped, blobs } = extractBlobs(full);
if (blobs) {
await storage.set(`${STORAGE_PREFIX}${entry.id}`, stripped);
await blobStorage.set(entry.id, blobs);
count++;
}
}
// Mark migration done
await storage.set("__blobs_migrated__", true);
return count;
} }
const COMPANY_PREFIXES: Record<CompanyId, string> = { const COMPANY_PREFIXES: Record<CompanyId, string> = {