fix: remove raw SQL query that may cause Docker build/runtime issues
Replace complex `prisma.$queryRaw` raw SQL with simple Prisma findMany + JS stripping. Now that entries are inherently small (base64 in separate blob namespace), JS-based stripping is instant. Also fix migration to check flag before loading.
This commit is contained in:
@@ -59,63 +59,15 @@ export async function GET(request: NextRequest) {
|
||||
// Get all items in namespace
|
||||
const result: Record<string, unknown> = {};
|
||||
|
||||
if (lightweight) {
|
||||
// Strip heavy base64 fields AT THE DATABASE LEVEL using raw SQL.
|
||||
// This prevents PostgreSQL from transferring megabytes of base64
|
||||
// data to Node.js — the heavy strings never leave the DB.
|
||||
// Handles: top-level data/fileData/imageUrl + same keys inside array elements.
|
||||
const rows = await prisma.$queryRaw<
|
||||
Array<{ key: string; value: unknown }>
|
||||
>`
|
||||
SELECT key,
|
||||
CASE WHEN jsonb_typeof(value) = 'object' THEN (
|
||||
SELECT COALESCE(jsonb_object_agg(
|
||||
k,
|
||||
CASE
|
||||
WHEN k IN ('data', 'fileData', 'imageUrl')
|
||||
AND jsonb_typeof(v) = 'string'
|
||||
AND length(v #>> '{}') > 1024
|
||||
THEN '"__stripped__"'::jsonb
|
||||
WHEN jsonb_typeof(v) = 'array' THEN (
|
||||
SELECT COALESCE(jsonb_agg(
|
||||
CASE
|
||||
WHEN jsonb_typeof(el) = 'object' THEN (
|
||||
SELECT COALESCE(jsonb_object_agg(
|
||||
ek,
|
||||
CASE
|
||||
WHEN ek IN ('data', 'fileData', 'imageUrl')
|
||||
AND jsonb_typeof(ev) = 'string'
|
||||
AND length(ev #>> '{}') > 1024
|
||||
THEN '"__stripped__"'::jsonb
|
||||
ELSE ev
|
||||
END
|
||||
), '{}'::jsonb) FROM jsonb_each(el) AS ie(ek, ev)
|
||||
)
|
||||
ELSE el
|
||||
END
|
||||
), '[]'::jsonb)
|
||||
FROM jsonb_array_elements(v) AS ae(el)
|
||||
)
|
||||
ELSE v
|
||||
END
|
||||
), '{}'::jsonb)
|
||||
FROM jsonb_each(value) AS oe(k, v)
|
||||
)
|
||||
ELSE value
|
||||
END AS value
|
||||
FROM "KeyValueStore"
|
||||
WHERE namespace = ${namespace}
|
||||
`;
|
||||
for (const row of rows) {
|
||||
result[row.key] = row.value;
|
||||
}
|
||||
} else {
|
||||
const items = await prisma.keyValueStore.findMany({
|
||||
where: { namespace },
|
||||
});
|
||||
for (const item of items) {
|
||||
result[item.key] = item.value;
|
||||
}
|
||||
// Use Prisma SELECT with only key+value columns (no id/timestamps)
|
||||
const items = await prisma.keyValueStore.findMany({
|
||||
where: { namespace },
|
||||
select: { key: true, value: true },
|
||||
});
|
||||
for (const item of items) {
|
||||
result[item.key] = lightweight
|
||||
? stripHeavyFields(item.value)
|
||||
: item.value;
|
||||
}
|
||||
|
||||
return NextResponse.json({ items: result });
|
||||
|
||||
@@ -83,7 +83,10 @@ function mergeBlobs(
|
||||
...merged,
|
||||
attachments: (merged.attachments ?? []).map((att) => {
|
||||
const data = blobs.attachments?.[att.id];
|
||||
if (data && (!att.data || att.data === "" || att.data === "__stripped__")) {
|
||||
if (
|
||||
data &&
|
||||
(!att.data || att.data === "" || att.data === "__stripped__")
|
||||
) {
|
||||
return { ...att, data };
|
||||
}
|
||||
return att;
|
||||
@@ -178,13 +181,13 @@ export async function migrateEntryBlobs(
|
||||
storage: RegistryStorage,
|
||||
blobStorage: RegistryStorage,
|
||||
): Promise<number> {
|
||||
// Use lightweight=true so PostgreSQL SQL strips data (works for old entries)
|
||||
const all = await storage.exportAll({ lightweight: true });
|
||||
|
||||
// Check if we already migrated (marker key)
|
||||
// Check migration flag FIRST to avoid any heavy loading
|
||||
const migrated = await storage.get<boolean>("__blobs_migrated__");
|
||||
if (migrated) return 0;
|
||||
|
||||
// Load entries (may be heavy on first migration — runs only once)
|
||||
const all = await storage.exportAll();
|
||||
|
||||
// Load full data for entries that need migration
|
||||
let count = 0;
|
||||
for (const [key, value] of Object.entries(all)) {
|
||||
|
||||
Reference in New Issue
Block a user