fix: remove raw SQL query that may cause Docker build/runtime issues
Replace the complex `prisma.$queryRaw` JSONB-stripping query with a simple Prisma `findMany` plus JavaScript-based stripping. Now that entries are inherently small (base64 lives in a separate blob namespace), JS-based stripping is instant. Also fix the migration to check the migrated flag before loading entries.
This commit is contained in:
@@ -59,63 +59,15 @@ export async function GET(request: NextRequest) {
|
|||||||
// Get all items in namespace
|
// Get all items in namespace
|
||||||
const result: Record<string, unknown> = {};
|
const result: Record<string, unknown> = {};
|
||||||
|
|
||||||
if (lightweight) {
|
// Use Prisma SELECT with only key+value columns (no id/timestamps)
|
||||||
// Strip heavy base64 fields AT THE DATABASE LEVEL using raw SQL.
|
const items = await prisma.keyValueStore.findMany({
|
||||||
// This prevents PostgreSQL from transferring megabytes of base64
|
where: { namespace },
|
||||||
// data to Node.js — the heavy strings never leave the DB.
|
select: { key: true, value: true },
|
||||||
// Handles: top-level data/fileData/imageUrl + same keys inside array elements.
|
});
|
||||||
const rows = await prisma.$queryRaw<
|
for (const item of items) {
|
||||||
Array<{ key: string; value: unknown }>
|
result[item.key] = lightweight
|
||||||
>`
|
? stripHeavyFields(item.value)
|
||||||
SELECT key,
|
: item.value;
|
||||||
CASE WHEN jsonb_typeof(value) = 'object' THEN (
|
|
||||||
SELECT COALESCE(jsonb_object_agg(
|
|
||||||
k,
|
|
||||||
CASE
|
|
||||||
WHEN k IN ('data', 'fileData', 'imageUrl')
|
|
||||||
AND jsonb_typeof(v) = 'string'
|
|
||||||
AND length(v #>> '{}') > 1024
|
|
||||||
THEN '"__stripped__"'::jsonb
|
|
||||||
WHEN jsonb_typeof(v) = 'array' THEN (
|
|
||||||
SELECT COALESCE(jsonb_agg(
|
|
||||||
CASE
|
|
||||||
WHEN jsonb_typeof(el) = 'object' THEN (
|
|
||||||
SELECT COALESCE(jsonb_object_agg(
|
|
||||||
ek,
|
|
||||||
CASE
|
|
||||||
WHEN ek IN ('data', 'fileData', 'imageUrl')
|
|
||||||
AND jsonb_typeof(ev) = 'string'
|
|
||||||
AND length(ev #>> '{}') > 1024
|
|
||||||
THEN '"__stripped__"'::jsonb
|
|
||||||
ELSE ev
|
|
||||||
END
|
|
||||||
), '{}'::jsonb) FROM jsonb_each(el) AS ie(ek, ev)
|
|
||||||
)
|
|
||||||
ELSE el
|
|
||||||
END
|
|
||||||
), '[]'::jsonb)
|
|
||||||
FROM jsonb_array_elements(v) AS ae(el)
|
|
||||||
)
|
|
||||||
ELSE v
|
|
||||||
END
|
|
||||||
), '{}'::jsonb)
|
|
||||||
FROM jsonb_each(value) AS oe(k, v)
|
|
||||||
)
|
|
||||||
ELSE value
|
|
||||||
END AS value
|
|
||||||
FROM "KeyValueStore"
|
|
||||||
WHERE namespace = ${namespace}
|
|
||||||
`;
|
|
||||||
for (const row of rows) {
|
|
||||||
result[row.key] = row.value;
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
const items = await prisma.keyValueStore.findMany({
|
|
||||||
where: { namespace },
|
|
||||||
});
|
|
||||||
for (const item of items) {
|
|
||||||
result[item.key] = item.value;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
return NextResponse.json({ items: result });
|
return NextResponse.json({ items: result });
|
||||||
|
|||||||
@@ -83,7 +83,10 @@ function mergeBlobs(
|
|||||||
...merged,
|
...merged,
|
||||||
attachments: (merged.attachments ?? []).map((att) => {
|
attachments: (merged.attachments ?? []).map((att) => {
|
||||||
const data = blobs.attachments?.[att.id];
|
const data = blobs.attachments?.[att.id];
|
||||||
if (data && (!att.data || att.data === "" || att.data === "__stripped__")) {
|
if (
|
||||||
|
data &&
|
||||||
|
(!att.data || att.data === "" || att.data === "__stripped__")
|
||||||
|
) {
|
||||||
return { ...att, data };
|
return { ...att, data };
|
||||||
}
|
}
|
||||||
return att;
|
return att;
|
||||||
@@ -178,13 +181,13 @@ export async function migrateEntryBlobs(
|
|||||||
storage: RegistryStorage,
|
storage: RegistryStorage,
|
||||||
blobStorage: RegistryStorage,
|
blobStorage: RegistryStorage,
|
||||||
): Promise<number> {
|
): Promise<number> {
|
||||||
// Use lightweight=true so PostgreSQL SQL strips data (works for old entries)
|
// Check migration flag FIRST to avoid any heavy loading
|
||||||
const all = await storage.exportAll({ lightweight: true });
|
|
||||||
|
|
||||||
// Check if we already migrated (marker key)
|
|
||||||
const migrated = await storage.get<boolean>("__blobs_migrated__");
|
const migrated = await storage.get<boolean>("__blobs_migrated__");
|
||||||
if (migrated) return 0;
|
if (migrated) return 0;
|
||||||
|
|
||||||
|
// Load entries (may be heavy on first migration — runs only once)
|
||||||
|
const all = await storage.exportAll();
|
||||||
|
|
||||||
// Load full data for entries that need migration
|
// Load full data for entries that need migration
|
||||||
let count = 0;
|
let count = 0;
|
||||||
for (const [key, value] of Object.entries(all)) {
|
for (const [key, value] of Object.entries(all)) {
|
||||||
|
|||||||
Reference in New Issue
Block a user