diff --git a/src/app/api/compress-pdf/extreme/route.ts b/src/app/api/compress-pdf/extreme/route.ts index 675645f..3070afc 100644 --- a/src/app/api/compress-pdf/extreme/route.ts +++ b/src/app/api/compress-pdf/extreme/route.ts @@ -8,58 +8,75 @@ import { tmpdir } from "os"; const execFileAsync = promisify(execFile); -// Ghostscript args for extreme compression -// Key: -dPassThroughJPEGImages=false forces recompression of existing JPEGs -// QFactor 1.5 ≈ JPEG quality 25-30, matching iLovePDF extreme -function gsArgs(input: string, output: string): string[] { +// Ghostscript args for PDF compression. +// +// CRITICAL: Do NOT use -dPDFSETTINGS=/screen — it overrides font encoding +// and produces garbled text. Instead, set each parameter individually so we +// only compress IMAGES while keeping fonts and text intact. +// +// Strategy: recompress all raster images to JPEG at quality ~40-50, +// downsample to 150 DPI, deduplicate, compress streams. Fonts untouched. +function gsArgs( + input: string, + output: string, + level: "extreme" | "high" | "balanced", +): string[] { + // Quality presets — only affect images, never fonts + const presets = { + extreme: { dpi: 100, qfactor: 1.2 }, // ~quality 35, aggressive + high: { dpi: 150, qfactor: 0.76 }, // ~quality 50, good balance + balanced: { dpi: 200, qfactor: 0.4 }, // ~quality 70, minimal loss + }; + const { dpi, qfactor } = presets[level]; + return [ "-sDEVICE=pdfwrite", "-dCompatibilityLevel=1.5", "-dNOPAUSE", "-dBATCH", `-sOutputFile=${output}`, - "-dPDFSETTINGS=/screen", - // Force recompression of ALL images (the #1 key to matching iLovePDF) + + // ── Image recompression (the main size reducer) ── + // Force re-encode of existing JPEGs — without this, GS passes them through "-dPassThroughJPEGImages=false", "-dPassThroughJPXImages=false", + // Use DCT (JPEG) for all color/gray images "-dAutoFilterColorImages=false", "-dAutoFilterGrayImages=false", "-dColorImageFilter=/DCTEncode", "-dGrayImageFilter=/DCTEncode", - // 
Aggressive downsampling + "-dEncodeColorImages=true", + "-dEncodeGrayImages=true", + + // ── Downsampling ── "-dDownsampleColorImages=true", "-dDownsampleGrayImages=true", "-dDownsampleMonoImages=true", - "-dColorImageResolution=72", - "-dGrayImageResolution=72", - "-dMonoImageResolution=150", + `-dColorImageResolution=${dpi}`, + `-dGrayImageResolution=${dpi}`, + `-dMonoImageResolution=${Math.max(dpi, 200)}`, // mono needs higher DPI "-dColorImageDownsampleType=/Bicubic", "-dGrayImageDownsampleType=/Bicubic", "-dColorImageDownsampleThreshold=1.0", "-dGrayImageDownsampleThreshold=1.0", "-dMonoImageDownsampleThreshold=1.0", - // Encoding - "-dEncodeColorImages=true", - "-dEncodeGrayImages=true", - // Font & structure - "-dSubsetFonts=true", - "-dEmbedAllFonts=true", + + // ── Font handling — PRESERVE everything ── + "-dSubsetFonts=true", // subset is safe — keeps encoding, reduces size + "-dEmbedAllFonts=true", // ensure all fonts stay embedded "-dCompressFonts=true", + + // ── Structure / stream optimization ── "-dCompressStreams=true", - // CMYK→RGB (saves ~25% on CMYK images) - "-sColorConversionStrategy=RGB", - // Structure optimization "-dDetectDuplicateImages=true", - "-dWriteXRefStm=true", - "-dWriteObjStms=true", - "-dPreserveMarkedContent=false", - "-dOmitXMP=true", - // JPEG quality dictionaries (QFactor 1.5 ≈ quality 25-30) + "-sColorConversionStrategy=RGB", // CMYK→RGB saves ~25% on CMYK images + + // ── JPEG quality dictionaries ── "-c", - "<< /ColorACSImageDict << /QFactor 1.5 /Blend 1 /ColorTransform 1 /HSamples [2 1 1 2] /VSamples [2 1 1 2] >> >> setdistillerparams", - "<< /GrayACSImageDict << /QFactor 1.5 /Blend 1 /HSamples [2 1 1 2] /VSamples [2 1 1 2] >> >> setdistillerparams", - "<< /ColorImageDict << /QFactor 1.5 /Blend 1 /ColorTransform 1 /HSamples [2 1 1 2] /VSamples [2 1 1 2] >> >> setdistillerparams", - "<< /GrayImageDict << /QFactor 1.5 /Blend 1 /HSamples [2 1 1 2] /VSamples [2 1 1 2] >> >> setdistillerparams", + `<< /ColorACSImageDict << 
/QFactor ${qfactor} /Blend 1 /ColorTransform 1 /HSamples [2 1 1 2] /VSamples [2 1 1 2] >> >> setdistillerparams`, + `<< /GrayACSImageDict << /QFactor ${qfactor} /Blend 1 /HSamples [2 1 1 2] /VSamples [2 1 1 2] >> >> setdistillerparams`, + `<< /ColorImageDict << /QFactor ${qfactor} /Blend 1 /ColorTransform 1 /HSamples [2 1 1 2] /VSamples [2 1 1 2] >> >> setdistillerparams`, + `<< /GrayImageDict << /QFactor ${qfactor} /Blend 1 /HSamples [2 1 1 2] /VSamples [2 1 1 2] >> >> setdistillerparams`, "-f", input, ]; @@ -129,6 +146,42 @@ function extractFileFromMultipart( return null; } +/** + * Extract a simple text field value from a multipart body. + * Returns null if the field is not found. + */ +function extractFieldFromMultipart( + raw: Buffer, + boundary: string, + fieldName: string, +): string | null { + const boundaryBuf = Buffer.from(`--${boundary}`); + const headerSep = Buffer.from("\r\n\r\n"); + const crlf = Buffer.from("\r\n"); + const namePattern = `name="${fieldName}"`; + + let searchFrom = 0; + while (searchFrom < raw.length) { + const partStart = raw.indexOf(boundaryBuf, searchFrom); + if (partStart === -1) break; + const lineEnd = raw.indexOf(crlf, partStart); + if (lineEnd === -1) break; + const headerEnd = raw.indexOf(headerSep, lineEnd); + if (headerEnd === -1) break; + + const headers = raw.subarray(lineEnd + 2, headerEnd).toString("utf8"); + if (headers.includes(namePattern) && !headers.includes("filename=")) { + const valueStart = headerEnd + 4; + const nextBoundary = raw.indexOf(Buffer.from(`\r\n--${boundary}`), valueStart); + if (nextBoundary > valueStart) { + return raw.subarray(valueStart, nextBoundary).toString("utf8").trim(); + } + } + searchFrom = headerEnd + 4; + } + return null; +} + async function cleanup(dir: string) { try { const { readdir } = await import("fs/promises"); @@ -186,14 +239,20 @@ export async function POST(req: NextRequest) { ); } + // Extract compression level from multipart (optional "level" field) + const levelParam = 
extractFieldFromMultipart(rawBuf, boundary, "level"); + const level: "extreme" | "high" | "balanced" = + levelParam === "high" ? "high" : + levelParam === "balanced" ? "balanced" : "extreme"; + await writeFile(inputPath, pdfBuffer); const originalSize = pdfBuffer.length; - // Step 1: Ghostscript — aggressive image recompression + downsampling + // Step 1: Ghostscript — image recompression + downsampling (fonts untouched) try { const { stderr } = await execFileAsync( "gs", - gsArgs(inputPath, gsOutputPath), + gsArgs(inputPath, gsOutputPath, level), { timeout: 300_000, // 5 min for very large files maxBuffer: 10 * 1024 * 1024, // 10MB stderr buffer diff --git a/src/modules/mini-utilities/components/mini-utilities-module.tsx b/src/modules/mini-utilities/components/mini-utilities-module.tsx index a06771f..8f2706c 100644 --- a/src/modules/mini-utilities/components/mini-utilities-module.tsx +++ b/src/modules/mini-utilities/components/mini-utilities-module.tsx @@ -1662,15 +1662,10 @@ function PdfReducer() { try { const formData = new FormData(); formData.append("fileInput", file); + // All modes use the GS endpoint with a level parameter + formData.append("level", mode === "extreme" ? "extreme" : mode === "max" ? "high" : "balanced"); - let endpoint = "/api/compress-pdf"; - if (mode === "extreme") { - endpoint = "/api/compress-pdf/extreme"; - } else { - formData.append("optimizeLevel", mode === "max" ? "4" : "2"); - } - - const res = await fetch(endpoint, { + const res = await fetch("/api/compress-pdf/extreme", { method: "POST", body: formData, }); @@ -1765,17 +1760,18 @@ function PdfReducer() { className="mt-1 w-full rounded-md border bg-background px-3 py-2 text-sm" > - - + + - {mode === "extreme" && ( -
-            <p className="mt-1 text-xs text-muted-foreground">
-              Aplică compresie maximă în mai multe treceri succesive. Durează mai
-              mult dar reduce semnificativ dimensiunea.
-            </p>
-          )}
+          <p className="mt-1 text-xs text-muted-foreground">
+            {mode === "extreme"
+              ? "Reduce maxim dimensiunea. Imaginile pot pierde detalii fine."
+              : mode === "max"
+                ? "Balanță bună între dimensiune și calitate. Recomandat pentru majoritatea fișierelor."
+                : "Pierdere minimă de calitate. Ideal pentru documente cu grafice detaliate."}
+          </p>