fix(pdf-compress): remove /screen preset that destroys font encoding

The -dPDFSETTINGS=/screen GS preset overwrites font encoding tables,
producing garbled text in output PDFs. Replace with individual params
that ONLY compress images while preserving fonts intact.

Three quality levels via GS (no Stirling dependency):
- extreme: 100 DPI, QFactor 1.2 (~quality 35)
- high: 150 DPI, QFactor 0.76 (~quality 50)
- balanced: 200 DPI, QFactor 0.4 (~quality 70)

Route all UI modes through the GS endpoint with level parameter.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
AI Assistant
2026-03-13 17:19:42 +02:00
parent 9e73dc3cb9
commit d75fcb1d1c
2 changed files with 102 additions and 47 deletions
+89 -30
View File
@@ -8,58 +8,75 @@ import { tmpdir } from "os";
const execFileAsync = promisify(execFile); const execFileAsync = promisify(execFile);
// Ghostscript args for extreme compression // Ghostscript args for PDF compression.
// Key: -dPassThroughJPEGImages=false forces recompression of existing JPEGs //
// QFactor 1.5 ≈ JPEG quality 25-30, matching iLovePDF extreme // CRITICAL: Do NOT use -dPDFSETTINGS=/screen — it overrides font encoding
function gsArgs(input: string, output: string): string[] { // and produces garbled text. Instead, set each parameter individually so we
// only compress IMAGES while keeping fonts and text intact.
//
// Strategy: recompress all raster images to JPEG at quality ~40-50,
// downsample to 150 DPI, deduplicate, compress streams. Fonts untouched.
function gsArgs(
input: string,
output: string,
level: "extreme" | "high" | "balanced",
): string[] {
// Quality presets — only affect images, never fonts
const presets = {
extreme: { dpi: 100, qfactor: 1.2 }, // ~quality 35, aggressive
high: { dpi: 150, qfactor: 0.76 }, // ~quality 50, good balance
balanced: { dpi: 200, qfactor: 0.4 }, // ~quality 70, minimal loss
};
const { dpi, qfactor } = presets[level];
return [ return [
"-sDEVICE=pdfwrite", "-sDEVICE=pdfwrite",
"-dCompatibilityLevel=1.5", "-dCompatibilityLevel=1.5",
"-dNOPAUSE", "-dNOPAUSE",
"-dBATCH", "-dBATCH",
`-sOutputFile=${output}`, `-sOutputFile=${output}`,
"-dPDFSETTINGS=/screen",
// Force recompression of ALL images (the #1 key to matching iLovePDF) // ── Image recompression (the main size reducer) ──
// Force re-encode of existing JPEGs — without this, GS passes them through
"-dPassThroughJPEGImages=false", "-dPassThroughJPEGImages=false",
"-dPassThroughJPXImages=false", "-dPassThroughJPXImages=false",
// Use DCT (JPEG) for all color/gray images
"-dAutoFilterColorImages=false", "-dAutoFilterColorImages=false",
"-dAutoFilterGrayImages=false", "-dAutoFilterGrayImages=false",
"-dColorImageFilter=/DCTEncode", "-dColorImageFilter=/DCTEncode",
"-dGrayImageFilter=/DCTEncode", "-dGrayImageFilter=/DCTEncode",
// Aggressive downsampling "-dEncodeColorImages=true",
"-dEncodeGrayImages=true",
// ── Downsampling ──
"-dDownsampleColorImages=true", "-dDownsampleColorImages=true",
"-dDownsampleGrayImages=true", "-dDownsampleGrayImages=true",
"-dDownsampleMonoImages=true", "-dDownsampleMonoImages=true",
"-dColorImageResolution=72", `-dColorImageResolution=${dpi}`,
"-dGrayImageResolution=72", `-dGrayImageResolution=${dpi}`,
"-dMonoImageResolution=150", `-dMonoImageResolution=${Math.max(dpi, 200)}`, // mono needs higher DPI
"-dColorImageDownsampleType=/Bicubic", "-dColorImageDownsampleType=/Bicubic",
"-dGrayImageDownsampleType=/Bicubic", "-dGrayImageDownsampleType=/Bicubic",
"-dColorImageDownsampleThreshold=1.0", "-dColorImageDownsampleThreshold=1.0",
"-dGrayImageDownsampleThreshold=1.0", "-dGrayImageDownsampleThreshold=1.0",
"-dMonoImageDownsampleThreshold=1.0", "-dMonoImageDownsampleThreshold=1.0",
// Encoding
"-dEncodeColorImages=true", // ── Font handling — PRESERVE everything ──
"-dEncodeGrayImages=true", "-dSubsetFonts=true", // subset is safe — keeps encoding, reduces size
// Font & structure "-dEmbedAllFonts=true", // ensure all fonts stay embedded
"-dSubsetFonts=true",
"-dEmbedAllFonts=true",
"-dCompressFonts=true", "-dCompressFonts=true",
// ── Structure / stream optimization ──
"-dCompressStreams=true", "-dCompressStreams=true",
// CMYK→RGB (saves ~25% on CMYK images)
"-sColorConversionStrategy=RGB",
// Structure optimization
"-dDetectDuplicateImages=true", "-dDetectDuplicateImages=true",
"-dWriteXRefStm=true", "-sColorConversionStrategy=RGB", // CMYK→RGB saves ~25% on CMYK images
"-dWriteObjStms=true",
"-dPreserveMarkedContent=false", // ── JPEG quality dictionaries ──
"-dOmitXMP=true",
// JPEG quality dictionaries (QFactor 1.5 ≈ quality 25-30)
"-c", "-c",
"<< /ColorACSImageDict << /QFactor 1.5 /Blend 1 /ColorTransform 1 /HSamples [2 1 1 2] /VSamples [2 1 1 2] >> >> setdistillerparams", `<< /ColorACSImageDict << /QFactor ${qfactor} /Blend 1 /ColorTransform 1 /HSamples [2 1 1 2] /VSamples [2 1 1 2] >> >> setdistillerparams`,
"<< /GrayACSImageDict << /QFactor 1.5 /Blend 1 /HSamples [2 1 1 2] /VSamples [2 1 1 2] >> >> setdistillerparams", `<< /GrayACSImageDict << /QFactor ${qfactor} /Blend 1 /HSamples [2 1 1 2] /VSamples [2 1 1 2] >> >> setdistillerparams`,
"<< /ColorImageDict << /QFactor 1.5 /Blend 1 /ColorTransform 1 /HSamples [2 1 1 2] /VSamples [2 1 1 2] >> >> setdistillerparams", `<< /ColorImageDict << /QFactor ${qfactor} /Blend 1 /ColorTransform 1 /HSamples [2 1 1 2] /VSamples [2 1 1 2] >> >> setdistillerparams`,
"<< /GrayImageDict << /QFactor 1.5 /Blend 1 /HSamples [2 1 1 2] /VSamples [2 1 1 2] >> >> setdistillerparams", `<< /GrayImageDict << /QFactor ${qfactor} /Blend 1 /HSamples [2 1 1 2] /VSamples [2 1 1 2] >> >> setdistillerparams`,
"-f", "-f",
input, input,
]; ];
@@ -129,6 +146,42 @@ function extractFileFromMultipart(
return null; return null;
} }
/**
 * Extract a simple text field value from a multipart/form-data body.
 *
 * Scans each part's headers for `name="<fieldName>"` and skips file parts
 * (those carrying a `filename=` attribute). The value is decoded as UTF-8
 * and trimmed.
 *
 * @param raw       Full raw multipart request body.
 * @param boundary  Boundary token (without the leading `--`).
 * @param fieldName Form field name to look up.
 * @returns The trimmed field value ("" for an empty field), or null if the
 *          field is not found.
 */
function extractFieldFromMultipart(
  raw: Buffer,
  boundary: string,
  fieldName: string,
): string | null {
  const boundaryBuf = Buffer.from(`--${boundary}`);
  const headerSep = Buffer.from("\r\n\r\n");
  const crlf = Buffer.from("\r\n");
  const namePattern = `name="${fieldName}"`;
  let searchFrom = 0;
  while (searchFrom < raw.length) {
    const partStart = raw.indexOf(boundaryBuf, searchFrom);
    if (partStart === -1) break;
    // End of the boundary line — part headers begin right after it
    const lineEnd = raw.indexOf(crlf, partStart);
    if (lineEnd === -1) break;
    const headerEnd = raw.indexOf(headerSep, lineEnd);
    if (headerEnd === -1) break;
    const headers = raw.subarray(lineEnd + 2, headerEnd).toString("utf8");
    if (headers.includes(namePattern) && !headers.includes("filename=")) {
      const valueStart = headerEnd + 4;
      const nextBoundary = raw.indexOf(Buffer.from(`\r\n--${boundary}`), valueStart);
      // >= (not >) so an empty-but-present field yields "" rather than null
      if (nextBoundary >= valueStart) {
        return raw.subarray(valueStart, nextBoundary).toString("utf8").trim();
      }
    }
    searchFrom = headerEnd + 4;
  }
  return null;
}
async function cleanup(dir: string) { async function cleanup(dir: string) {
try { try {
const { readdir } = await import("fs/promises"); const { readdir } = await import("fs/promises");
@@ -186,14 +239,20 @@ export async function POST(req: NextRequest) {
); );
} }
// Extract compression level from multipart (optional "level" field)
const levelParam = extractFieldFromMultipart(rawBuf, boundary, "level");
const level: "extreme" | "high" | "balanced" =
levelParam === "high" ? "high" :
levelParam === "balanced" ? "balanced" : "extreme";
await writeFile(inputPath, pdfBuffer); await writeFile(inputPath, pdfBuffer);
const originalSize = pdfBuffer.length; const originalSize = pdfBuffer.length;
// Step 1: Ghostscript — aggressive image recompression + downsampling // Step 1: Ghostscript — image recompression + downsampling (fonts untouched)
try { try {
const { stderr } = await execFileAsync( const { stderr } = await execFileAsync(
"gs", "gs",
gsArgs(inputPath, gsOutputPath), gsArgs(inputPath, gsOutputPath, level),
{ {
timeout: 300_000, // 5 min for very large files timeout: 300_000, // 5 min for very large files
maxBuffer: 10 * 1024 * 1024, // 10MB stderr buffer maxBuffer: 10 * 1024 * 1024, // 10MB stderr buffer
@@ -1662,15 +1662,10 @@ function PdfReducer() {
try { try {
const formData = new FormData(); const formData = new FormData();
formData.append("fileInput", file); formData.append("fileInput", file);
// All modes use the GS endpoint with a level parameter
formData.append("level", mode === "extreme" ? "extreme" : mode === "max" ? "high" : "balanced");
let endpoint = "/api/compress-pdf"; const res = await fetch("/api/compress-pdf/extreme", {
if (mode === "extreme") {
endpoint = "/api/compress-pdf/extreme";
} else {
formData.append("optimizeLevel", mode === "max" ? "4" : "2");
}
const res = await fetch(endpoint, {
method: "POST", method: "POST",
body: formData, body: formData,
}); });
@@ -1765,17 +1760,18 @@ function PdfReducer() {
className="mt-1 w-full rounded-md border bg-background px-3 py-2 text-sm" className="mt-1 w-full rounded-md border bg-background px-3 py-2 text-sm"
> >
<option value="extreme"> <option value="extreme">
Compresie extremă multi-pass (recomandat) Extremă imagini 100 DPI, calitate scăzută
</option> </option>
<option value="max">Compresie maximă single-pass</option> <option value="max">Puternică imagini 150 DPI, calitate medie (recomandat)</option>
<option value="balanced">Echilibrat</option> <option value="balanced">Echilibrată imagini 200 DPI, calitate bună</option>
</select> </select>
{mode === "extreme" && (
<p className="text-xs text-muted-foreground"> <p className="text-xs text-muted-foreground">
Aplică compresie maximă în mai multe treceri succesive. Durează mai {mode === "extreme"
mult dar reduce semnificativ dimensiunea. ? "Reduce maxim dimensiunea. Imaginile pot pierde detalii fine."
: mode === "max"
? "Balanță bună între dimensiune și calitate. Recomandat pentru majoritatea fișierelor."
: "Pierdere minimă de calitate. Ideal pentru documente cu grafice detaliate."}
</p> </p>
)}
</div> </div>
<Button onClick={handleCompress} disabled={!file || loading}> <Button onClick={handleCompress} disabled={!file || loading}>