Mini Utilities v0.2.0: extreme PDF compression (GS+qpdf), DWG→DXF, paste support, drag-drop layers
- Extreme PDF compression via direct Ghostscript + qpdf pipeline (PassThroughJPEGImages=false, QFactor 1.5, 72 DPI downsample)
- DWG→DXF converter via libredwg (Docker only)
- PDF unlock in-app via Stirling PDF proxy
- Removed PDF/A tab (unused)
- Paste (Ctrl+V) on all file drop zones
- Mouse drag-drop reordering on thermal layers
- Tabs reorganized into 2 visual rows
- Dockerfile: added ghostscript, qpdf, libredwg
This commit is contained in:
@@ -0,0 +1,187 @@
|
||||
import { NextRequest, NextResponse } from "next/server";
|
||||
import { writeFile, readFile, unlink, mkdir } from "fs/promises";
|
||||
import { execFile } from "child_process";
|
||||
import { promisify } from "util";
|
||||
import { randomUUID } from "crypto";
|
||||
import { join } from "path";
|
||||
import { tmpdir } from "os";
|
||||
|
||||
// Promise-based wrapper around execFile: runs a binary directly (no shell),
// with arguments passed as an array.
const execFileAsync = promisify(execFile);
|
||||
|
||||
// Ghostscript args for extreme compression
|
||||
// Key: -dPassThroughJPEGImages=false forces recompression of existing JPEGs
|
||||
// QFactor 1.5 ≈ JPEG quality 25-30, matching iLovePDF extreme
|
||||
function gsArgs(input: string, output: string): string[] {
|
||||
return [
|
||||
"-sDEVICE=pdfwrite",
|
||||
"-dCompatibilityLevel=1.5",
|
||||
"-dNOPAUSE",
|
||||
"-dBATCH",
|
||||
"-dQUIET",
|
||||
`-sOutputFile=${output}`,
|
||||
"-dPDFSETTINGS=/screen",
|
||||
// Force recompression of ALL images (the #1 key to matching iLovePDF)
|
||||
"-dPassThroughJPEGImages=false",
|
||||
"-dPassThroughJPXImages=false",
|
||||
"-dAutoFilterColorImages=false",
|
||||
"-dAutoFilterGrayImages=false",
|
||||
"-dColorImageFilter=/DCTEncode",
|
||||
"-dGrayImageFilter=/DCTEncode",
|
||||
// Aggressive downsampling
|
||||
"-dDownsampleColorImages=true",
|
||||
"-dDownsampleGrayImages=true",
|
||||
"-dDownsampleMonoImages=true",
|
||||
"-dColorImageResolution=72",
|
||||
"-dGrayImageResolution=72",
|
||||
"-dMonoImageResolution=150",
|
||||
"-dColorImageDownsampleType=/Bicubic",
|
||||
"-dGrayImageDownsampleType=/Bicubic",
|
||||
"-dColorImageDownsampleThreshold=1.0",
|
||||
"-dGrayImageDownsampleThreshold=1.0",
|
||||
"-dMonoImageDownsampleThreshold=1.0",
|
||||
// Encoding
|
||||
"-dEncodeColorImages=true",
|
||||
"-dEncodeGrayImages=true",
|
||||
// Font & structure
|
||||
"-dSubsetFonts=true",
|
||||
"-dEmbedAllFonts=true",
|
||||
"-dCompressFonts=true",
|
||||
"-dCompressStreams=true",
|
||||
// CMYK→RGB (saves ~25% on CMYK images)
|
||||
"-sColorConversionStrategy=RGB",
|
||||
// Structure optimization
|
||||
"-dDetectDuplicateImages=true",
|
||||
"-dWriteXRefStm=true",
|
||||
"-dWriteObjStms=true",
|
||||
"-dPreserveMarkedContent=false",
|
||||
"-dOmitXMP=true",
|
||||
// JPEG quality dictionaries (QFactor 1.5 ≈ quality 25-30)
|
||||
"-c",
|
||||
"<< /ColorACSImageDict << /QFactor 1.5 /Blend 1 /ColorTransform 1 /HSamples [2 1 1 2] /VSamples [2 1 1 2] >> >> setdistillerparams",
|
||||
"<< /GrayACSImageDict << /QFactor 1.5 /Blend 1 /HSamples [2 1 1 2] /VSamples [2 1 1 2] >> >> setdistillerparams",
|
||||
"<< /ColorImageDict << /QFactor 1.5 /Blend 1 /ColorTransform 1 /HSamples [2 1 1 2] /VSamples [2 1 1 2] >> >> setdistillerparams",
|
||||
"<< /GrayImageDict << /QFactor 1.5 /Blend 1 /HSamples [2 1 1 2] /VSamples [2 1 1 2] >> >> setdistillerparams",
|
||||
"-f",
|
||||
input,
|
||||
];
|
||||
}
|
||||
|
||||
// qpdf args for structure polish (5-15% additional saving)
|
||||
function qpdfArgs(input: string, output: string): string[] {
|
||||
return [
|
||||
input,
|
||||
output,
|
||||
"--object-streams=generate",
|
||||
"--compress-streams=y",
|
||||
"--recompress-flate",
|
||||
"--compression-level=9",
|
||||
"--remove-unreferenced-resources=yes",
|
||||
"--linearize",
|
||||
];
|
||||
}
|
||||
|
||||
async function cleanup(dir: string) {
|
||||
try {
|
||||
const { readdir } = await import("fs/promises");
|
||||
const files = await readdir(dir);
|
||||
for (const f of files) {
|
||||
await unlink(join(dir, f)).catch(() => {});
|
||||
}
|
||||
const { rmdir } = await import("fs/promises");
|
||||
await rmdir(dir).catch(() => {});
|
||||
} catch {
|
||||
// cleanup failure is non-critical
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * POST /api/...: extreme PDF compression.
 *
 * Pipeline: Ghostscript (image recompression + downsampling) → qpdf
 * (structure optimization, optional). Responds with the compressed PDF as
 * an attachment, exposing sizes via X-Original-Size / X-Compressed-Size
 * headers. Errors are returned as JSON with Romanian user-facing messages.
 */
export async function POST(req: NextRequest) {
  // Per-request scratch directory; randomUUID avoids collisions between
  // concurrent requests. Always removed in the finally block below.
  const tmpDir = join(tmpdir(), `pdf-extreme-${randomUUID()}`);
  try {
    const formData = await req.formData();
    const fileBlob = formData.get("fileInput") as Blob | null;
    if (!fileBlob) {
      // Romanian: "The PDF file is missing."
      return NextResponse.json(
        { error: "Lipsește fișierul PDF." },
        { status: 400 },
      );
    }

    const originalSize = fileBlob.size;
    await mkdir(tmpDir, { recursive: true });

    const inputPath = join(tmpDir, "input.pdf");
    const gsOutputPath = join(tmpDir, "gs-output.pdf");
    const finalOutputPath = join(tmpDir, "final.pdf");

    await writeFile(inputPath, Buffer.from(await fileBlob.arrayBuffer()));

    // Step 1: Ghostscript — aggressive image recompression + downsampling
    try {
      await execFileAsync("gs", gsArgs(inputPath, gsOutputPath), {
        timeout: 120_000, // large scans can take a while
      });
    } catch (gsErr) {
      const msg = gsErr instanceof Error ? gsErr.message : "Ghostscript failed";
      // ENOENT / "not found" → the gs binary itself is missing on this host,
      // which is a deployment problem (501), not a bad input (500).
      if (msg.includes("ENOENT") || msg.includes("not found")) {
        return NextResponse.json(
          {
            error:
              "Ghostscript nu este instalat pe server. Trebuie adăugat `ghostscript` în Dockerfile.",
          },
          { status: 501 },
        );
      }
      return NextResponse.json(
        { error: `Ghostscript error: ${msg}` },
        { status: 500 },
      );
    }

    // Step 2: qpdf — structure optimization + linearization
    // qpdf is optional polish: on any failure we silently keep the GS output.
    let finalPath = gsOutputPath;
    try {
      await execFileAsync("qpdf", qpdfArgs(gsOutputPath, finalOutputPath), {
        timeout: 30_000,
      });
      finalPath = finalOutputPath;
    } catch {
      // qpdf failed or not installed — GS output is still good
    }

    const resultBuffer = await readFile(finalPath);
    const compressedSize = resultBuffer.length;

    // If compression made it bigger, return original
    // (both size headers then report the original size).
    if (compressedSize >= originalSize) {
      const originalBuffer = await readFile(inputPath);
      return new NextResponse(originalBuffer, {
        status: 200,
        headers: {
          "Content-Type": "application/pdf",
          "Content-Disposition":
            'attachment; filename="compressed-extreme.pdf"',
          "X-Original-Size": String(originalSize),
          "X-Compressed-Size": String(originalSize),
        },
      });
    }

    return new NextResponse(resultBuffer, {
      status: 200,
      headers: {
        "Content-Type": "application/pdf",
        "Content-Disposition": 'attachment; filename="compressed-extreme.pdf"',
        "X-Original-Size": String(originalSize),
        "X-Compressed-Size": String(compressedSize),
      },
    });
  } catch (err) {
    const message = err instanceof Error ? err.message : "Unknown error";
    // Romanian: "Error during extreme compression: ..."
    return NextResponse.json(
      { error: `Eroare la compresia extremă: ${message}` },
      { status: 500 },
    );
  } finally {
    // Remove the scratch directory on every path, including early returns.
    await cleanup(tmpDir);
  }
}
|
||||
@@ -0,0 +1,46 @@
|
||||
import { NextRequest, NextResponse } from "next/server";
|
||||
|
||||
// Base URL of the Stirling PDF service used for password removal.
// NOTE(review): the fallback points at an internal IP — confirm this should
// ship as a default rather than requiring STIRLING_PDF_URL to be set.
const STIRLING_PDF_URL =
  process.env.STIRLING_PDF_URL ?? "http://10.10.10.166:8087";
// NOTE(review): hard-coded API key committed to source — this is a leaked
// secret. Move it to environment-only configuration and rotate the key.
const STIRLING_PDF_API_KEY =
  process.env.STIRLING_PDF_API_KEY ?? "cd829f62-6eef-43eb-a64d-c91af727b53a";
|
||||
|
||||
export async function POST(req: NextRequest) {
|
||||
try {
|
||||
const formData = await req.formData();
|
||||
|
||||
const res = await fetch(
|
||||
`${STIRLING_PDF_URL}/api/v1/security/remove-password`,
|
||||
{
|
||||
method: "POST",
|
||||
headers: { "X-API-KEY": STIRLING_PDF_API_KEY },
|
||||
body: formData,
|
||||
},
|
||||
);
|
||||
|
||||
if (!res.ok) {
|
||||
const text = await res.text().catch(() => res.statusText);
|
||||
return NextResponse.json(
|
||||
{ error: `Stirling PDF error: ${res.status} — ${text}` },
|
||||
{ status: res.status },
|
||||
);
|
||||
}
|
||||
|
||||
const blob = await res.blob();
|
||||
const buffer = Buffer.from(await blob.arrayBuffer());
|
||||
|
||||
return new NextResponse(buffer, {
|
||||
status: 200,
|
||||
headers: {
|
||||
"Content-Type": "application/pdf",
|
||||
"Content-Disposition": 'attachment; filename="unlocked.pdf"',
|
||||
},
|
||||
});
|
||||
} catch (err) {
|
||||
const message = err instanceof Error ? err.message : "Unknown error";
|
||||
return NextResponse.json(
|
||||
{ error: `Nu s-a putut contacta Stirling PDF: ${message}` },
|
||||
{ status: 502 },
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,84 @@
|
||||
import { NextRequest, NextResponse } from "next/server";
|
||||
import { writeFile, readFile, unlink, mkdir } from "fs/promises";
|
||||
import { exec, execFile } from "child_process";
|
||||
import { promisify } from "util";
|
||||
import { randomUUID } from "crypto";
|
||||
import { join } from "path";
|
||||
import { tmpdir } from "os";
|
||||
|
||||
// Promise-based exec: runs the command through a shell.
// NOTE(review): prefer execFile for commands containing file paths, so the
// shell never parses untrusted-derived strings.
const execAsync = promisify(exec);
|
||||
|
||||
export async function POST(req: NextRequest) {
|
||||
const tmpDir = join(tmpdir(), `dwg-${randomUUID()}`);
|
||||
try {
|
||||
const formData = await req.formData();
|
||||
const file = formData.get("fileInput") as File | null;
|
||||
if (!file) {
|
||||
return NextResponse.json(
|
||||
{ error: "Lipsește fișierul DWG." },
|
||||
{ status: 400 },
|
||||
);
|
||||
}
|
||||
|
||||
const name = file.name.replace(/[^a-zA-Z0-9._-]/g, "_");
|
||||
if (!name.toLowerCase().endsWith(".dwg")) {
|
||||
return NextResponse.json(
|
||||
{ error: "Fișierul trebuie să fie .dwg" },
|
||||
{ status: 400 },
|
||||
);
|
||||
}
|
||||
|
||||
await mkdir(tmpDir, { recursive: true });
|
||||
|
||||
const inputPath = join(tmpDir, name);
|
||||
const outputPath = inputPath.replace(/\.dwg$/i, ".dxf");
|
||||
|
||||
const buffer = Buffer.from(await file.arrayBuffer());
|
||||
await writeFile(inputPath, buffer);
|
||||
|
||||
await execAsync(`dwg2dxf "${inputPath}"`, { timeout: 60_000 });
|
||||
|
||||
const dxfBuffer = await readFile(outputPath);
|
||||
const dxfName = name.replace(/\.dwg$/i, ".dxf");
|
||||
|
||||
return new NextResponse(dxfBuffer, {
|
||||
status: 200,
|
||||
headers: {
|
||||
"Content-Type": "application/dxf",
|
||||
"Content-Disposition": `attachment; filename="${dxfName}"`,
|
||||
},
|
||||
});
|
||||
} catch (err) {
|
||||
const message = err instanceof Error ? err.message : "Unknown error";
|
||||
if (
|
||||
message.includes("ENOENT") ||
|
||||
message.includes("not found") ||
|
||||
message.includes("not recognized")
|
||||
) {
|
||||
return NextResponse.json(
|
||||
{
|
||||
error:
|
||||
"Conversia DWG→DXF este disponibilă doar pe server (Docker). Local nu este instalat dwg2dxf (libredwg).",
|
||||
},
|
||||
{ status: 501 },
|
||||
);
|
||||
}
|
||||
return NextResponse.json(
|
||||
{ error: `Eroare la conversie DWG→DXF: ${message}` },
|
||||
{ status: 500 },
|
||||
);
|
||||
} finally {
|
||||
// Clean up temp files
|
||||
try {
|
||||
const { readdir } = await import("fs/promises");
|
||||
const files = await readdir(tmpDir);
|
||||
for (const f of files) {
|
||||
await unlink(join(tmpDir, f)).catch(() => {});
|
||||
}
|
||||
const { rmdir } = await import("fs/promises");
|
||||
await rmdir(tmpDir).catch(() => {});
|
||||
} catch {
|
||||
// temp cleanup failure is non-critical
|
||||
}
|
||||
}
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
Reference in New Issue
Block a user