added file domain logic, updated drizzle package

This commit is contained in:
user
2026-03-01 05:56:15 +02:00
parent 1c2584df58
commit 5a5f565377
27 changed files with 5757 additions and 223 deletions

View File

@@ -0,0 +1,132 @@
import { createHash } from "crypto";
import type { DocumentProcessingOptions, FileProcessingResult } from "../data";
/**
* Process documents (PDF, text files, etc.)
*/
export async function processDocument(
buffer: Buffer | Uint8Array,
mimeType: string,
options: DocumentProcessingOptions = {},
): Promise<FileProcessingResult> {
try {
const inputBuffer = Buffer.from(buffer);
const metadata: Record<string, any> = {};
// Basic document information
metadata.original = {
size: inputBuffer.length,
mimeType,
hash: createHash("sha256").update(inputBuffer).digest("hex"),
};
// Process based on document type
if (mimeType === "application/pdf") {
return await processPDF(inputBuffer, options);
} else if (mimeType.startsWith("text/")) {
return await processTextFile(inputBuffer, options);
} else {
return await processGenericDocument(inputBuffer, options);
}
} catch (error) {
return {
processed: false,
error: `Document processing failed: ${error instanceof Error ? error.message : String(error)}`,
};
}
}
async function processPDF(
buffer: Buffer,
options: DocumentProcessingOptions,
): Promise<FileProcessingResult> {
const metadata: Record<string, any> = {
type: "pdf",
processed: true,
};
// In a real implementation, you would use a PDF library like pdf2pic or pdf-parse
// For now, we'll just provide basic processing
if (options.extractMetadata) {
// Extract PDF metadata (page count, author, title, etc.)
metadata.pdf = {
// This would be extracted using a PDF library
pageCount: 1, // Placeholder
title: "Unknown",
author: "Unknown",
creationDate: new Date().toISOString(),
};
}
if (options.extractText) {
// Extract text content from PDF
metadata.textContent = {
extracted: true,
characterCount: 0, // Placeholder
wordCount: 0, // Placeholder
};
}
if (options.generatePreview) {
// Generate thumbnail/preview of first page
metadata.preview = {
generated: true,
format: "png",
};
}
return {
processed: true,
originalFile: buffer,
processedFile: buffer, // PDFs typically don't need processing
metadata,
};
}
async function processTextFile(
buffer: Buffer,
options: DocumentProcessingOptions,
): Promise<FileProcessingResult> {
const text = buffer.toString("utf-8");
const metadata: Record<string, any> = {
type: "text",
processed: true,
};
if (options.extractText || options.extractMetadata) {
const lines = text.split("\n");
const words = text.split(/\s+/).filter((word) => word.length > 0);
metadata.textAnalysis = {
characterCount: text.length,
wordCount: words.length,
lineCount: lines.length,
encoding: "utf-8",
};
}
return {
processed: true,
originalFile: buffer,
processedFile: buffer,
metadata,
};
}
async function processGenericDocument(
buffer: Buffer,
options: DocumentProcessingOptions,
): Promise<FileProcessingResult> {
const metadata: Record<string, any> = {
type: "generic",
processed: true,
};
return {
processed: true,
originalFile: buffer,
processedFile: buffer,
metadata,
};
}

View File

@@ -0,0 +1,286 @@
import sharp from "sharp";
import type { FileProcessingResult, ImageProcessingOptions } from "../data";
/**
* Process images with compression, resizing, format conversion, and thumbnail generation
*/
export async function processImage(
buffer: Buffer | Uint8Array,
options: ImageProcessingOptions = {},
): Promise<FileProcessingResult> {
try {
const inputBuffer = Buffer.isBuffer(buffer)
? buffer
: Buffer.from(buffer);
let processedBuffer = inputBuffer;
let thumbnailBuffer: Buffer | undefined;
const metadata: Record<string, any> = {};
// Initialize Sharp instance
const image = sharp(inputBuffer);
const originalMetadata = await image.metadata();
// Store original metadata
metadata.original = {
width: originalMetadata.width,
height: originalMetadata.height,
format: originalMetadata.format,
size: inputBuffer.length,
colorSpace: originalMetadata.space,
channels: originalMetadata.channels,
density: originalMetadata.density,
hasAlpha: originalMetadata.hasAlpha,
};
// Apply transformations
let transformedImage = image;
// Resize if requested
if (options.resize) {
const { width, height, fit = "cover" } = options.resize;
transformedImage = transformedImage.resize(width, height, {
fit: fit as keyof sharp.FitEnum,
withoutEnlargement: true, // Don't enlarge smaller images
});
metadata.processed = {
...metadata.processed,
resized: true,
targetWidth: width,
targetHeight: height,
fit,
};
}
// Apply format conversion and quality settings
const outputFormat = options.format || "webp";
const quality = options.quality || 85;
switch (outputFormat) {
case "jpeg":
transformedImage = transformedImage.jpeg({
quality,
progressive: true,
mozjpeg: true, // Use mozjpeg encoder for better compression
});
break;
case "png":
transformedImage = transformedImage.png({
quality,
compressionLevel: 9,
progressive: true,
});
break;
case "webp":
transformedImage = transformedImage.webp({
quality,
effort: 6, // Max compression effort
});
break;
case "avif":
transformedImage = transformedImage.avif({
quality,
effort: 6,
});
break;
default:
// Keep original format but apply quality if possible
if (originalMetadata.format === "jpeg") {
transformedImage = transformedImage.jpeg({ quality });
} else if (originalMetadata.format === "png") {
transformedImage = transformedImage.png({ quality });
}
}
// Generate processed image
processedBuffer = await transformedImage.toBuffer();
// Get final metadata
const finalMetadata = await sharp(processedBuffer).metadata();
metadata.processed = {
...metadata.processed,
width: finalMetadata.width,
height: finalMetadata.height,
format: outputFormat,
size: processedBuffer.length,
quality,
compressionRatio: inputBuffer.length / processedBuffer.length,
};
// Generate thumbnail if requested
if (options.generateThumbnail) {
const thumbSize = options.thumbnailSize || {
width: 300,
height: 300,
};
thumbnailBuffer = await sharp(inputBuffer)
.resize(thumbSize.width, thumbSize.height, {
fit: "cover",
position: "center",
})
.webp({ quality: 80 })
.toBuffer();
const thumbMetadata = await sharp(thumbnailBuffer).metadata();
metadata.thumbnail = {
width: thumbMetadata.width,
height: thumbMetadata.height,
format: "webp",
size: thumbnailBuffer.length,
};
}
// Add processing stats
metadata.processing = {
processedAt: new Date().toISOString(),
sizeSaving: inputBuffer.length - processedBuffer.length,
sizeSavingPercentage:
((inputBuffer.length - processedBuffer.length) /
inputBuffer.length) *
100,
processingTime: Date.now(), // You'd measure this properly in production
};
return {
processed: true,
originalFile: inputBuffer,
processedFile: processedBuffer,
thumbnail: thumbnailBuffer,
metadata,
};
} catch (error) {
return {
processed: false,
error: `Image processing failed: ${error instanceof Error ? error.message : String(error)}`,
};
}
}
/**
* Extract image metadata without processing
*/
export async function extractImageMetadata(
buffer: Buffer | Uint8Array,
): Promise<Record<string, any>> {
try {
const inputBuffer = Buffer.isBuffer(buffer)
? buffer
: Buffer.from(buffer);
const image = sharp(inputBuffer);
const metadata = await image.metadata();
return {
width: metadata.width,
height: metadata.height,
format: metadata.format,
size: inputBuffer.length,
colorSpace: metadata.space,
channels: metadata.channels,
density: metadata.density,
hasAlpha: metadata.hasAlpha,
isAnimated: metadata.pages && metadata.pages > 1,
orientation: metadata.orientation,
};
} catch (error) {
throw new Error(
`Failed to extract image metadata: ${error instanceof Error ? error.message : String(error)}`,
);
}
}
/**
* Generate multiple sizes for responsive images
*/
export async function generateResponsiveSizes(
buffer: Buffer | Uint8Array,
sizes: Array<{ name: string; width: number; height?: number }> = [
{ name: "small", width: 400 },
{ name: "medium", width: 800 },
{ name: "large", width: 1200 },
{ name: "xlarge", width: 1920 },
],
): Promise<Record<string, Buffer>> {
const results: Record<string, Buffer> = {};
const inputBuffer = Buffer.isBuffer(buffer) ? buffer : Buffer.from(buffer);
try {
for (const size of sizes) {
const resized = await sharp(inputBuffer)
.resize(size.width, size.height, {
fit: "inside",
withoutEnlargement: true,
})
.webp({ quality: 85 })
.toBuffer();
results[size.name] = resized;
}
return results;
} catch (error) {
throw new Error(
`Failed to generate responsive sizes: ${error instanceof Error ? error.message : String(error)}`,
);
}
}
/**
* Create an optimized avatar image
*/
export async function processAvatar(
buffer: Buffer | Uint8Array,
size: number = 200,
): Promise<Buffer> {
try {
const inputBuffer = Buffer.isBuffer(buffer)
? buffer
: Buffer.from(buffer);
return await sharp(inputBuffer)
.resize(size, size, { fit: "cover", position: "center" })
.webp({ quality: 90 })
.toBuffer();
} catch (error) {
throw new Error(
`Avatar processing failed: ${error instanceof Error ? error.message : String(error)}`,
);
}
}
/**
* Remove EXIF data from images for privacy
*/
export async function stripExifData(
buffer: Buffer | Uint8Array,
): Promise<Buffer> {
try {
const inputBuffer = Buffer.isBuffer(buffer)
? buffer
: Buffer.from(buffer);
return await sharp(inputBuffer)
.rotate() // Auto-rotate based on EXIF, then removes EXIF
.toBuffer();
} catch (error) {
throw new Error(
`EXIF stripping failed: ${error instanceof Error ? error.message : String(error)}`,
);
}
}
/**
* Validate if buffer contains a valid image
*/
export async function validateImage(
buffer: Buffer | Uint8Array,
): Promise<boolean> {
try {
const inputBuffer = Buffer.isBuffer(buffer)
? buffer
: Buffer.from(buffer);
const metadata = await sharp(inputBuffer).metadata();
return !!(metadata.width && metadata.height && metadata.format);
} catch {
return false;
}
}

View File

@@ -0,0 +1,3 @@
export * from "./document-processor";
export * from "./image-processor";
export * from "./video-processor";

View File

@@ -0,0 +1,62 @@
import { createHash } from "crypto";
import type { FileProcessingResult, VideoProcessingOptions } from "../data";
/**
* Process video files (extract metadata, generate thumbnails)
* Note: This is a basic implementation. For production use, you'd want to use FFmpeg
*/
export async function processVideo(
buffer: Buffer | Uint8Array,
mimeType: string,
options: VideoProcessingOptions = {},
): Promise<FileProcessingResult> {
try {
const inputBuffer = Buffer.from(buffer);
const metadata: Record<string, any> = {};
// Basic video information
metadata.original = {
size: inputBuffer.length,
mimeType,
hash: createHash("sha256").update(inputBuffer).digest("hex"),
};
// For a real implementation, you would use FFmpeg through a library like fluent-ffmpeg
// This is a placeholder implementation
if (options.extractMetadata) {
metadata.video = {
// These would be extracted using FFmpeg
duration: 0, // seconds
width: 1920, // placeholder
height: 1080, // placeholder
framerate: 30, // placeholder
bitrate: 5000000, // placeholder
codec: "h264", // placeholder
};
}
if (options.generateThumbnail) {
// Generate video thumbnail at specified timestamp
// This would use FFmpeg to extract a frame
metadata.thumbnail = {
generated: true,
timestamp: options.thumbnailTimestamp || 0,
format: "jpeg",
size: options.thumbnailSize || { width: 640, height: 360 },
};
}
return {
processed: true,
originalFile: inputBuffer,
processedFile: inputBuffer, // Videos are typically not re-encoded during upload
metadata,
};
} catch (error) {
return {
processed: false,
error: `Video processing failed: ${error instanceof Error ? error.message : String(error)}`,
};
}
}