added file domain logic, updated drizzle package

This commit is contained in:
user
2026-03-01 05:56:15 +02:00
parent 1c2584df58
commit 5a5f565377
27 changed files with 5757 additions and 223 deletions

View File

@@ -0,0 +1,484 @@
import {
CopyObjectCommand,
DeleteObjectCommand,
GetObjectCommand,
HeadObjectCommand,
PutObjectCommand,
S3Client,
} from "@aws-sdk/client-s3";
import type {
FileMetadata,
FileUploadConfig,
PresignedUrlResult,
UploadOptions,
UploadResult,
} from "./data";
import {
generateFileHash,
generateObjectKey,
isDocumentFile,
isImageFile,
isVideoFile,
} from "./utils";
import { processDocument } from "./processors/document-processor";
import { getSignedUrl } from "@aws-sdk/s3-request-presigner";
import { processVideo } from "./processors/video-processor";
import { processImage } from "./processors/image-processor";
import { ERROR_CODES, type Result } from "@pkg/result";
import { getError, logger } from "@pkg/logger";
import { validateFile } from "./validation";
export class R2StorageClient {
    private s3Client: S3Client;
    private config: FileUploadConfig;

    constructor(config: FileUploadConfig) {
        this.config = config;
        // Cloudflare R2 is S3-compatible: the standard AWS SDK client is
        // pointed at the configured R2 endpoint with static credentials.
        this.s3Client = new S3Client({
            region: config.region,
            endpoint: config.endpoint,
            credentials: {
                accessKeyId: config.accessKey,
                secretAccessKey: config.secretKey,
            },
        });
    }

    /**
     * Upload a file directly to R2.
     *
     * Validates the file, optionally runs type-specific processing
     * (image compression/thumbnailing, document/video metadata extraction),
     * stores the possibly-processed bytes plus any generated thumbnail, and
     * returns the resulting file metadata.
     *
     * @param file Raw file bytes.
     * @param originalName Client-supplied filename (only its extension is reused).
     * @param mimeType Declared MIME type; validated against the config.
     * @param userId Owner of the file; becomes part of the object key.
     * @param options Visibility, tags, extra metadata and processing flags.
     */
    async uploadFile(
        file: Buffer | Uint8Array,
        originalName: string,
        mimeType: string,
        userId: string,
        options?: UploadOptions,
    ): Promise<Result<UploadResult>> {
        try {
            // Reject files violating size/type/extension rules up front.
            const validationResult = validateFile(
                file,
                originalName,
                mimeType,
                this.config,
            );
            if (!validationResult.isValid) {
                return {
                    error: getError({
                        code: ERROR_CODES.VALIDATION_ERROR,
                        message: "File validation failed",
                        description: validationResult.errors.join(", "),
                        detail: "File validation failed",
                    }),
                };
            }

            // Content hash of the ORIGINAL bytes, used for deduplication.
            const hash = generateFileHash(file);

            // Unique storage name. The extension is lowercased so object
            // keys are consistent regardless of how the client cased it.
            const fileId = crypto.randomUUID();
            const extension = (originalName.split(".").pop() || "").toLowerCase();
            const filename = `${fileId}.${extension}`;
            const objectKey = generateObjectKey(userId, filename);

            let processedFile = Buffer.from(file);
            let thumbnailBuffer: Buffer | undefined;
            let metadata: Record<string, any> = { ...(options?.metadata ?? {}) };
            // Tracks whether any processing step actually ran, so the stored
            // "processed" flag is truthful (previously hard-coded "true").
            let wasProcessed = false;

            // Run type-specific processing when requested by the caller.
            if (options?.processImage && isImageFile(mimeType)) {
                const processingResult = await processImage(file, {
                    format: "webp",
                    quality: 85,
                    generateThumbnail: true,
                    thumbnailSize: { width: 300, height: 300 },
                    resize: {
                        width: 1920,
                        height: 1920,
                        fit: "inside",
                    },
                });
                if (
                    processingResult.processed &&
                    processingResult.processedFile
                ) {
                    processedFile = Buffer.from(processingResult.processedFile);
                    thumbnailBuffer = processingResult.thumbnail
                        ? Buffer.from(processingResult.thumbnail)
                        : undefined;
                    metadata = { ...metadata, ...processingResult.metadata };
                    wasProcessed = true;
                }
            } else if (options?.processDocument && isDocumentFile(mimeType)) {
                const processingResult = await processDocument(file, mimeType, {
                    extractText: true,
                    generatePreview: true,
                    extractMetadata: true,
                });
                if (processingResult.processed && processingResult.metadata) {
                    metadata = { ...metadata, ...processingResult.metadata };
                    wasProcessed = true;
                }
            } else if (options?.processVideo && isVideoFile(mimeType)) {
                const processingResult = await processVideo(file, mimeType, {
                    generateThumbnail: true,
                    extractMetadata: true,
                    thumbnailTimestamp: 1, // 1 second into video
                });
                if (processingResult.processed && processingResult.metadata) {
                    metadata = { ...metadata, ...processingResult.metadata };
                    wasProcessed = true;
                }
            }

            // Upload the (possibly processed) bytes. S3 user-metadata values
            // must be strings, so non-string values are JSON-encoded.
            const uploadCommand = new PutObjectCommand({
                Bucket: this.config.bucketName,
                Key: objectKey,
                Body: processedFile,
                ContentType: mimeType,
                Metadata: {
                    originalName,
                    userId,
                    hash,
                    uploadId: fileId,
                    processed: String(wasProcessed),
                    ...Object.fromEntries(
                        Object.entries(metadata).map(([key, value]) => [
                            key,
                            typeof value === "string"
                                ? value
                                : JSON.stringify(value),
                        ]),
                    ),
                },
            });
            await this.s3Client.send(uploadCommand);

            // Store the thumbnail (if one was generated) under a parallel key.
            if (thumbnailBuffer) {
                const thumbnailKey = `thumbnails/${userId}/${fileId}_thumb.webp`;
                const thumbnailCommand = new PutObjectCommand({
                    Bucket: this.config.bucketName,
                    Key: thumbnailKey,
                    Body: thumbnailBuffer,
                    ContentType: "image/webp",
                    Metadata: {
                        originalFileId: fileId,
                        type: "thumbnail",
                    },
                });
                await this.s3Client.send(thumbnailCommand);
                metadata.thumbnailKey = thumbnailKey;
            }

            // Prefer the public URL when configured; fall back to the raw endpoint.
            const r2Url = `${this.config.publicUrl || this.config.endpoint}/${objectKey}`;
            const fileMetadata: FileMetadata = {
                id: fileId,
                filename,
                originalName,
                mimeType,
                size: processedFile.length,
                hash,
                bucketName: this.config.bucketName,
                objectKey,
                r2Url,
                visibility: options?.visibility || "private",
                userId,
                metadata,
                tags: options?.tags,
                uploadedAt: new Date(),
            };
            const result: UploadResult = {
                success: true,
                file: fileMetadata,
                uploadId: fileId,
            };
            logger.info(
                `Successfully uploaded file ${fileId} for user ${userId}`,
            );
            return { data: result };
        } catch (error) {
            logger.error("File upload failed:", error);
            return {
                error: getError(
                    {
                        code: ERROR_CODES.STORAGE_ERROR,
                        message: "Upload failed",
                        description: "Failed to upload file to storage",
                        detail: "S3 upload operation failed",
                    },
                    error,
                ),
            };
        }
    }

    /**
     * Generate a presigned URL clients can PUT to directly.
     *
     * @param objectKey Destination key in the bucket.
     * @param mimeType Content type the client must upload with.
     * @param expiresIn URL lifetime in seconds (default one hour).
     */
    async generatePresignedUploadUrl(
        objectKey: string,
        mimeType: string,
        expiresIn: number = 3600,
    ): Promise<Result<PresignedUrlResult>> {
        try {
            const command = new PutObjectCommand({
                Bucket: this.config.bucketName,
                Key: objectKey,
                ContentType: mimeType,
            });
            const uploadUrl = await getSignedUrl(this.s3Client, command, {
                expiresIn,
            });
            const result: PresignedUrlResult = {
                uploadUrl,
                expiresIn,
            };
            logger.info(`Generated presigned URL for ${objectKey}`);
            return { data: result };
        } catch (error) {
            logger.error("Failed to generate presigned URL:", error);
            return {
                error: getError(
                    {
                        code: ERROR_CODES.STORAGE_ERROR,
                        message: "Failed to generate presigned URL",
                        description: "Could not create upload URL",
                        detail: "S3 presigned URL generation failed",
                    },
                    error,
                ),
            };
        }
    }

    /**
     * Fetch a file's contents from R2 as a single Buffer.
     *
     * Handles the different Body shapes the SDK may return: raw bytes, an
     * SDK stream with transformToByteArray(), or a Node readable stream.
     */
    async getFile(objectKey: string): Promise<Result<Buffer>> {
        try {
            const command = new GetObjectCommand({
                Bucket: this.config.bucketName,
                Key: objectKey,
            });
            const response = await this.s3Client.send(command);
            const body = response.Body;
            if (!body) {
                return {
                    error: getError({
                        code: ERROR_CODES.NOT_FOUND,
                        message: "File not found",
                        description: "The requested file does not exist",
                        detail: "S3 response body is empty",
                    }),
                };
            }
            // Already-materialized bytes.
            if (body instanceof Uint8Array) {
                return { data: Buffer.from(body) };
            }
            // AWS SDK v3 SdkStream helper.
            if (typeof body.transformToByteArray === "function") {
                const byteArray = await body.transformToByteArray();
                return { data: Buffer.from(byteArray) };
            }
            // Fallback: consume as a Node readable stream. Stream errors are
            // resolved into a Result error instead of rejecting, so callers
            // always get the documented Result<Buffer> shape (previously the
            // returned promise rejected, bypassing the outer catch).
            const chunks: Buffer[] = [];
            const stream = body as NodeJS.ReadableStream;
            return await new Promise<Result<Buffer>>((resolve) => {
                stream.on("data", (chunk: Buffer) => {
                    chunks.push(chunk);
                });
                stream.on("end", () => {
                    const buffer = Buffer.concat(chunks);
                    logger.info(`Successfully retrieved file ${objectKey}`);
                    resolve({ data: buffer });
                });
                stream.on("error", (error) => {
                    logger.error(`Failed to get file ${objectKey}:`, error);
                    resolve({
                        error: getError(
                            {
                                code: ERROR_CODES.STORAGE_ERROR,
                                message: "Failed to get file",
                                description:
                                    "Could not retrieve file from storage",
                                detail: "S3 response stream failed",
                            },
                            error,
                        ),
                    });
                });
            });
        } catch (error) {
            logger.error(`Failed to get file ${objectKey}:`, error);
            return {
                error: getError(
                    {
                        code: ERROR_CODES.STORAGE_ERROR,
                        message: "Failed to get file",
                        description: "Could not retrieve file from storage",
                        detail: "S3 get operation failed",
                    },
                    error,
                ),
            };
        }
    }

    /**
     * Delete a file from R2. Resolves `{ data: true }` on success.
     */
    async deleteFile(objectKey: string): Promise<Result<boolean>> {
        try {
            const command = new DeleteObjectCommand({
                Bucket: this.config.bucketName,
                Key: objectKey,
            });
            await this.s3Client.send(command);
            logger.info(`Successfully deleted file ${objectKey}`);
            return { data: true };
        } catch (error) {
            logger.error(`Failed to delete file ${objectKey}:`, error);
            return {
                error: getError(
                    {
                        code: ERROR_CODES.STORAGE_ERROR,
                        message: "Failed to delete file",
                        description: "Could not delete file from storage",
                        detail: "S3 delete operation failed",
                    },
                    error,
                ),
            };
        }
    }

    /**
     * Get size, last-modified, content type and user metadata for an object
     * via a HEAD request (no body transfer).
     */
    async getFileMetadata(
        objectKey: string,
    ): Promise<Result<Record<string, any>>> {
        try {
            const command = new HeadObjectCommand({
                Bucket: this.config.bucketName,
                Key: objectKey,
            });
            const response = await this.s3Client.send(command);
            const metadata = {
                size: response.ContentLength,
                lastModified: response.LastModified,
                contentType: response.ContentType,
                metadata: response.Metadata || {},
            };
            logger.info(`Successfully retrieved metadata for ${objectKey}`);
            return { data: metadata };
        } catch (error) {
            logger.error(
                `Failed to get file metadata for ${objectKey}:`,
                error,
            );
            return {
                error: getError(
                    {
                        code: ERROR_CODES.STORAGE_ERROR,
                        message: "Failed to get file metadata",
                        description: "Could not retrieve file information",
                        detail: "S3 head operation failed",
                    },
                    error,
                ),
            };
        }
    }

    /**
     * Check whether an object exists. A 404/NotFound from HEAD resolves to
     * `{ data: false }`; any other failure is a storage error.
     */
    async fileExists(objectKey: string): Promise<Result<boolean>> {
        try {
            const command = new HeadObjectCommand({
                Bucket: this.config.bucketName,
                Key: objectKey,
            });
            await this.s3Client.send(command);
            return { data: true };
        } catch (error: any) {
            if (
                error.name === "NotFound" ||
                error.$metadata?.httpStatusCode === 404
            ) {
                return { data: false };
            }
            logger.error(
                `Failed to check file existence for ${objectKey}:`,
                error,
            );
            return {
                error: getError(
                    {
                        code: ERROR_CODES.STORAGE_ERROR,
                        message: "Failed to check file existence",
                        description: "Could not verify if file exists",
                        detail: "S3 head operation failed",
                    },
                    error,
                ),
            };
        }
    }

    /**
     * Server-side copy of an object within the bucket (no download/upload).
     */
    async copyFile(
        sourceKey: string,
        destinationKey: string,
    ): Promise<Result<boolean>> {
        try {
            const command = new CopyObjectCommand({
                Bucket: this.config.bucketName,
                Key: destinationKey,
                CopySource: `${this.config.bucketName}/${sourceKey}`,
            });
            await this.s3Client.send(command);
            logger.info(
                `Successfully copied file from ${sourceKey} to ${destinationKey}`,
            );
            return { data: true };
        } catch (error) {
            logger.error(
                `Failed to copy file from ${sourceKey} to ${destinationKey}:`,
                error,
            );
            return {
                error: getError(
                    {
                        code: ERROR_CODES.STORAGE_ERROR,
                        message: "Failed to copy file",
                        description: "Could not copy file in storage",
                        detail: "S3 copy operation failed",
                    },
                    error,
                ),
            };
        }
    }
}

View File

@@ -0,0 +1,154 @@
import * as v from "valibot";
// File Upload Configuration Schema — bucket connection/credential settings
// plus the validation limits enforced on every upload.
export const fileUploadConfigSchema = v.object({
    bucketName: v.string(),
    region: v.string(),
    endpoint: v.string(),
    accessKey: v.string(),
    secretKey: v.string(),
    publicUrl: v.optional(v.string()), // public base URL; endpoint is used when absent
    maxFileSize: v.pipe(v.number(), v.integer()), // in bytes
    allowedMimeTypes: v.array(v.string()),
    allowedExtensions: v.array(v.string()), // extensions without the leading dot
});
export type FileUploadConfig = v.InferOutput<typeof fileUploadConfigSchema>;
// File Visibility Schema — who may read a stored object.
export const fileVisibilitySchema = v.picklist([
    "public",
    "private",
    "restricted",
]);
export type FileVisibility = v.InferOutput<typeof fileVisibilitySchema>;
// File Metadata Schema — the record describing one stored file (as returned
// by the storage client after a successful upload).
export const fileMetadataSchema = v.object({
    id: v.string(),
    filename: v.string(), // generated storage filename (uuid + extension)
    originalName: v.string(), // client-supplied filename
    mimeType: v.string(),
    size: v.pipe(v.number(), v.integer()), // bytes of the stored (processed) file
    hash: v.string(), // sha256 of the original bytes, for deduplication
    bucketName: v.string(),
    objectKey: v.string(),
    r2Url: v.string(),
    visibility: fileVisibilitySchema,
    userId: v.string(),
    metadata: v.optional(v.record(v.string(), v.any())), // processing results + caller extras
    tags: v.optional(v.array(v.string())),
    uploadedAt: v.date(),
    expiresAt: v.optional(v.date()),
});
export type FileMetadata = v.InferOutput<typeof fileMetadataSchema>;
// Upload Result Schema — outcome of an upload attempt.
export const uploadResultSchema = v.object({
    success: v.boolean(),
    file: v.optional(fileMetadataSchema),
    error: v.optional(v.string()),
    uploadId: v.optional(v.string()),
});
export type UploadResult = v.InferOutput<typeof uploadResultSchema>;
// Presigned URL Result Schema — URL(s) for direct client upload/download.
export const presignedUrlResultSchema = v.object({
    uploadUrl: v.string(),
    downloadUrl: v.optional(v.string()),
    expiresIn: v.pipe(v.number(), v.integer()), // seconds
    fields: v.optional(v.record(v.string(), v.any())),
});
export type PresignedUrlResult = v.InferOutput<typeof presignedUrlResultSchema>;
// File Validation Result Schema — errors block the upload, warnings don't.
export const fileValidationResultSchema = v.object({
    isValid: v.boolean(),
    errors: v.array(v.string()),
    warnings: v.array(v.string()),
});
export type FileValidationResult = v.InferOutput<
    typeof fileValidationResultSchema
>;
// Image Resize Options Schema — target dimensions and fit strategy.
export const imageResizeOptionsSchema = v.object({
    width: v.optional(v.pipe(v.number(), v.integer())),
    height: v.optional(v.pipe(v.number(), v.integer())),
    fit: v.optional(
        v.picklist(["cover", "contain", "fill", "inside", "outside"]),
    ),
});
export type ImageResizeOptions = v.InferOutput<typeof imageResizeOptionsSchema>;
// Thumbnail Size Schema — thumbnail dimensions in pixels.
export const thumbnailSizeSchema = v.object({
    width: v.pipe(v.number(), v.integer()),
    height: v.pipe(v.number(), v.integer()),
});
export type ThumbnailSize = v.InferOutput<typeof thumbnailSizeSchema>;
// Image Processing Options Schema — resize/convert/thumbnail controls.
export const imageProcessingOptionsSchema = v.object({
    resize: v.optional(imageResizeOptionsSchema),
    format: v.optional(v.picklist(["jpeg", "png", "webp", "avif"])),
    quality: v.optional(v.pipe(v.number(), v.integer())),
    generateThumbnail: v.optional(v.boolean()),
    thumbnailSize: v.optional(thumbnailSizeSchema),
});
export type ImageProcessingOptions = v.InferOutput<
    typeof imageProcessingOptionsSchema
>;
// File Processing Result Schema — common result shape of all processors.
export const fileProcessingResultSchema = v.object({
    processed: v.boolean(),
    originalFile: v.optional(v.instance(Uint8Array)), // Buffer equivalent
    processedFile: v.optional(v.instance(Uint8Array)), // Buffer equivalent
    thumbnail: v.optional(v.instance(Uint8Array)), // Buffer equivalent
    metadata: v.optional(v.record(v.string(), v.any())),
    error: v.optional(v.string()),
});
export type FileProcessingResult = v.InferOutput<
    typeof fileProcessingResultSchema
>;
// File Security Result Schema (mirrors the interface in utils.ts).
export const fileSecurityResultSchema = v.object({
    isSecure: v.boolean(),
    issues: v.array(v.string()),
    warnings: v.array(v.string()),
});
export type FileSecurityResult = v.InferOutput<typeof fileSecurityResultSchema>;
// Document Processing Options Schema — which document steps to run.
export const documentProcessingOptionsSchema = v.object({
    extractText: v.optional(v.boolean()),
    generatePreview: v.optional(v.boolean()),
    extractMetadata: v.optional(v.boolean()),
    validateStructure: v.optional(v.boolean()),
});
export type DocumentProcessingOptions = v.InferOutput<
    typeof documentProcessingOptionsSchema
>;
// Video Processing Options Schema — which video steps to run.
export const videoProcessingOptionsSchema = v.object({
    generateThumbnail: v.optional(v.boolean()),
    extractMetadata: v.optional(v.boolean()),
    thumbnailTimestamp: v.optional(v.number()), // Seconds into video for thumbnail
    thumbnailSize: v.optional(thumbnailSizeSchema),
});
export type VideoProcessingOptions = v.InferOutput<
    typeof videoProcessingOptionsSchema
>;
// Upload Options Schema (used in client.ts) — per-upload behavior flags.
export const uploadOptionsSchema = v.object({
    visibility: v.optional(fileVisibilitySchema),
    metadata: v.optional(v.record(v.string(), v.any())),
    tags: v.optional(v.array(v.string())),
    processImage: v.optional(v.boolean()),
    processDocument: v.optional(v.boolean()),
    processVideo: v.optional(v.boolean()),
});
export type UploadOptions = v.InferOutput<typeof uploadOptionsSchema>;

View File

@@ -0,0 +1,132 @@
import { createHash } from "crypto";
import type { DocumentProcessingOptions, FileProcessingResult } from "../data";
/**
 * Process documents (PDF, text files, etc.).
 *
 * Dispatches to a type-specific processor and merges the base information
 * about the original bytes (size, MIME type, sha256 hash) into the result's
 * metadata. (Previously this base metadata was computed but discarded.)
 *
 * @param buffer Raw document bytes.
 * @param mimeType Declared MIME type, used to pick the processor.
 * @param options Which extraction/preview steps to run.
 */
export async function processDocument(
    buffer: Buffer | Uint8Array,
    mimeType: string,
    options: DocumentProcessingOptions = {},
): Promise<FileProcessingResult> {
    try {
        const inputBuffer = Buffer.from(buffer);
        // Base information about the original document, merged below.
        const baseMetadata: Record<string, any> = {
            original: {
                size: inputBuffer.length,
                mimeType,
                hash: createHash("sha256").update(inputBuffer).digest("hex"),
            },
        };
        // Pick the type-specific processor.
        let result: FileProcessingResult;
        if (mimeType === "application/pdf") {
            result = await processPDF(inputBuffer, options);
        } else if (mimeType.startsWith("text/")) {
            result = await processTextFile(inputBuffer, options);
        } else {
            result = await processGenericDocument(inputBuffer, options);
        }
        return {
            ...result,
            metadata: { ...baseMetadata, ...(result.metadata ?? {}) },
        };
    } catch (error) {
        return {
            processed: false,
            error: `Document processing failed: ${error instanceof Error ? error.message : String(error)}`,
        };
    }
}
/**
 * Placeholder PDF processing: records which steps ran and stub metadata.
 * A production implementation would use a PDF library (pdf-parse, pdf2pic).
 */
async function processPDF(
    buffer: Buffer,
    options: DocumentProcessingOptions,
): Promise<FileProcessingResult> {
    const metadata: Record<string, any> = { type: "pdf", processed: true };

    if (options.extractMetadata) {
        // Would be read from the PDF's info dictionary by a real library.
        metadata.pdf = {
            pageCount: 1, // Placeholder
            title: "Unknown",
            author: "Unknown",
            creationDate: new Date().toISOString(),
        };
    }

    if (options.extractText) {
        // Text extraction stub — counts would come from the parsed content.
        metadata.textContent = {
            extracted: true,
            characterCount: 0, // Placeholder
            wordCount: 0, // Placeholder
        };
    }

    if (options.generatePreview) {
        // First-page preview stub.
        metadata.preview = { generated: true, format: "png" };
    }

    // PDFs are stored as-is; no transformation is applied.
    return {
        processed: true,
        originalFile: buffer,
        processedFile: buffer,
        metadata,
    };
}
/**
 * Process a plain-text document: when text or metadata extraction is
 * requested, computes character/word/line counts for the UTF-8 decoded
 * content. The stored bytes are never modified.
 */
async function processTextFile(
    buffer: Buffer,
    options: DocumentProcessingOptions,
): Promise<FileProcessingResult> {
    const content = buffer.toString("utf-8");
    const metadata: Record<string, any> = { type: "text", processed: true };

    if (options.extractText || options.extractMetadata) {
        const wordCount = content
            .split(/\s+/)
            .filter((word) => word.length > 0).length;
        metadata.textAnalysis = {
            characterCount: content.length,
            wordCount,
            lineCount: content.split("\n").length,
            encoding: "utf-8",
        };
    }

    return {
        processed: true,
        originalFile: buffer,
        processedFile: buffer,
        metadata,
    };
}
/**
 * Fallback for document types without a dedicated processor; the file is
 * passed through untouched. `options` is accepted for signature parity with
 * the other processors but is currently unused.
 */
async function processGenericDocument(
    buffer: Buffer,
    options: DocumentProcessingOptions,
): Promise<FileProcessingResult> {
    return {
        processed: true,
        originalFile: buffer,
        processedFile: buffer,
        metadata: { type: "generic", processed: true },
    };
}

View File

@@ -0,0 +1,286 @@
import sharp from "sharp";
import type { FileProcessingResult, ImageProcessingOptions } from "../data";
/**
 * Process images with compression, resizing, format conversion, and
 * thumbnail generation.
 *
 * @param buffer Raw image bytes.
 * @param options Resize/format/quality/thumbnail controls; defaults to
 *                webp at quality 85 with no resize and no thumbnail.
 * @returns A FileProcessingResult; on failure `processed` is false and
 *          `error` describes the problem (never throws).
 */
export async function processImage(
    buffer: Buffer | Uint8Array,
    options: ImageProcessingOptions = {},
): Promise<FileProcessingResult> {
    try {
        // Start the clock so metadata.processing reports a real duration.
        const startedAt = Date.now();
        const inputBuffer = Buffer.isBuffer(buffer)
            ? buffer
            : Buffer.from(buffer);
        let thumbnailBuffer: Buffer | undefined;
        const metadata: Record<string, any> = {};

        const image = sharp(inputBuffer);
        const originalMetadata = await image.metadata();

        // Record the source image's properties before any transformation.
        metadata.original = {
            width: originalMetadata.width,
            height: originalMetadata.height,
            format: originalMetadata.format,
            size: inputBuffer.length,
            colorSpace: originalMetadata.space,
            channels: originalMetadata.channels,
            density: originalMetadata.density,
            hasAlpha: originalMetadata.hasAlpha,
        };

        let transformedImage = image;

        // Resize if requested (never enlarging smaller images).
        if (options.resize) {
            const { width, height, fit = "cover" } = options.resize;
            transformedImage = transformedImage.resize(width, height, {
                fit: fit as keyof sharp.FitEnum,
                withoutEnlargement: true,
            });
            metadata.processed = {
                ...metadata.processed,
                resized: true,
                targetWidth: width,
                targetHeight: height,
                fit,
            };
        }

        // Apply format conversion and quality settings (default: webp@85).
        const outputFormat = options.format || "webp";
        const quality = options.quality || 85;
        switch (outputFormat) {
            case "jpeg":
                transformedImage = transformedImage.jpeg({
                    quality,
                    progressive: true,
                    mozjpeg: true, // mozjpeg encoder for better compression
                });
                break;
            case "png":
                transformedImage = transformedImage.png({
                    quality,
                    compressionLevel: 9,
                    progressive: true,
                });
                break;
            case "webp":
                transformedImage = transformedImage.webp({
                    quality,
                    effort: 6, // Max compression effort
                });
                break;
            case "avif":
                transformedImage = transformedImage.avif({
                    quality,
                    effort: 6,
                });
                break;
            default:
                // Keep original format but apply quality if possible.
                if (originalMetadata.format === "jpeg") {
                    transformedImage = transformedImage.jpeg({ quality });
                } else if (originalMetadata.format === "png") {
                    transformedImage = transformedImage.png({ quality });
                }
        }

        // Render the processed image and record its final properties.
        const processedBuffer = await transformedImage.toBuffer();
        const finalMetadata = await sharp(processedBuffer).metadata();
        metadata.processed = {
            ...metadata.processed,
            width: finalMetadata.width,
            height: finalMetadata.height,
            format: outputFormat,
            size: processedBuffer.length,
            quality,
            compressionRatio: inputBuffer.length / processedBuffer.length,
        };

        // Generate a center-cropped webp thumbnail if requested.
        if (options.generateThumbnail) {
            const thumbSize = options.thumbnailSize || {
                width: 300,
                height: 300,
            };
            thumbnailBuffer = await sharp(inputBuffer)
                .resize(thumbSize.width, thumbSize.height, {
                    fit: "cover",
                    position: "center",
                })
                .webp({ quality: 80 })
                .toBuffer();
            const thumbMetadata = await sharp(thumbnailBuffer).metadata();
            metadata.thumbnail = {
                width: thumbMetadata.width,
                height: thumbMetadata.height,
                format: "webp",
                size: thumbnailBuffer.length,
            };
        }

        // Processing statistics. processingTime is now an actual elapsed
        // duration in milliseconds (previously it stored a raw Date.now()
        // timestamp, which was not a duration at all).
        metadata.processing = {
            processedAt: new Date().toISOString(),
            sizeSaving: inputBuffer.length - processedBuffer.length,
            sizeSavingPercentage:
                ((inputBuffer.length - processedBuffer.length) /
                    inputBuffer.length) *
                100,
            processingTime: Date.now() - startedAt,
        };

        return {
            processed: true,
            originalFile: inputBuffer,
            processedFile: processedBuffer,
            thumbnail: thumbnailBuffer,
            metadata,
        };
    } catch (error) {
        return {
            processed: false,
            error: `Image processing failed: ${error instanceof Error ? error.message : String(error)}`,
        };
    }
}
/**
 * Read image metadata via sharp without transforming the image.
 *
 * @throws Error when the buffer cannot be decoded as an image.
 */
export async function extractImageMetadata(
    buffer: Buffer | Uint8Array,
): Promise<Record<string, any>> {
    const source = Buffer.isBuffer(buffer) ? buffer : Buffer.from(buffer);
    try {
        const meta = await sharp(source).metadata();
        return {
            width: meta.width,
            height: meta.height,
            format: meta.format,
            size: source.length,
            colorSpace: meta.space,
            channels: meta.channels,
            density: meta.density,
            hasAlpha: meta.hasAlpha,
            isAnimated: meta.pages && meta.pages > 1,
            orientation: meta.orientation,
        };
    } catch (error) {
        throw new Error(
            `Failed to extract image metadata: ${error instanceof Error ? error.message : String(error)}`,
        );
    }
}
/**
 * Generate a set of downscaled webp renditions for responsive delivery.
 * Renditions are produced sequentially and never enlarged beyond the
 * source dimensions.
 *
 * @param sizes Named target widths (optional heights); defaults to
 *              small/medium/large/xlarge breakpoints.
 * @throws Error when any rendition fails to encode.
 */
export async function generateResponsiveSizes(
    buffer: Buffer | Uint8Array,
    sizes: Array<{ name: string; width: number; height?: number }> = [
        { name: "small", width: 400 },
        { name: "medium", width: 800 },
        { name: "large", width: 1200 },
        { name: "xlarge", width: 1920 },
    ],
): Promise<Record<string, Buffer>> {
    const source = Buffer.isBuffer(buffer) ? buffer : Buffer.from(buffer);
    const renditions: Record<string, Buffer> = {};
    try {
        for (const { name, width, height } of sizes) {
            renditions[name] = await sharp(source)
                .resize(width, height, {
                    fit: "inside",
                    withoutEnlargement: true,
                })
                .webp({ quality: 85 })
                .toBuffer();
        }
        return renditions;
    } catch (error) {
        throw new Error(
            `Failed to generate responsive sizes: ${error instanceof Error ? error.message : String(error)}`,
        );
    }
}
/**
 * Produce a square, center-cropped webp avatar.
 *
 * @param size Edge length in pixels (default 200).
 * @throws Error when the buffer cannot be processed as an image.
 */
export async function processAvatar(
    buffer: Buffer | Uint8Array,
    size: number = 200,
): Promise<Buffer> {
    const source = Buffer.isBuffer(buffer) ? buffer : Buffer.from(buffer);
    try {
        const avatar = sharp(source)
            .resize(size, size, { fit: "cover", position: "center" })
            .webp({ quality: 90 });
        return await avatar.toBuffer();
    } catch (error) {
        throw new Error(
            `Avatar processing failed: ${error instanceof Error ? error.message : String(error)}`,
        );
    }
}
/**
 * Re-encode an image without its EXIF data for privacy.
 * rotate() first applies any EXIF orientation so the pixels still display
 * correctly after the metadata is gone.
 */
export async function stripExifData(
    buffer: Buffer | Uint8Array,
): Promise<Buffer> {
    const source = Buffer.isBuffer(buffer) ? buffer : Buffer.from(buffer);
    try {
        return await sharp(source).rotate().toBuffer();
    } catch (error) {
        throw new Error(
            `EXIF stripping failed: ${error instanceof Error ? error.message : String(error)}`,
        );
    }
}
/**
 * Check whether the buffer decodes as an image with known dimensions and
 * format. Never throws; undecodable input yields false.
 */
export async function validateImage(
    buffer: Buffer | Uint8Array,
): Promise<boolean> {
    try {
        const source = Buffer.isBuffer(buffer) ? buffer : Buffer.from(buffer);
        const { width, height, format } = await sharp(source).metadata();
        return Boolean(width && height && format);
    } catch {
        return false;
    }
}

View File

@@ -0,0 +1,3 @@
// Barrel module: re-exports every file processor so callers can import from
// "./processors" instead of the individual files.
export * from "./document-processor";
export * from "./image-processor";
export * from "./video-processor";

View File

@@ -0,0 +1,62 @@
import { createHash } from "crypto";
import type { FileProcessingResult, VideoProcessingOptions } from "../data";
/**
 * Process video files (extract metadata, generate thumbnails).
 * Note: placeholder implementation — a production version would shell out
 * to FFmpeg (e.g. via fluent-ffmpeg) for real probing and frame extraction.
 */
export async function processVideo(
    buffer: Buffer | Uint8Array,
    mimeType: string,
    options: VideoProcessingOptions = {},
): Promise<FileProcessingResult> {
    try {
        const source = Buffer.from(buffer);
        // Information derived from the raw bytes themselves.
        const metadata: Record<string, any> = {
            original: {
                size: source.length,
                mimeType,
                hash: createHash("sha256").update(source).digest("hex"),
            },
        };

        if (options.extractMetadata) {
            // Placeholder values — FFmpeg probing would fill these in.
            metadata.video = {
                duration: 0,
                width: 1920,
                height: 1080,
                framerate: 30,
                bitrate: 5000000,
                codec: "h264",
            };
        }

        if (options.generateThumbnail) {
            // Placeholder — FFmpeg would extract a frame at the timestamp.
            metadata.thumbnail = {
                generated: true,
                timestamp: options.thumbnailTimestamp || 0,
                format: "jpeg",
                size: options.thumbnailSize || { width: 640, height: 360 },
            };
        }

        // Videos are not re-encoded during upload; pass the bytes through.
        return {
            processed: true,
            originalFile: source,
            processedFile: source,
            metadata,
        };
    } catch (error) {
        return {
            processed: false,
            error: `Video processing failed: ${error instanceof Error ? error.message : String(error)}`,
        };
    }
}

View File

@@ -0,0 +1,186 @@
import { lookup } from "mime-types";
import { createHash } from "crypto";
/**
 * Compute the sha256 hex digest of a file's bytes, used for deduplication.
 */
export function generateFileHash(buffer: Buffer | Uint8Array): string {
    const digest = createHash("sha256");
    digest.update(buffer);
    return digest.digest("hex");
}
/**
 * Generate a unique filename: `[user8_]<timestamp>_<sanitizedBase>_<uuid>.<ext>`.
 *
 * @param originalName Client-supplied filename; its base is sanitized and
 *                     truncated to 50 chars, its extension lowercased.
 * @param userId Optional owner; the first 8 chars become a prefix.
 */
export function generateUniqueFilename(
    originalName: string,
    userId?: string,
): string {
    const fileId = crypto.randomUUID();
    const timestamp = Date.now();
    const extension = getFileExtension(originalName);
    // Strip the extension from the END of the name. The previous
    // `replace(`.${extension}`, "")` removed the first, case-sensitive
    // match, which failed for upper-case extensions ("Report.PDF" kept
    // ".PDF" in the base) and names with repeated segments ("a.tar.tar").
    const baseName = (
        extension
            ? originalName.slice(0, originalName.length - extension.length - 1)
            : originalName
    ).slice(0, 50); // Limit length
    const sanitizedBaseName = sanitizeFilename(baseName);
    const userPrefix = userId ? `${userId.slice(0, 8)}_` : "";
    return `${userPrefix}${timestamp}_${sanitizedBaseName}_${fileId}.${extension}`;
}
/**
 * Sanitize filename for safe storage: replaces unsafe characters with
 * underscores, collapses runs, trims edge underscores, and lowercases.
 */
export function sanitizeFilename(filename: string): string {
    return filename
        .replace(/[^a-zA-Z0-9._-]/g, "_") // Replace unsafe characters
        .replace(/_{2,}/g, "_") // Remove multiple underscores
        .replace(/^_+|_+$/g, "") // Remove leading/trailing underscores
        .toLowerCase();
}
/**
 * Get the lowercased file extension (without the dot); "" when there is none.
 */
export function getFileExtension(filename: string): string {
    const parts = filename.split(".");
    return parts.length > 1 ? parts.pop()!.toLowerCase() : "";
}
/**
 * Resolve a MIME type from a filename's extension via the mime-types
 * database; null when the extension is unknown.
 */
export function getMimeTypeFromFilename(filename: string): string | null {
    const resolved = lookup(filename);
    return resolved || null;
}
/**
 * Format a byte count in human readable form ("1.5 KB", "3 MB", ...).
 *
 * @param bytes Byte count; non-positive values render as "0 Bytes".
 */
export function formatFileSize(bytes: number): string {
    const sizes = ["Bytes", "KB", "MB", "GB", "TB"];
    // Guard non-positive input: log() of 0/negatives is -Infinity/NaN and
    // previously produced output like "NaN undefined".
    if (bytes <= 0) return "0 Bytes";
    // Clamp the unit index so sub-byte values (negative exponent) and
    // petabyte-scale values (beyond the table) don't index out of range.
    const exponent = Math.floor(Math.log(bytes) / Math.log(1024));
    const i = Math.min(Math.max(exponent, 0), sizes.length - 1);
    return `${Math.round((bytes / Math.pow(1024, i)) * 100) / 100} ${sizes[i]}`;
}
/**
 * True when the MIME type belongs to the image/* family.
 */
export function isImageFile(mimeType: string): boolean {
    return /^image\//.test(mimeType);
}
/**
 * True when the MIME type belongs to the video/* family.
 */
export function isVideoFile(mimeType: string): boolean {
    return /^video\//.test(mimeType);
}
/**
 * True for common document MIME types (PDF, Office formats, RTF) and for
 * anything in the text/* family.
 */
export function isDocumentFile(mimeType: string): boolean {
    if (mimeType.startsWith("text/")) {
        return true;
    }
    const documentTypes = new Set([
        "application/pdf",
        "application/msword",
        "application/vnd.openxmlformats-officedocument.wordprocessingml.document",
        "application/vnd.ms-excel",
        "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
        "text/plain",
        "text/csv",
        "application/rtf",
    ]);
    return documentTypes.has(mimeType);
}
/**
 * Generate the R2 object key for a stored file:
 * `<category>/<userId>/<YYYY-MM-DD>/<filename>`.
 *
 * @param category Top-level folder; defaults to "uploads".
 */
export function generateObjectKey(
    userId: string,
    filename: string,
    category: string = "uploads",
): string {
    const date = new Date();
    const year = date.getFullYear();
    const month = String(date.getMonth() + 1).padStart(2, "0");
    const day = String(date.getDate()).padStart(2, "0");
    // Fix: the key previously ended with the literal text "$(unknown)"
    // instead of interpolating the filename, so every upload by a user on a
    // given day collapsed onto the same (broken) key.
    return `${category}/${userId}/${year}-${month}-${day}/${filename}`;
}
/**
 * Result of the lightweight upload security checks: `issues` block the
 * file, `warnings` are advisory only.
 */
export interface FileSecurityResult {
    isSecure: boolean;
    issues: string[];
    warnings: string[];
}

/**
 * Run security heuristics on an uploaded file: dangerous executable
 * extensions, path-traversal / separator / null-byte filenames, empty
 * content, and extension/MIME-type disagreement (warning only).
 */
export function validateFileSecurity(
    buffer: Buffer | Uint8Array,
    filename: string,
    mimeType: string,
): FileSecurityResult {
    const issues: string[] = [];
    const warnings: string[] = [];

    // Executable / installer extensions we refuse outright.
    const dangerousExtensions = [
        "exe", "bat", "cmd", "com", "pif", "scr", "vbs",
        "js", "jar", "app", "deb", "pkg", "dmg", "rpm",
    ];
    const extension = getFileExtension(filename);
    if (dangerousExtensions.includes(extension)) {
        issues.push(`Potentially dangerous file extension: .${extension}`);
    }

    // Path separators and ".." have no business in a bare filename.
    const hasTraversal =
        filename.includes("..") ||
        filename.includes("/") ||
        filename.includes("\\");
    if (hasTraversal) {
        issues.push("Filename contains path traversal characters");
    }

    if (filename.includes("\0")) {
        issues.push("Filename contains null bytes");
    }

    // Basic DoS / garbage protection.
    if (buffer.length === 0) {
        issues.push("File is empty");
    }

    // Warn (don't block) when the declared MIME type disagrees with the one
    // implied by the extension — possible MIME-type spoofing.
    const expectedMimeType = getMimeTypeFromFilename(filename);
    if (expectedMimeType && expectedMimeType !== mimeType) {
        warnings.push(
            `MIME type mismatch: expected ${expectedMimeType}, got ${mimeType}`,
        );
    }

    return {
        isSecure: issues.length === 0,
        issues,
        warnings,
    };
}

View File

@@ -0,0 +1,81 @@
import type { FileUploadConfig, FileValidationResult } from "./data";
import { lookup } from "mime-types";
/**
 * Validate an uploaded file against the configured limits: size, MIME type,
 * extension, emptiness and (for images) magic-byte signature. A MIME type
 * that disagrees with the filename's extension is only a warning.
 *
 * @returns Errors block the upload; warnings are advisory.
 */
export function validateFile(
    file: Buffer | Uint8Array,
    originalName: string,
    mimeType: string,
    config: FileUploadConfig,
): FileValidationResult {
    const errors: string[] = [];
    const warnings: string[] = [];

    // Check file size.
    if (file.length > config.maxFileSize) {
        errors.push(
            `File size ${file.length} exceeds maximum allowed size of ${config.maxFileSize} bytes`,
        );
    }

    // Check MIME type.
    if (!config.allowedMimeTypes.includes(mimeType)) {
        errors.push(
            `MIME type ${mimeType} is not allowed. Allowed types: ${config.allowedMimeTypes.join(", ")}`,
        );
    }

    // Check file extension. Handle the missing-extension case explicitly:
    // previously it was interpolated as the nonsensical ".undefined".
    const extension = originalName.split(".").pop()?.toLowerCase();
    if (!extension) {
        errors.push(
            `File has no extension. Allowed extensions: ${config.allowedExtensions.join(", ")}`,
        );
    } else if (!config.allowedExtensions.includes(extension)) {
        errors.push(
            `File extension .${extension} is not allowed. Allowed extensions: ${config.allowedExtensions.join(", ")}`,
        );
    }

    // Warn when the declared MIME type disagrees with the extension.
    const expectedMimeType = lookup(originalName);
    if (expectedMimeType && expectedMimeType !== mimeType) {
        warnings.push(
            `MIME type ${mimeType} doesn't match expected type ${expectedMimeType} for file ${originalName}`,
        );
    }

    // Check for empty file.
    if (file.length === 0) {
        errors.push("File is empty");
    }

    // Cheap magic-byte check for declared image types.
    if (mimeType.startsWith("image/")) {
        const isValidImage = validateImageSignature(file, mimeType);
        if (!isValidImage) {
            errors.push("Invalid image file signature");
        }
    }

    return {
        isValid: errors.length === 0,
        errors,
        warnings,
    };
}
/**
 * Check the file's leading magic bytes against the declared image MIME
 * type. Image types without a known signature are accepted.
 */
function validateImageSignature(
    file: Buffer | Uint8Array,
    mimeType: string,
): boolean {
    const buffer = Buffer.from(file);
    // Leading signatures of the common image formats.
    const signatures = {
        "image/jpeg": [0xff, 0xd8, 0xff],
        "image/png": [0x89, 0x50, 0x4e, 0x47],
        "image/gif": [0x47, 0x49, 0x46],
        "image/webp": [0x52, 0x49, 0x46, 0x46], // "RIFF"
    };
    const signature = signatures[mimeType as keyof typeof signatures];
    if (!signature) return true; // Skip validation for unknown types
    if (!signature.every((byte, index) => buffer[index] === byte)) {
        return false;
    }
    // Fix: "RIFF" alone also matches WAV/AVI containers. A real WebP file
    // additionally carries "WEBP" at bytes 8-11.
    if (mimeType === "image/webp") {
        return buffer.subarray(8, 12).toString("ascii") === "WEBP";
    }
    return true;
}