Add file domain logic; update drizzle-orm and drizzle-kit packages

This commit is contained in:
user
2026-03-01 05:56:15 +02:00
parent 1c2584df58
commit 5a5f565377
27 changed files with 5757 additions and 223 deletions

View File

@@ -0,0 +1,55 @@
-- Migration: file domain.
-- Adds "file" (one row per object stored in R2) and "file_access"
-- (per-user permission grants on a file), plus idempotent foreign keys
-- to the existing "user" table. Generated by drizzle-kit; keep in sync
-- with the matching snapshot — do not hand-edit columns.
CREATE TABLE IF NOT EXISTS "file" (
	"id" text PRIMARY KEY NOT NULL,        -- application-generated id (UUID)
	"filename" text NOT NULL,              -- stored name
	"original_name" text NOT NULL,         -- uploader-supplied name
	"mime_type" text NOT NULL,
	"size" integer NOT NULL,               -- bytes; NOTE(review): integer caps at ~2.1 GB — confirm bigint is not needed
	"hash" text NOT NULL,                  -- content hash (SHA-256 per app schema)
	"bucket_name" text NOT NULL,           -- R2 bucket holding the object
	"object_key" text NOT NULL,            -- key within the bucket
	"r2_url" text NOT NULL,
	"metadata" json,
	"tags" json,
	"visibility" varchar(16) DEFAULT 'private' NOT NULL,   -- 'public' | 'private' | 'restricted'
	"user_id" text NOT NULL,               -- owner; FK added below
	"status" varchar(16) DEFAULT 'processing' NOT NULL,    -- 'processing' | 'ready' | 'error' | 'deleted'
	"processing_error" text,
	"uploaded_at" timestamp NOT NULL,
	"last_accessed_at" timestamp,
	"expires_at" timestamp,                -- for temporary files
	"created_at" timestamp NOT NULL,       -- set by the application (no DB default)
	"updated_at" timestamp NOT NULL
);
--> statement-breakpoint
-- Per-user permission grants on a file. NOTE(review): there is no unique
-- constraint on (file_id, user_id), so duplicate grants are possible —
-- confirm the application enforces one row per pair.
CREATE TABLE IF NOT EXISTS "file_access" (
	"id" serial PRIMARY KEY NOT NULL,
	"file_id" text NOT NULL,               -- FK to "file" added below
	"user_id" text NOT NULL,               -- FK to "user" added below
	"can_read" boolean DEFAULT false NOT NULL,
	"can_write" boolean DEFAULT false NOT NULL,
	"can_delete" boolean DEFAULT false NOT NULL,
	"can_share" boolean DEFAULT false NOT NULL,
	"accessed_at" timestamp,
	"granted_at" timestamp NOT NULL,
	"expires_at" timestamp,                -- optional grant expiry
	"created_at" timestamp NOT NULL,
	"updated_at" timestamp NOT NULL
);
--> statement-breakpoint
-- Idempotent FK add: deleting a user cascades to their files.
DO $$ BEGIN
 ALTER TABLE "file" ADD CONSTRAINT "file_user_id_user_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."user"("id") ON DELETE cascade ON UPDATE no action;
EXCEPTION
 WHEN duplicate_object THEN null;
END $$;
--> statement-breakpoint
-- Idempotent FK add: deleting a file cascades to its access grants.
DO $$ BEGIN
 ALTER TABLE "file_access" ADD CONSTRAINT "file_access_file_id_file_id_fk" FOREIGN KEY ("file_id") REFERENCES "public"."file"("id") ON DELETE cascade ON UPDATE no action;
EXCEPTION
 WHEN duplicate_object THEN null;
END $$;
--> statement-breakpoint
-- Idempotent FK add: deleting a user cascades to grants made to them.
DO $$ BEGIN
 ALTER TABLE "file_access" ADD CONSTRAINT "file_access_user_id_user_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."user"("id") ON DELETE cascade ON UPDATE no action;
EXCEPTION
 WHEN duplicate_object THEN null;
END $$;

View File

@@ -0,0 +1,920 @@
{
"id": "32691d1d-382d-4db0-96ca-a49c46ece173",
"prevId": "333bfb88-9996-4dab-bbf5-724a6eadd745",
"version": "7",
"dialect": "postgresql",
"tables": {
"public.two_factor": {
"name": "two_factor",
"schema": "",
"columns": {
"id": {
"name": "id",
"type": "text",
"primaryKey": true,
"notNull": true
},
"secret": {
"name": "secret",
"type": "text",
"primaryKey": false,
"notNull": true
},
"backup_codes": {
"name": "backup_codes",
"type": "json",
"primaryKey": false,
"notNull": false
},
"user_id": {
"name": "user_id",
"type": "text",
"primaryKey": false,
"notNull": true
},
"created_at": {
"name": "created_at",
"type": "timestamp",
"primaryKey": false,
"notNull": true
},
"updated_at": {
"name": "updated_at",
"type": "timestamp",
"primaryKey": false,
"notNull": true
}
},
"indexes": {},
"foreignKeys": {
"two_factor_user_id_user_id_fk": {
"name": "two_factor_user_id_user_id_fk",
"tableFrom": "two_factor",
"tableTo": "user",
"columnsFrom": [
"user_id"
],
"columnsTo": [
"id"
],
"onDelete": "cascade",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"policies": {},
"checkConstraints": {},
"isRLSEnabled": false
},
"public.twofa_sessions": {
"name": "twofa_sessions",
"schema": "",
"columns": {
"id": {
"name": "id",
"type": "text",
"primaryKey": true,
"notNull": true
},
"user_id": {
"name": "user_id",
"type": "text",
"primaryKey": false,
"notNull": true
},
"session_id": {
"name": "session_id",
"type": "text",
"primaryKey": false,
"notNull": true
},
"verification_token": {
"name": "verification_token",
"type": "text",
"primaryKey": false,
"notNull": true
},
"code_used": {
"name": "code_used",
"type": "text",
"primaryKey": false,
"notNull": false
},
"status": {
"name": "status",
"type": "varchar(16)",
"primaryKey": false,
"notNull": true
},
"attempts": {
"name": "attempts",
"type": "integer",
"primaryKey": false,
"notNull": true,
"default": 0
},
"max_attempts": {
"name": "max_attempts",
"type": "integer",
"primaryKey": false,
"notNull": true,
"default": 5
},
"verified_at": {
"name": "verified_at",
"type": "timestamp",
"primaryKey": false,
"notNull": false
},
"expires_at": {
"name": "expires_at",
"type": "timestamp",
"primaryKey": false,
"notNull": true
},
"created_at": {
"name": "created_at",
"type": "timestamp",
"primaryKey": false,
"notNull": true
},
"ip_address": {
"name": "ip_address",
"type": "text",
"primaryKey": false,
"notNull": false,
"default": "''"
},
"user_agent": {
"name": "user_agent",
"type": "text",
"primaryKey": false,
"notNull": false,
"default": "''"
}
},
"indexes": {},
"foreignKeys": {
"twofa_sessions_user_id_user_id_fk": {
"name": "twofa_sessions_user_id_user_id_fk",
"tableFrom": "twofa_sessions",
"tableTo": "user",
"columnsFrom": [
"user_id"
],
"columnsTo": [
"id"
],
"onDelete": "cascade",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {},
"uniqueConstraints": {
"twofa_sessions_verification_token_unique": {
"name": "twofa_sessions_verification_token_unique",
"nullsNotDistinct": false,
"columns": [
"verification_token"
]
}
},
"policies": {},
"checkConstraints": {},
"isRLSEnabled": false
},
"public.account": {
"name": "account",
"schema": "",
"columns": {
"id": {
"name": "id",
"type": "text",
"primaryKey": true,
"notNull": true
},
"account_id": {
"name": "account_id",
"type": "text",
"primaryKey": false,
"notNull": true
},
"provider_id": {
"name": "provider_id",
"type": "text",
"primaryKey": false,
"notNull": true
},
"user_id": {
"name": "user_id",
"type": "text",
"primaryKey": false,
"notNull": true
},
"access_token": {
"name": "access_token",
"type": "text",
"primaryKey": false,
"notNull": false
},
"refresh_token": {
"name": "refresh_token",
"type": "text",
"primaryKey": false,
"notNull": false
},
"id_token": {
"name": "id_token",
"type": "text",
"primaryKey": false,
"notNull": false
},
"access_token_expires_at": {
"name": "access_token_expires_at",
"type": "timestamp",
"primaryKey": false,
"notNull": false
},
"refresh_token_expires_at": {
"name": "refresh_token_expires_at",
"type": "timestamp",
"primaryKey": false,
"notNull": false
},
"scope": {
"name": "scope",
"type": "text",
"primaryKey": false,
"notNull": false
},
"password": {
"name": "password",
"type": "text",
"primaryKey": false,
"notNull": false
},
"created_at": {
"name": "created_at",
"type": "timestamp",
"primaryKey": false,
"notNull": true,
"default": "now()"
},
"updated_at": {
"name": "updated_at",
"type": "timestamp",
"primaryKey": false,
"notNull": true
}
},
"indexes": {
"account_userId_idx": {
"name": "account_userId_idx",
"columns": [
{
"expression": "user_id",
"isExpression": false,
"asc": true,
"nulls": "last"
}
],
"isUnique": false,
"concurrently": false,
"method": "btree",
"with": {}
}
},
"foreignKeys": {
"account_user_id_user_id_fk": {
"name": "account_user_id_user_id_fk",
"tableFrom": "account",
"tableTo": "user",
"columnsFrom": [
"user_id"
],
"columnsTo": [
"id"
],
"onDelete": "cascade",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"policies": {},
"checkConstraints": {},
"isRLSEnabled": false
},
"public.user": {
"name": "user",
"schema": "",
"columns": {
"id": {
"name": "id",
"type": "text",
"primaryKey": true,
"notNull": true
},
"name": {
"name": "name",
"type": "text",
"primaryKey": false,
"notNull": true
},
"email": {
"name": "email",
"type": "text",
"primaryKey": false,
"notNull": true
},
"email_verified": {
"name": "email_verified",
"type": "boolean",
"primaryKey": false,
"notNull": true,
"default": false
},
"image": {
"name": "image",
"type": "text",
"primaryKey": false,
"notNull": false
},
"created_at": {
"name": "created_at",
"type": "timestamp",
"primaryKey": false,
"notNull": true,
"default": "now()"
},
"updated_at": {
"name": "updated_at",
"type": "timestamp",
"primaryKey": false,
"notNull": true,
"default": "now()"
},
"username": {
"name": "username",
"type": "text",
"primaryKey": false,
"notNull": false
},
"display_username": {
"name": "display_username",
"type": "text",
"primaryKey": false,
"notNull": false
},
"role": {
"name": "role",
"type": "text",
"primaryKey": false,
"notNull": false
},
"banned": {
"name": "banned",
"type": "boolean",
"primaryKey": false,
"notNull": false,
"default": false
},
"ban_reason": {
"name": "ban_reason",
"type": "text",
"primaryKey": false,
"notNull": false
},
"ban_expires": {
"name": "ban_expires",
"type": "timestamp",
"primaryKey": false,
"notNull": false
},
"onboarding_done": {
"name": "onboarding_done",
"type": "boolean",
"primaryKey": false,
"notNull": false,
"default": false
},
"last2_fa_verified_at": {
"name": "last2_fa_verified_at",
"type": "timestamp",
"primaryKey": false,
"notNull": false
},
"parent_id": {
"name": "parent_id",
"type": "text",
"primaryKey": false,
"notNull": false
}
},
"indexes": {},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {
"user_email_unique": {
"name": "user_email_unique",
"nullsNotDistinct": false,
"columns": [
"email"
]
},
"user_username_unique": {
"name": "user_username_unique",
"nullsNotDistinct": false,
"columns": [
"username"
]
}
},
"policies": {},
"checkConstraints": {},
"isRLSEnabled": false
},
"public.verification": {
"name": "verification",
"schema": "",
"columns": {
"id": {
"name": "id",
"type": "text",
"primaryKey": true,
"notNull": true
},
"identifier": {
"name": "identifier",
"type": "text",
"primaryKey": false,
"notNull": true
},
"value": {
"name": "value",
"type": "text",
"primaryKey": false,
"notNull": true
},
"expires_at": {
"name": "expires_at",
"type": "timestamp",
"primaryKey": false,
"notNull": true
},
"created_at": {
"name": "created_at",
"type": "timestamp",
"primaryKey": false,
"notNull": true,
"default": "now()"
},
"updated_at": {
"name": "updated_at",
"type": "timestamp",
"primaryKey": false,
"notNull": true,
"default": "now()"
}
},
"indexes": {
"verification_identifier_idx": {
"name": "verification_identifier_idx",
"columns": [
{
"expression": "identifier",
"isExpression": false,
"asc": true,
"nulls": "last"
}
],
"isUnique": false,
"concurrently": false,
"method": "btree",
"with": {}
}
},
"foreignKeys": {},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"policies": {},
"checkConstraints": {},
"isRLSEnabled": false
},
"public.file": {
"name": "file",
"schema": "",
"columns": {
"id": {
"name": "id",
"type": "text",
"primaryKey": true,
"notNull": true
},
"filename": {
"name": "filename",
"type": "text",
"primaryKey": false,
"notNull": true
},
"original_name": {
"name": "original_name",
"type": "text",
"primaryKey": false,
"notNull": true
},
"mime_type": {
"name": "mime_type",
"type": "text",
"primaryKey": false,
"notNull": true
},
"size": {
"name": "size",
"type": "integer",
"primaryKey": false,
"notNull": true
},
"hash": {
"name": "hash",
"type": "text",
"primaryKey": false,
"notNull": true
},
"bucket_name": {
"name": "bucket_name",
"type": "text",
"primaryKey": false,
"notNull": true
},
"object_key": {
"name": "object_key",
"type": "text",
"primaryKey": false,
"notNull": true
},
"r2_url": {
"name": "r2_url",
"type": "text",
"primaryKey": false,
"notNull": true
},
"metadata": {
"name": "metadata",
"type": "json",
"primaryKey": false,
"notNull": false
},
"tags": {
"name": "tags",
"type": "json",
"primaryKey": false,
"notNull": false
},
"visibility": {
"name": "visibility",
"type": "varchar(16)",
"primaryKey": false,
"notNull": true,
"default": "'private'"
},
"user_id": {
"name": "user_id",
"type": "text",
"primaryKey": false,
"notNull": true
},
"status": {
"name": "status",
"type": "varchar(16)",
"primaryKey": false,
"notNull": true,
"default": "'processing'"
},
"processing_error": {
"name": "processing_error",
"type": "text",
"primaryKey": false,
"notNull": false
},
"uploaded_at": {
"name": "uploaded_at",
"type": "timestamp",
"primaryKey": false,
"notNull": true
},
"last_accessed_at": {
"name": "last_accessed_at",
"type": "timestamp",
"primaryKey": false,
"notNull": false
},
"expires_at": {
"name": "expires_at",
"type": "timestamp",
"primaryKey": false,
"notNull": false
},
"created_at": {
"name": "created_at",
"type": "timestamp",
"primaryKey": false,
"notNull": true
},
"updated_at": {
"name": "updated_at",
"type": "timestamp",
"primaryKey": false,
"notNull": true
}
},
"indexes": {},
"foreignKeys": {
"file_user_id_user_id_fk": {
"name": "file_user_id_user_id_fk",
"tableFrom": "file",
"tableTo": "user",
"columnsFrom": [
"user_id"
],
"columnsTo": [
"id"
],
"onDelete": "cascade",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"policies": {},
"checkConstraints": {},
"isRLSEnabled": false
},
"public.file_access": {
"name": "file_access",
"schema": "",
"columns": {
"id": {
"name": "id",
"type": "serial",
"primaryKey": true,
"notNull": true
},
"file_id": {
"name": "file_id",
"type": "text",
"primaryKey": false,
"notNull": true
},
"user_id": {
"name": "user_id",
"type": "text",
"primaryKey": false,
"notNull": true
},
"can_read": {
"name": "can_read",
"type": "boolean",
"primaryKey": false,
"notNull": true,
"default": false
},
"can_write": {
"name": "can_write",
"type": "boolean",
"primaryKey": false,
"notNull": true,
"default": false
},
"can_delete": {
"name": "can_delete",
"type": "boolean",
"primaryKey": false,
"notNull": true,
"default": false
},
"can_share": {
"name": "can_share",
"type": "boolean",
"primaryKey": false,
"notNull": true,
"default": false
},
"accessed_at": {
"name": "accessed_at",
"type": "timestamp",
"primaryKey": false,
"notNull": false
},
"granted_at": {
"name": "granted_at",
"type": "timestamp",
"primaryKey": false,
"notNull": true
},
"expires_at": {
"name": "expires_at",
"type": "timestamp",
"primaryKey": false,
"notNull": false
},
"created_at": {
"name": "created_at",
"type": "timestamp",
"primaryKey": false,
"notNull": true
},
"updated_at": {
"name": "updated_at",
"type": "timestamp",
"primaryKey": false,
"notNull": true
}
},
"indexes": {},
"foreignKeys": {
"file_access_file_id_file_id_fk": {
"name": "file_access_file_id_file_id_fk",
"tableFrom": "file_access",
"tableTo": "file",
"columnsFrom": [
"file_id"
],
"columnsTo": [
"id"
],
"onDelete": "cascade",
"onUpdate": "no action"
},
"file_access_user_id_user_id_fk": {
"name": "file_access_user_id_user_id_fk",
"tableFrom": "file_access",
"tableTo": "user",
"columnsFrom": [
"user_id"
],
"columnsTo": [
"id"
],
"onDelete": "cascade",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"policies": {},
"checkConstraints": {},
"isRLSEnabled": false
},
"public.notifications": {
"name": "notifications",
"schema": "",
"columns": {
"id": {
"name": "id",
"type": "serial",
"primaryKey": true,
"notNull": true
},
"title": {
"name": "title",
"type": "text",
"primaryKey": false,
"notNull": true
},
"body": {
"name": "body",
"type": "text",
"primaryKey": false,
"notNull": true
},
"priority": {
"name": "priority",
"type": "varchar(12)",
"primaryKey": false,
"notNull": true,
"default": "'normal'"
},
"type": {
"name": "type",
"type": "varchar(12)",
"primaryKey": false,
"notNull": true
},
"category": {
"name": "category",
"type": "varchar(64)",
"primaryKey": false,
"notNull": false
},
"is_read": {
"name": "is_read",
"type": "boolean",
"primaryKey": false,
"notNull": true,
"default": false
},
"is_archived": {
"name": "is_archived",
"type": "boolean",
"primaryKey": false,
"notNull": true,
"default": false
},
"action_url": {
"name": "action_url",
"type": "text",
"primaryKey": false,
"notNull": false
},
"action_type": {
"name": "action_type",
"type": "varchar(16)",
"primaryKey": false,
"notNull": false
},
"action_data": {
"name": "action_data",
"type": "json",
"primaryKey": false,
"notNull": false
},
"icon": {
"name": "icon",
"type": "varchar(64)",
"primaryKey": false,
"notNull": false
},
"user_id": {
"name": "user_id",
"type": "text",
"primaryKey": false,
"notNull": true
},
"sent_at": {
"name": "sent_at",
"type": "timestamp",
"primaryKey": false,
"notNull": true
},
"read_at": {
"name": "read_at",
"type": "timestamp",
"primaryKey": false,
"notNull": false
},
"expires_at": {
"name": "expires_at",
"type": "timestamp",
"primaryKey": false,
"notNull": false
},
"created_at": {
"name": "created_at",
"type": "timestamp",
"primaryKey": false,
"notNull": true
},
"updated_at": {
"name": "updated_at",
"type": "timestamp",
"primaryKey": false,
"notNull": true
}
},
"indexes": {},
"foreignKeys": {
"notifications_user_id_user_id_fk": {
"name": "notifications_user_id_user_id_fk",
"tableFrom": "notifications",
"tableTo": "user",
"columnsFrom": [
"user_id"
],
"columnsTo": [
"id"
],
"onDelete": "cascade",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {},
"uniqueConstraints": {},
"policies": {},
"checkConstraints": {},
"isRLSEnabled": false
}
},
"enums": {},
"schemas": {},
"sequences": {},
"roles": {},
"policies": {},
"views": {},
"_meta": {
"columns": {},
"schemas": {},
"tables": {}
}
}

View File

@@ -8,6 +8,13 @@
"when": 1772288650927,
"tag": "0000_lucky_karma",
"breakpoints": true
},
{
"idx": 1,
"version": "7",
"when": 1772335785371,
"tag": "0001_silly_venus",
"breakpoints": true
}
]
}

View File

@@ -13,13 +13,13 @@
"dependencies": {
"@pkg/settings": "workspace:*",
"dotenv": "^16.4.7",
"drizzle-orm": "^0.36.1",
"drizzle-orm": "^0.45.1",
"postgres": "^3.4.8"
},
"devDependencies": {
"@types/bun": "latest",
"@types/pg": "^8.11.10",
"drizzle-kit": "^0.28.0"
"drizzle-kit": "^0.31.9"
},
"peerDependencies": {
"typescript": "^5.9.3"

View File

@@ -0,0 +1,88 @@
import {
boolean,
integer,
json,
pgTable,
serial,
text,
timestamp,
varchar,
} from "drizzle-orm/pg-core";
import { user } from "./better.auth.schema";
import { relations } from "drizzle-orm";
// Drizzle table for stored objects. One row per uploaded file; the bytes
// themselves live in R2 under (bucketName, objectKey).
export const file = pgTable("file", {
  id: text("id").primaryKey(), // UUID, generated by the application
  // File Information
  filename: text("filename").notNull(), // name as stored (id + extension)
  originalName: text("original_name").notNull(), // name supplied by the uploader
  mimeType: text("mime_type").notNull(),
  size: integer("size").notNull(), // in bytes
  hash: text("hash").notNull(), // SHA-256 for deduplication
  // R2 Storage Information
  bucketName: text("bucket_name").notNull(),
  objectKey: text("object_key").notNull(),
  r2Url: text("r2_url").notNull(), // publicly addressable URL for the object
  // Metadata
  metadata: json("metadata").$type<Record<string, any>>(), // free-form per-file metadata
  tags: json("tags").$type<string[]>(),
  // Access Control
  visibility: varchar("visibility", { length: 16 })
    .default("private")
    .notNull(), // "public", "private", "restricted"
  userId: text("user_id")
    .notNull()
    .references(() => user.id, { onDelete: "cascade" }), // owner; deleting the user deletes their files
  // File Status
  status: varchar("status", { length: 16 }).default("processing").notNull(), // "processing", "ready", "error", "deleted"
  processingError: text("processing_error"), // populated when status is "error"
  // Timestamps — all set by the application; no DB defaults
  uploadedAt: timestamp("uploaded_at").notNull(),
  lastAccessedAt: timestamp("last_accessed_at"),
  expiresAt: timestamp("expires_at"), // For temporary files
  createdAt: timestamp("created_at").notNull(),
  updatedAt: timestamp("updated_at").notNull(),
});
// Per-user permission grants on a file (sharing). One row per grant;
// NOTE(review): no unique index on (fileId, userId) here — confirm the
// application prevents duplicate grants for the same pair.
export const fileAccess = pgTable("file_access", {
  id: serial("id").primaryKey(),
  fileId: text("file_id")
    .notNull()
    .references(() => file.id, { onDelete: "cascade" }), // grants vanish with the file
  userId: text("user_id")
    .notNull()
    .references(() => user.id, { onDelete: "cascade" }), // grantee, not the owner
  // Access permissions — all default to deny
  canRead: boolean("can_read").default(false).notNull(),
  canWrite: boolean("can_write").default(false).notNull(),
  canDelete: boolean("can_delete").default(false).notNull(),
  canShare: boolean("can_share").default(false).notNull(),
  // Access tracking
  accessedAt: timestamp("accessed_at"), // last time the grantee touched the file
  grantedAt: timestamp("granted_at").notNull(),
  expiresAt: timestamp("expires_at"), // optional grant expiry
  createdAt: timestamp("created_at").notNull(),
  updatedAt: timestamp("updated_at").notNull(),
});
// Relation metadata for drizzle's relational query API.
export const filesRelations = relations(file, ({ one, many }) => ({
  // Owning user (file.userId → user.id).
  owner: one(user, { fields: [file.userId], references: [user.id] }),
  // All permission grants issued on this file.
  fileAccess: many(fileAccess),
}));
export const fileAccessRelations = relations(fileAccess, ({ one }) => ({
  // The file this grant applies to.
  file: one(file, { fields: [fileAccess.fileId], references: [file.id] }),
  // The user the grant was issued to.
  user: one(user, { fields: [fileAccess.userId], references: [user.id] }),
}));

View File

@@ -1,3 +1,4 @@
// Barrel file: re-exports every schema module so the rest of the app can
// import all tables/relations from one path.
export * from "./auth.schema";
export * from "./better.auth.schema";
export * from "./file.schema"; // file + file_access tables and relations
export * from "./general.schema";

View File

@@ -0,0 +1,252 @@
import {
FileFilters,
FileShareRequest,
FileUpdateRequest,
FileUploadRequest,
PaginationOptions,
PresignedUploadRequest,
} from "./data";
import { FlowExecCtx } from "@core/flow.execution.context";
import { StorageRepository } from "./storage.repository";
import { FileRepository } from "./repository";
import { settings } from "@core/settings";
import { ResultAsync } from "neverthrow";
import { traceResultAsync } from "@core/observability";
import { db } from "@pkg/db";
/**
 * Domain controller for the file feature.
 *
 * A thin orchestration layer: each public method wraps repository /
 * storage calls in a trace span via `traceResultAsync` and returns a
 * neverthrow `ResultAsync`, so callers handle failures as values rather
 * than exceptions.
 */
export class FileController {
  constructor(
    private fileRepo: FileRepository,
    private storageRepo: StorageRepository,
    /** Public base URL used to build externally reachable R2 URLs. */
    private publicUrl: string,
  ) {}

  /** List the files matching `filters`, paginated. */
  getFiles(
    fctx: FlowExecCtx,
    filters: FileFilters,
    pagination: PaginationOptions,
  ) {
    return traceResultAsync({
      name: "logic.files.controller.getFiles",
      fctx,
      attributes: { "app.user.id": filters.userId },
      fn: () => this.fileRepo.getFiles(fctx, filters, pagination),
    });
  }

  /** Fetch a single file by id, scoped to `userId`. */
  getFile(fctx: FlowExecCtx, fileId: string, userId: string) {
    return traceResultAsync({
      name: "logic.files.controller.getFile",
      fctx,
      attributes: { "app.user.id": userId, "app.file.id": fileId },
      fn: () => this.fileRepo.getFileById(fctx, fileId, userId),
    });
  }

  /**
   * Upload a file through the server: read the bytes, push them to R2,
   * then persist a "ready" DB record built from the storage metadata.
   * Uses `globalThis.File` because this module's own `File` model type
   * shadows the global.
   */
  uploadFile(
    fctx: FlowExecCtx,
    userId: string,
    file: globalThis.File,
    uploadRequest: FileUploadRequest,
  ) {
    return traceResultAsync({
      name: "logic.files.controller.uploadFile",
      fctx,
      attributes: { "app.user.id": userId, "app.file.name": file.name },
      fn: () =>
        ResultAsync.fromPromise(file.arrayBuffer(), (error) => ({
          code: "INTERNAL_ERROR",
          message: "Failed to read file buffer",
          description: "Please try again",
          detail: error instanceof Error ? error.message : String(error),
        }))
          .map((arrayBuffer) => Buffer.from(arrayBuffer))
          .andThen((buffer) =>
            this.storageRepo.uploadFile(
              fctx,
              buffer,
              file.name,
              file.type,
              userId,
              {
                // NOTE(review): this cast accepts any string as
                // "public" | "private"; confirm upstream validation
                // rejects unexpected values (e.g. "restricted").
                visibility:
                  (uploadRequest.visibility as
                    | "public"
                    | "private") || "private",
                metadata: uploadRequest.metadata,
                tags: uploadRequest.tags,
                processImage: uploadRequest.processImage,
                processDocument: uploadRequest.processDocument,
                processVideo: uploadRequest.processVideo,
              },
            ),
          )
          .andThen((fileMetadata) =>
            this.fileRepo
              .createFile(fctx, {
                id: fileMetadata.id,
                filename: fileMetadata.filename,
                originalName: fileMetadata.originalName,
                mimeType: fileMetadata.mimeType,
                size: fileMetadata.size,
                hash: fileMetadata.hash,
                bucketName: fileMetadata.bucketName,
                objectKey: fileMetadata.objectKey,
                r2Url: fileMetadata.r2Url,
                visibility: fileMetadata.visibility,
                userId: fileMetadata.userId,
                metadata: fileMetadata.metadata,
                // Copy to a mutable array; storage may return readonly tags.
                tags: fileMetadata.tags
                  ? [...fileMetadata.tags]
                  : undefined,
                status: "ready",
                uploadedAt: fileMetadata.uploadedAt,
              })
              .map((dbFile) => ({
                success: true,
                file: dbFile,
                uploadId: fileMetadata.id,
              })),
          ),
    });
  }

  /**
   * Create a presigned PUT URL for a direct-to-R2 client upload, plus a
   * pending DB record in "processing" status. The record's hash is empty
   * until the upload completes.
   */
  generatePresignedUrl(
    fctx: FlowExecCtx,
    userId: string,
    bucketName: string,
    request: PresignedUploadRequest,
  ) {
    const fileId = crypto.randomUUID();
    // Derive the stored name from the client filename. Only treat text
    // after the LAST dot as an extension, and only when the dot is not
    // the first character — the previous `split(".").pop()` turned
    // dotless names like "README" into a bogus ".README" extension.
    const dotIndex = request.filename.lastIndexOf(".");
    const extension =
      dotIndex > 0 ? request.filename.slice(dotIndex + 1) : "";
    const filename = extension ? `${fileId}.${extension}` : fileId;
    // FIX: interpolate the per-file name. The previous key ended in the
    // literal text "$(unknown)" (shell-style, not a JS interpolation),
    // so every presigned upload for a user collided on one object key.
    const objectKey = `uploads/${userId}/${filename}`;
    return traceResultAsync({
      name: "logic.files.controller.generatePresignedUrl",
      fctx,
      attributes: { "app.user.id": userId, "app.file.id": fileId },
      fn: () =>
        this.storageRepo
          .generatePresignedUploadUrl(
            fctx,
            objectKey,
            request.mimeType,
            3600, // URL validity in seconds (1 hour)
          )
          .andThen((presignedData) =>
            this.fileRepo
              .createFile(fctx, {
                id: fileId,
                filename,
                originalName: request.filename,
                mimeType: request.mimeType,
                size: request.size,
                hash: "", // unknown until the client finishes uploading
                bucketName,
                objectKey,
                r2Url: `${this.publicUrl}/${bucketName}/${objectKey}`,
                visibility: request.visibility || "private",
                userId,
                status: "processing",
                uploadedAt: new Date(),
              })
              .map(() => ({
                ...presignedData,
                fileId,
                objectKey,
              })),
          ),
    });
  }

  /** Apply a partial update (name, visibility, metadata, tags) to a file. */
  updateFile(
    fctx: FlowExecCtx,
    fileId: string,
    userId: string,
    updates: FileUpdateRequest,
  ) {
    return traceResultAsync({
      name: "logic.files.controller.updateFile",
      fctx,
      attributes: { "app.user.id": userId, "app.file.id": fileId },
      fn: () => this.fileRepo.updateFile(fctx, fileId, userId, updates),
    });
  }

  /**
   * Delete files from R2 and then from the database. All ids are
   * resolved first; one missing/unauthorized file fails the whole batch.
   */
  deleteFiles(fctx: FlowExecCtx, fileIds: readonly string[], userId: string) {
    return traceResultAsync({
      name: "logic.files.controller.deleteFiles",
      fctx,
      attributes: {
        "app.user.id": userId,
        "app.files.count": fileIds.length,
      },
      fn: () =>
        ResultAsync.combine(
          [...fileIds].map((fileId) =>
            this.fileRepo.getFileById(fctx, fileId, userId),
          ),
        )
          .map((files) => files.map((file) => file.objectKey))
          .andThen((objectKeys) =>
            this.storageRepo.deleteFiles(fctx, objectKeys),
          )
          .andThen(() =>
            this.fileRepo.deleteFiles(fctx, fileIds, userId),
          ),
    });
  }

  /** Grant another user permissions on a file owned by `ownerId`. */
  shareFile(
    fctx: FlowExecCtx,
    fileId: string,
    ownerId: string,
    shareRequest: FileShareRequest,
  ) {
    return traceResultAsync({
      name: "logic.files.controller.shareFile",
      fctx,
      attributes: { "app.user.id": ownerId, "app.file.id": fileId },
      fn: () => this.fileRepo.shareFile(fctx, fileId, ownerId, shareRequest),
    });
  }

  /**
   * Set a file's processing status (e.g. "ready" / "error" after a
   * presigned upload completes), optionally recording an error message.
   */
  updateFileStatus(
    fctx: FlowExecCtx,
    fileId: string,
    status: string,
    processingError?: string,
  ) {
    return traceResultAsync({
      name: "logic.files.controller.updateFileStatus",
      fctx,
      attributes: { "app.file.id": fileId },
      fn: () =>
        this.fileRepo.updateFileStatus(
          fctx,
          fileId,
          status,
          processingError,
        ),
    });
  }
}
/**
 * Factory: build a FileController wired to the app-wide database handle
 * and the R2 configuration taken from application settings. Missing
 * string settings fall back to "".
 */
export function getFileController(): FileController {
  // Assemble the R2 storage configuration up front so the wiring below
  // reads as plain composition.
  const r2Config = {
    bucketName: settings.r2BucketName || "",
    region: settings.r2Region || "",
    endpoint: settings.r2Endpoint || "",
    accessKey: settings.r2AccessKey || "",
    secretKey: settings.r2SecretKey || "",
    publicUrl: settings.r2PublicUrl || "",
    maxFileSize: settings.maxFileSize,
    allowedMimeTypes: settings.allowedMimeTypes,
    allowedExtensions: settings.allowedExtensions,
  };
  const repository = new FileRepository(db);
  const storage = new StorageRepository(r2Config);
  return new FileController(repository, storage, r2Config.publicUrl);
}

View File

@@ -0,0 +1,147 @@
import * as v from "valibot";
// Canonical valibot model of a stored file row; mirrors the drizzle
// "file" table (camelCase fields ↔ snake_case columns).
export const fileSchema = v.object({
  id: v.string(),
  filename: v.string(),
  originalName: v.string(),
  mimeType: v.string(),
  size: v.pipe(v.number(), v.integer()), // bytes
  hash: v.string(),
  bucketName: v.string(),
  objectKey: v.string(),
  r2Url: v.string(),
  // Free-form string here; the DB schema documents
  // "public" | "private" | "restricted" — TODO confirm narrowing to a picklist.
  visibility: v.string(),
  userId: v.string(),
  metadata: v.optional(v.record(v.string(), v.any())),
  tags: v.optional(v.array(v.string())),
  // Free-form string here; the DB schema documents
  // "processing" | "ready" | "error" | "deleted".
  status: v.string(),
  processingError: v.optional(v.string()),
  uploadedAt: v.date(),
  lastAccessedAt: v.optional(v.date()),
  expiresAt: v.optional(v.date()),
  createdAt: v.date(),
  updatedAt: v.date(),
});
// NOTE: shadows the global `File` type — code needing the web File API
// must write `globalThis.File`.
export type File = v.InferOutput<typeof fileSchema>;
export type Files = File[];
// Options accompanying a server-side upload.
export const fileUploadRequestSchema = v.object({
  visibility: v.optional(v.string()),
  metadata: v.optional(v.record(v.string(), v.any())),
  tags: v.optional(v.array(v.string())),
  processImage: v.optional(v.boolean()),
  processDocument: v.optional(v.boolean()),
  processVideo: v.optional(v.boolean()),
});
export type FileUploadRequest = v.InferOutput<typeof fileUploadRequestSchema>;
// Server-side list filters; userId is mandatory (scopes the query).
export const fileFiltersSchema = v.object({
  userId: v.string(),
  mimeType: v.optional(v.string()),
  visibility: v.optional(v.string()),
  status: v.optional(v.string()),
  search: v.optional(v.string()),
  tags: v.optional(v.array(v.string())),
});
export type FileFilters = v.InferOutput<typeof fileFiltersSchema>;
// Generic page request; sortOrder is a free string here (the client
// variant below restricts it to "asc" | "desc").
export const paginationOptionsSchema = v.object({
  page: v.pipe(v.number(), v.integer()),
  pageSize: v.pipe(v.number(), v.integer()),
  sortBy: v.optional(v.string()),
  sortOrder: v.optional(v.string()),
});
export type PaginationOptions = v.InferOutput<typeof paginationOptionsSchema>;
// One page of files plus paging totals.
export const PaginatedFilesSchema = v.object({
  data: v.array(fileSchema),
  total: v.pipe(v.number(), v.integer()),
  page: v.pipe(v.number(), v.integer()),
  pageSize: v.pipe(v.number(), v.integer()),
  totalPages: v.pipe(v.number(), v.integer()),
});
export type PaginatedFiles = v.InferOutput<typeof PaginatedFilesSchema>;
// Combined payload for a list-files request.
export const getFilesSchema = v.object({
  filters: fileFiltersSchema,
  pagination: paginationOptionsSchema,
});
export type GetFiles = v.InferOutput<typeof getFilesSchema>;
// Client request for a direct-to-R2 presigned upload.
export const presignedUploadRequestSchema = v.object({
  filename: v.string(),
  mimeType: v.string(),
  size: v.pipe(v.number(), v.integer()),
  visibility: v.optional(v.string()),
});
export type PresignedUploadRequest = v.InferOutput<
  typeof presignedUploadRequestSchema
>;
// Response for a presigned upload: where to PUT and how to refer to the
// pending file afterwards.
export const presignedUploadResponseSchema = v.object({
  uploadUrl: v.string(),
  downloadUrl: v.optional(v.string()),
  expiresIn: v.pipe(v.number(), v.integer()), // seconds
  fileId: v.string(),
  objectKey: v.string(),
  fields: v.optional(v.record(v.string(), v.any())),
});
export type PresignedUploadResponse = v.InferOutput<
  typeof presignedUploadResponseSchema
>;
// Outcome of a server-side upload; `file` is set on success, `error` on failure.
export const fileUploadResultSchema = v.object({
  success: v.boolean(),
  file: v.optional(fileSchema),
  uploadId: v.optional(v.string()),
  error: v.optional(v.string()),
});
export type FileUploadResult = v.InferOutput<typeof fileUploadResultSchema>;
// Payload for bulk operations (e.g. delete).
export const bulkFileIdsSchema = v.object({
  fileIds: v.array(v.string()),
});
export type BulkFileIds = v.InferOutput<typeof bulkFileIdsSchema>;
// Partial update of mutable file fields.
export const fileUpdateRequestSchema = v.object({
  filename: v.optional(v.string()),
  visibility: v.optional(v.string()),
  metadata: v.optional(v.record(v.string(), v.any())),
  tags: v.optional(v.array(v.string())),
});
export type FileUpdateRequest = v.InferOutput<typeof fileUpdateRequestSchema>;
// Grant another user (the `userId` here is the grantee) permissions on a file.
export const fileShareRequestSchema = v.object({
  userId: v.string(),
  permissions: v.object({
    canRead: v.optional(v.boolean()),
    canWrite: v.optional(v.boolean()),
    canDelete: v.optional(v.boolean()),
    canShare: v.optional(v.boolean()),
  }),
  expiresAt: v.optional(v.date()), // optional grant expiry
});
export type FileShareRequest = v.InferOutput<typeof fileShareRequestSchema>;
//
// Frontend specific models
//
// Same as fileFiltersSchema minus userId (the server injects the caller's id).
export const clientFileFiltersSchema = v.object({
  mimeType: v.optional(v.string()),
  visibility: v.optional(v.string()),
  status: v.optional(v.string()),
  search: v.optional(v.string()),
  tags: v.optional(v.array(v.string())),
});
export type ClientFileFilters = v.InferOutput<typeof clientFileFiltersSchema>;
// Stricter paging for the UI: sort fields required, order constrained.
export const clientPaginationOptionsSchema = v.object({
  page: v.pipe(v.number(), v.integer()),
  pageSize: v.pipe(v.number(), v.integer()),
  sortBy: v.string(),
  sortOrder: v.picklist(["asc", "desc"]),
});
export type ClientPaginationOptions = v.InferOutput<
  typeof clientPaginationOptionsSchema
>;

View File

@@ -0,0 +1,132 @@
import { FlowExecCtx } from "@core/flow.execution.context";
import { ERROR_CODES, type Err } from "@pkg/result";
import { getError } from "@pkg/logger";
/** Shape shared by most factories below: build an `Err` from a flow context and a detail string. */
type DetailErrorFactory = (fctx: FlowExecCtx, detail: string) => Err;

/**
 * Creates a `(fctx, detail)` error factory with a fixed code and message.
 * All errors of this family share the generic "Please try again later" description.
 */
const withDetail =
  (
    code: (typeof ERROR_CODES)[keyof typeof ERROR_CODES],
    message: string,
  ): DetailErrorFactory =>
  (fctx, detail) =>
    getError({
      flowId: fctx.flowId,
      code,
      message,
      description: "Please try again later",
      detail,
    });

/**
 * Error factories for the file domain. Each one attaches the current flow id
 * so failures can be correlated with the domain-event log.
 */
export const fileErrors = {
  dbError: withDetail(ERROR_CODES.DATABASE_ERROR, "Database operation failed"),
  fileNotFound: (fctx: FlowExecCtx, fileId: string): Err =>
    getError({
      flowId: fctx.flowId,
      code: ERROR_CODES.NOT_FOUND,
      message: "File not found",
      description:
        "The requested file does not exist or you don't have access to it",
      detail: `File ID: ${fileId}`,
    }),
  getFilesFailed: withDetail(ERROR_CODES.DATABASE_ERROR, "Failed to fetch files"),
  getFileFailed: withDetail(ERROR_CODES.DATABASE_ERROR, "Failed to get file"),
  createFileFailed: withDetail(
    ERROR_CODES.DATABASE_ERROR,
    "Failed to create file record",
  ),
  updateFileFailed: withDetail(ERROR_CODES.DATABASE_ERROR, "Failed to update file"),
  deleteFilesFailed: withDetail(
    ERROR_CODES.DATABASE_ERROR,
    "Failed to delete files",
  ),
  updateStatusFailed: withDetail(
    ERROR_CODES.DATABASE_ERROR,
    "Failed to update file status",
  ),
  shareFileFailed: withDetail(ERROR_CODES.DATABASE_ERROR, "Failed to share file"),
  uploadFailed: withDetail(
    ERROR_CODES.INTERNAL_SERVER_ERROR,
    "File upload failed",
  ),
  noFileMetadata: (fctx: FlowExecCtx): Err =>
    getError({
      flowId: fctx.flowId,
      code: ERROR_CODES.INTERNAL_SERVER_ERROR,
      message: "Upload succeeded but no file metadata returned",
      description: "Please try uploading again",
      detail: "Storage service returned no file metadata",
    }),
  presignedUrlFailed: withDetail(
    ERROR_CODES.INTERNAL_SERVER_ERROR,
    "Failed to generate presigned URL",
  ),
  noPresignedData: (fctx: FlowExecCtx): Err =>
    getError({
      flowId: fctx.flowId,
      code: ERROR_CODES.INTERNAL_SERVER_ERROR,
      message: "Failed to generate presigned URL",
      description: "Please try again later",
      detail: "Storage service returned no presigned data",
    }),
  storageError: withDetail(ERROR_CODES.STORAGE_ERROR, "Storage operation failed"),
};

View File

@@ -0,0 +1,537 @@
import type {
File,
FileFilters,
FileShareRequest,
FileUpdateRequest,
PaginatedFiles,
PaginationOptions,
} from "./data";
import {
Database,
and,
asc,
count,
desc,
eq,
inArray,
like,
or,
sql,
} from "@pkg/db";
import { ResultAsync, errAsync, okAsync } from "neverthrow";
import { FlowExecCtx } from "@core/flow.execution.context";
import { file, fileAccess } from "@pkg/db/schema";
import { type Err } from "@pkg/result";
import { fileErrors } from "./errors";
import { logDomainEvent } from "@pkg/logger";
/**
 * Persistence layer for file records and file-access grants.
 *
 * Every method takes a `FlowExecCtx` for domain-event correlation and returns
 * a neverthrow `ResultAsync` instead of throwing: database failures are mapped
 * onto `fileErrors.*` values. Queries are scoped by `userId`, so a caller can
 * only read or modify that user's rows.
 */
export class FileRepository {
  constructor(private db: Database) {}
  /**
   * List a user's files with optional filtering, sorting, and pagination.
   *
   * Issues two queries: a COUNT for the total, then the requested page.
   * @param fctx flow execution context (event correlation)
   * @param filters `userId` is mandatory; other fields narrow the result set
   * @param pagination 1-based `page` plus sort options (defaults: createdAt desc)
   * @returns the page of files together with total/totalPages bookkeeping
   */
  getFiles(
    fctx: FlowExecCtx,
    filters: FileFilters,
    pagination: PaginationOptions,
  ): ResultAsync<PaginatedFiles, Err> {
    const startedAt = Date.now();
    logDomainEvent({
      event: "files.list.started",
      fctx,
      meta: {
        userId: filters.userId,
        hasSearch: Boolean(filters.search),
        hasTags: Boolean(filters.tags?.length),
        page: pagination.page,
        pageSize: pagination.pageSize,
      },
    });
    const { userId, mimeType, visibility, status, search, tags } = filters;
    const {
      page,
      pageSize,
      sortBy = "createdAt",
      sortOrder = "desc",
    } = pagination;
    // Ownership scoping is always applied; the remaining filters are optional.
    const conditions = [eq(file.userId, userId)];
    if (mimeType) {
      // Prefix match so e.g. "image" matches "image/png", "image/webp", ...
      conditions.push(like(file.mimeType, `${mimeType}%`));
    }
    if (visibility) {
      conditions.push(eq(file.visibility, visibility));
    }
    if (status) {
      conditions.push(eq(file.status, status));
    }
    if (search) {
      // Substring match against both the stored and the original filename.
      conditions.push(
        or(
          like(file.filename, `%${search}%`),
          like(file.originalName, `%${search}%`),
        )!,
      );
    }
    if (tags && tags.length > 0) {
      // Postgres JSON containment: the file's tags must include all requested tags.
      conditions.push(sql`${file.tags} @> ${JSON.stringify(tags)}`);
    }
    const whereClause = and(...conditions);
    return ResultAsync.fromPromise(
      this.db.select({ count: count() }).from(file).where(whereClause),
      (error) => {
        logDomainEvent({
          level: "error",
          event: "files.list.failed",
          fctx,
          durationMs: Date.now() - startedAt,
          error,
          meta: { userId },
        });
        return fileErrors.getFilesFailed(
          fctx,
          error instanceof Error ? error.message : String(error),
        );
      },
    ).andThen((totalResult) => {
      const total = totalResult[0]?.count || 0;
      const offset = (page - 1) * pageSize;
      // Whitelist sortable columns; unknown values fall back to createdAt.
      const getOrderColumn = (currentSortBy: string) => {
        switch (currentSortBy) {
          case "createdAt":
            return file.createdAt;
          case "uploadedAt":
            return file.uploadedAt;
          case "size":
            return file.size;
          case "filename":
            return file.filename;
          default:
            return file.createdAt;
        }
      };
      const orderColumn = getOrderColumn(sortBy);
      const orderFunc = sortOrder === "asc" ? asc : desc;
      return ResultAsync.fromPromise(
        this.db
          .select()
          .from(file)
          .where(whereClause)
          .orderBy(orderFunc(orderColumn))
          .limit(pageSize)
          .offset(offset),
        (error) => {
          logDomainEvent({
            level: "error",
            event: "files.list.failed",
            fctx,
            durationMs: Date.now() - startedAt,
            error,
            meta: { userId },
          });
          return fileErrors.getFilesFailed(
            fctx,
            error instanceof Error ? error.message : String(error),
          );
        },
      ).map((data) => {
        const totalPages = Math.ceil(total / pageSize);
        logDomainEvent({
          event: "files.list.succeeded",
          fctx,
          durationMs: Date.now() - startedAt,
          meta: {
            userId,
            page,
            totalPages,
            count: data.length,
          },
        });
        return {
          data: data as File[],
          total,
          page,
          pageSize,
          totalPages,
        };
      });
    });
  }
  /**
   * Fetch a single file owned by `userId`.
   * @returns the file, or `fileErrors.fileNotFound` when no matching row exists
   */
  getFileById(
    fctx: FlowExecCtx,
    fileId: string,
    userId: string,
  ): ResultAsync<File, Err> {
    const startedAt = Date.now();
    logDomainEvent({
      event: "files.get.started",
      fctx,
      meta: { fileId, userId },
    });
    return ResultAsync.fromPromise(
      this.db
        .select()
        .from(file)
        .where(and(eq(file.id, fileId), eq(file.userId, userId)))
        .limit(1),
      (error) => {
        logDomainEvent({
          level: "error",
          event: "files.get.failed",
          fctx,
          durationMs: Date.now() - startedAt,
          error,
          meta: { fileId, userId },
        });
        return fileErrors.getFileFailed(
          fctx,
          error instanceof Error ? error.message : String(error),
        );
      },
    ).andThen((result) => {
      const dbFile = result[0];
      if (!dbFile) {
        logDomainEvent({
          level: "warn",
          event: "files.get.failed",
          fctx,
          durationMs: Date.now() - startedAt,
          error: { code: "NOT_FOUND", message: "File not found" },
          meta: { fileId, userId },
        });
        return errAsync(fileErrors.fileNotFound(fctx, fileId));
      }
      logDomainEvent({
        event: "files.get.succeeded",
        fctx,
        durationMs: Date.now() - startedAt,
        meta: { fileId, userId },
      });
      return okAsync(dbFile as File);
    });
  }
  /**
   * Insert a new file record; `createdAt`/`updatedAt` are stamped here.
   * @returns the inserted row as returned by the database
   */
  createFile(
    fctx: FlowExecCtx,
    fileData: Omit<File, "createdAt" | "updatedAt">,
  ): ResultAsync<File, Err> {
    const startedAt = Date.now();
    logDomainEvent({
      event: "files.create.started",
      fctx,
      meta: { userId: fileData.userId, filename: fileData.filename },
    });
    const now = new Date();
    const insertData = {
      ...fileData,
      createdAt: now,
      updatedAt: now,
    } as any;
    return ResultAsync.fromPromise(
      this.db.insert(file).values(insertData).returning(),
      (error) => {
        logDomainEvent({
          level: "error",
          event: "files.create.failed",
          fctx,
          durationMs: Date.now() - startedAt,
          error,
          meta: { userId: fileData.userId, filename: fileData.filename },
        });
        return fileErrors.createFileFailed(
          fctx,
          error instanceof Error ? error.message : String(error),
        );
      },
    ).map((result) => {
      const created = result[0] as File;
      logDomainEvent({
        event: "files.create.succeeded",
        fctx,
        durationMs: Date.now() - startedAt,
        meta: { fileId: created.id, userId: created.userId },
      });
      return created;
    });
  }
  /**
   * Apply a partial update to a file owned by `userId`; bumps `updatedAt`.
   * @returns the updated row, or `fileErrors.fileNotFound` when nothing matched
   */
  updateFile(
    fctx: FlowExecCtx,
    fileId: string,
    userId: string,
    updates: FileUpdateRequest,
  ): ResultAsync<File, Err> {
    const startedAt = Date.now();
    logDomainEvent({
      event: "files.update.started",
      fctx,
      meta: {
        fileId,
        userId,
        hasFilename: updates.filename !== undefined,
        hasMetadata: updates.metadata !== undefined,
        hasTags: updates.tags !== undefined,
      },
    });
    const updateData = {
      ...updates,
      updatedAt: new Date(),
    } as any;
    return ResultAsync.fromPromise(
      this.db
        .update(file)
        .set(updateData)
        .where(and(eq(file.id, fileId), eq(file.userId, userId)))
        .returning(),
      (error) => {
        logDomainEvent({
          level: "error",
          event: "files.update.failed",
          fctx,
          durationMs: Date.now() - startedAt,
          error,
          meta: { fileId, userId },
        });
        return fileErrors.updateFileFailed(
          fctx,
          error instanceof Error ? error.message : String(error),
        );
      },
    ).andThen((result) => {
      const updated = result[0];
      if (!updated) {
        logDomainEvent({
          level: "warn",
          event: "files.update.failed",
          fctx,
          durationMs: Date.now() - startedAt,
          error: { code: "NOT_FOUND", message: "File not found" },
          meta: { fileId, userId },
        });
        return errAsync(fileErrors.fileNotFound(fctx, fileId));
      }
      logDomainEvent({
        event: "files.update.succeeded",
        fctx,
        durationMs: Date.now() - startedAt,
        meta: { fileId, userId },
      });
      return okAsync(updated as File);
    });
  }
  /**
   * Delete several files owned by `userId` in one statement.
   * Resolves `true` even if some ids did not match a row (no per-id check).
   */
  deleteFiles(
    fctx: FlowExecCtx,
    fileIds: readonly string[],
    userId: string,
  ): ResultAsync<boolean, Err> {
    const startedAt = Date.now();
    logDomainEvent({
      event: "files.delete.started",
      fctx,
      meta: { userId, fileCount: fileIds.length },
    });
    return ResultAsync.fromPromise(
      this.db
        .delete(file)
        .where(and(eq(file.userId, userId), inArray(file.id, [...fileIds]))),
      (error) => {
        logDomainEvent({
          level: "error",
          event: "files.delete.failed",
          fctx,
          durationMs: Date.now() - startedAt,
          error,
          meta: { userId, fileCount: fileIds.length },
        });
        return fileErrors.deleteFilesFailed(
          fctx,
          error instanceof Error ? error.message : String(error),
        );
      },
    ).map(() => {
      logDomainEvent({
        event: "files.delete.succeeded",
        fctx,
        durationMs: Date.now() - startedAt,
        meta: { userId, fileCount: fileIds.length },
      });
      return true;
    });
  }
  /**
   * Set a file's processing status (and optional error text).
   * NOTE(review): unlike the other mutators this is not scoped by userId —
   * presumably reserved for internal/background callers; confirm before
   * exposing it to request handlers.
   */
  updateFileStatus(
    fctx: FlowExecCtx,
    fileId: string,
    status: string,
    processingError?: string,
  ): ResultAsync<boolean, Err> {
    const startedAt = Date.now();
    logDomainEvent({
      event: "files.status_update.started",
      fctx,
      meta: {
        fileId,
        status,
        hasProcessingError: Boolean(processingError),
      },
    });
    return ResultAsync.fromPromise(
      this.db
        .update(file)
        .set({
          status,
          processingError,
          updatedAt: new Date(),
        })
        .where(eq(file.id, fileId)),
      (error) => {
        logDomainEvent({
          level: "error",
          event: "files.status_update.failed",
          fctx,
          durationMs: Date.now() - startedAt,
          error,
          meta: { fileId, status },
        });
        return fileErrors.updateStatusFailed(
          fctx,
          error instanceof Error ? error.message : String(error),
        );
      },
    ).map(() => {
      logDomainEvent({
        event: "files.status_update.succeeded",
        fctx,
        durationMs: Date.now() - startedAt,
        meta: { fileId, status },
      });
      return true;
    });
  }
  /**
   * Grant another user access to a file owned by `ownerId`.
   *
   * First verifies ownership, then inserts a `fileAccess` row. Missing
   * permission flags default to false; `onConflictDoNothing` makes repeated
   * grants idempotent (an existing grant is left unchanged).
   */
  shareFile(
    fctx: FlowExecCtx,
    fileId: string,
    ownerId: string,
    shareRequest: FileShareRequest,
  ): ResultAsync<boolean, Err> {
    const startedAt = Date.now();
    logDomainEvent({
      event: "files.share.started",
      fctx,
      meta: {
        fileId,
        ownerId,
        targetUserId: shareRequest.userId,
      },
    });
    return ResultAsync.fromPromise(
      this.db
        .select()
        .from(file)
        .where(and(eq(file.id, fileId), eq(file.userId, ownerId)))
        .limit(1),
      (error) => {
        logDomainEvent({
          level: "error",
          event: "files.share.failed",
          fctx,
          durationMs: Date.now() - startedAt,
          error,
          meta: { fileId, ownerId, targetUserId: shareRequest.userId },
        });
        return fileErrors.shareFileFailed(
          fctx,
          error instanceof Error ? error.message : String(error),
        );
      },
    ).andThen((result) => {
      const ownedFile = result[0];
      if (!ownedFile) {
        logDomainEvent({
          level: "warn",
          event: "files.share.failed",
          fctx,
          durationMs: Date.now() - startedAt,
          error: {
            code: "NOT_FOUND",
            message: "File not found or not owned by user",
          },
          meta: { fileId, ownerId, targetUserId: shareRequest.userId },
        });
        return errAsync(fileErrors.fileNotFound(fctx, fileId));
      }
      const now = new Date();
      return ResultAsync.fromPromise(
        this.db
          .insert(fileAccess)
          .values({
            fileId,
            userId: shareRequest.userId,
            canRead: shareRequest.permissions.canRead || false,
            canWrite: shareRequest.permissions.canWrite || false,
            canDelete: shareRequest.permissions.canDelete || false,
            canShare: shareRequest.permissions.canShare || false,
            grantedAt: now,
            expiresAt: shareRequest.expiresAt,
            createdAt: now,
            updatedAt: now,
          })
          .onConflictDoNothing(),
        (error) => {
          logDomainEvent({
            level: "error",
            event: "files.share.failed",
            fctx,
            durationMs: Date.now() - startedAt,
            error,
            meta: {
              fileId,
              ownerId,
              targetUserId: shareRequest.userId,
            },
          });
          return fileErrors.shareFileFailed(
            fctx,
            error instanceof Error ? error.message : String(error),
          );
        },
      ).map(() => {
        logDomainEvent({
          event: "files.share.succeeded",
          fctx,
          durationMs: Date.now() - startedAt,
          meta: { fileId, ownerId, targetUserId: shareRequest.userId },
        });
        return true;
      });
    });
  }
}

View File

@@ -0,0 +1,287 @@
import { ResultAsync, errAsync, okAsync } from "neverthrow";
import { FlowExecCtx } from "@core/flow.execution.context";
import type { PresignedUploadResponse } from "./data";
import { R2StorageClient } from "@pkg/objectstorage";
import { type Err } from "@pkg/result";
import { fileErrors } from "./errors";
import { logDomainEvent } from "@pkg/logger";
/** Connection and policy settings for the R2-backed storage repository. */
export type StorageConfig = {
  bucketName: string;
  region: string;
  endpoint: string;
  accessKey: string;
  secretKey: string;
  publicUrl: string;
  maxFileSize: number;
  allowedMimeTypes: string[];
  allowedExtensions: string[];
};
/** Per-upload behaviour flags forwarded to the storage client. */
export type UploadOptions = {
  visibility: "public" | "private";
  metadata?: Record<string, any>;
  tags?: string[];
  processImage?: boolean;
  processDocument?: boolean;
  processVideo?: boolean;
};
/** Metadata describing a file that was successfully written to object storage. */
export type UploadedFileMetadata = {
  id: string;
  filename: string;
  originalName: string;
  mimeType: string;
  size: number;
  hash: string;
  bucketName: string;
  objectKey: string;
  r2Url: string;
  visibility: string;
  userId: string;
  metadata?: Record<string, any>;
  tags?: string[];
  uploadedAt: Date;
};
/**
 * Domain-facing wrapper around `R2StorageClient`.
 *
 * Adapts the client's `{ data, error }` Result objects into neverthrow
 * `ResultAsync` values, maps failures onto `fileErrors.*`, and emits
 * start/success/failure domain events with durations for every operation.
 */
export class StorageRepository {
  private storageClient: R2StorageClient;
  constructor(config: StorageConfig) {
    this.storageClient = new R2StorageClient(config);
  }
  /**
   * Upload a buffer to object storage.
   *
   * Two failure layers are handled: a rejected promise from the client, and
   * a resolved Result carrying an `error` field; both become `Err` values.
   * @returns the stored file's metadata on success
   */
  uploadFile(
    fctx: FlowExecCtx,
    buffer: Buffer,
    filename: string,
    mimeType: string,
    userId: string,
    options: UploadOptions,
  ): ResultAsync<UploadedFileMetadata, Err> {
    const startedAt = Date.now();
    logDomainEvent({
      event: "files.storage.upload.started",
      fctx,
      meta: {
        userId,
        filename,
        mimeType,
        size: buffer.length,
        visibility: options.visibility,
      },
    });
    return ResultAsync.fromPromise(
      this.storageClient.uploadFile(
        buffer,
        filename,
        mimeType,
        userId,
        options,
      ),
      (error) => {
        logDomainEvent({
          level: "error",
          event: "files.storage.upload.failed",
          fctx,
          durationMs: Date.now() - startedAt,
          error,
          meta: { userId, filename },
        });
        return fileErrors.uploadFailed(
          fctx,
          error instanceof Error ? error.message : String(error),
        );
      },
    ).andThen((uploadResult) => {
      if (uploadResult.error) {
        logDomainEvent({
          level: "error",
          event: "files.storage.upload.failed",
          fctx,
          durationMs: Date.now() - startedAt,
          error: uploadResult.error,
          meta: { userId, filename, stage: "storage_response" },
        });
        return errAsync(
          fileErrors.uploadFailed(fctx, String(uploadResult.error)),
        );
      }
      const uploadData = uploadResult.data;
      if (!uploadData || !uploadData.file) {
        // Defensive: a "successful" upload without metadata is still a failure.
        logDomainEvent({
          level: "error",
          event: "files.storage.upload.failed",
          fctx,
          durationMs: Date.now() - startedAt,
          error: {
            code: "NO_FILE_METADATA",
            message: "Storage upload returned no file metadata",
          },
          meta: { userId, filename },
        });
        return errAsync(fileErrors.noFileMetadata(fctx));
      }
      logDomainEvent({
        event: "files.storage.upload.succeeded",
        fctx,
        durationMs: Date.now() - startedAt,
        meta: { userId, fileId: uploadData.file.id, filename },
      });
      return okAsync(uploadData.file as UploadedFileMetadata);
    });
  }
  /**
   * Create a presigned PUT URL so a client can upload directly to storage.
   * @param expiresIn URL validity window in seconds
   */
  generatePresignedUploadUrl(
    fctx: FlowExecCtx,
    objectKey: string,
    mimeType: string,
    expiresIn: number,
  ): ResultAsync<PresignedUploadResponse, Err> {
    const startedAt = Date.now();
    logDomainEvent({
      event: "files.storage.presigned.started",
      fctx,
      meta: { objectKey, mimeType, expiresIn },
    });
    return ResultAsync.fromPromise(
      this.storageClient.generatePresignedUploadUrl(
        objectKey,
        mimeType,
        expiresIn,
      ),
      (error) => {
        logDomainEvent({
          level: "error",
          event: "files.storage.presigned.failed",
          fctx,
          durationMs: Date.now() - startedAt,
          error,
          meta: { objectKey },
        });
        return fileErrors.presignedUrlFailed(
          fctx,
          error instanceof Error ? error.message : String(error),
        );
      },
    ).andThen((result) => {
      if (result.error) {
        logDomainEvent({
          level: "error",
          event: "files.storage.presigned.failed",
          fctx,
          durationMs: Date.now() - startedAt,
          error: result.error,
          meta: { objectKey, stage: "storage_response" },
        });
        return errAsync(
          fileErrors.presignedUrlFailed(fctx, String(result.error)),
        );
      }
      const presignedData = result.data;
      if (!presignedData) {
        logDomainEvent({
          level: "error",
          event: "files.storage.presigned.failed",
          fctx,
          durationMs: Date.now() - startedAt,
          error: {
            code: "NO_PRESIGNED_DATA",
            message: "No presigned data returned",
          },
          meta: { objectKey },
        });
        return errAsync(fileErrors.noPresignedData(fctx));
      }
      logDomainEvent({
        event: "files.storage.presigned.succeeded",
        fctx,
        durationMs: Date.now() - startedAt,
        meta: { objectKey },
      });
      return okAsync(presignedData as PresignedUploadResponse);
    });
  }
  /**
   * Delete a single object from storage.
   * @returns true on success; storage failures become `fileErrors.storageError`
   */
  deleteFile(
    fctx: FlowExecCtx,
    objectKey: string,
  ): ResultAsync<boolean, Err> {
    const startedAt = Date.now();
    logDomainEvent({
      event: "files.storage.delete.started",
      fctx,
      meta: { objectKey },
    });
    return ResultAsync.fromPromise(
      this.storageClient.deleteFile(objectKey),
      (error) => {
        logDomainEvent({
          level: "error",
          event: "files.storage.delete.failed",
          fctx,
          durationMs: Date.now() - startedAt,
          error,
          meta: { objectKey },
        });
        return fileErrors.storageError(
          fctx,
          error instanceof Error ? error.message : String(error),
        );
      },
    ).andThen((result) => {
      if (result.error) {
        logDomainEvent({
          level: "error",
          event: "files.storage.delete.failed",
          fctx,
          durationMs: Date.now() - startedAt,
          error: result.error,
          meta: { objectKey, stage: "storage_response" },
        });
        return errAsync(
          fileErrors.storageError(fctx, String(result.error)),
        );
      }
      logDomainEvent({
        event: "files.storage.delete.succeeded",
        fctx,
        durationMs: Date.now() - startedAt,
        meta: { objectKey },
      });
      return okAsync(true);
    });
  }
  /**
   * Delete many objects concurrently.
   * `ResultAsync.combine` fails fast: the first per-key error short-circuits
   * the combined result (remaining deletions may still be in flight).
   */
  deleteFiles(
    fctx: FlowExecCtx,
    objectKeys: string[],
  ): ResultAsync<boolean, Err> {
    const startedAt = Date.now();
    logDomainEvent({
      event: "files.storage.delete_many.started",
      fctx,
      meta: { fileCount: objectKeys.length },
    });
    return ResultAsync.combine(
      objectKeys.map((key) => this.deleteFile(fctx, key)),
    ).map(() => {
      logDomainEvent({
        event: "files.storage.delete_many.succeeded",
        fctx,
        durationMs: Date.now() - startedAt,
        meta: { fileCount: objectKeys.length },
      });
      return true;
    });
  }
}

34
packages/objectstorage/.gitignore vendored Normal file
View File

@@ -0,0 +1,34 @@
# dependencies (bun install)
node_modules
# output
out
dist
*.tgz
# code coverage
coverage
*.lcov
# logs
logs
*.log
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
# dotenv environment variable files
.env
.env.development.local
.env.test.local
.env.production.local
.env.local
# caches
.eslintcache
.cache
*.tsbuildinfo
# IntelliJ based IDEs
.idea
# Finder (MacOS) folder config
.DS_Store

View File

@@ -0,0 +1,4 @@
export * from "./src/client";
export * from "./src/data";
export * from "./src/processors";
export * from "./src/validation";

View File

@@ -0,0 +1,23 @@
{
"name": "@pkg/objectstorage",
"module": "index.ts",
"type": "module",
"dependencies": {
"@aws-sdk/client-s3": "^3.832.0",
"@aws-sdk/s3-request-presigner": "^3.832.0",
"@pkg/db": "workspace:*",
"@pkg/logger": "workspace:*",
"@pkg/result": "workspace:*",
"mime-types": "^3.0.1",
"sharp": "^0.34.2",
"valibot": "^1.2.0"
},
"devDependencies": {
"@types/mime-types": "^3.0.1",
"@types/node": "latest",
"typescript": "^5.9.3"
},
"peerDependencies": {
"typescript": "^5.9.3"
}
}

View File

@@ -0,0 +1,484 @@
import {
CopyObjectCommand,
DeleteObjectCommand,
GetObjectCommand,
HeadObjectCommand,
PutObjectCommand,
S3Client,
} from "@aws-sdk/client-s3";
import type {
FileMetadata,
FileUploadConfig,
PresignedUrlResult,
UploadOptions,
UploadResult,
} from "./data";
import {
generateFileHash,
generateObjectKey,
isDocumentFile,
isImageFile,
isVideoFile,
} from "./utils";
import { processDocument } from "./processors/document-processor";
import { getSignedUrl } from "@aws-sdk/s3-request-presigner";
import { processVideo } from "./processors/video-processor";
import { processImage } from "./processors/image-processor";
import { ERROR_CODES, type Result } from "@pkg/result";
import { getError, logger } from "@pkg/logger";
import { validateFile } from "./validation";
/**
 * Thin wrapper around the AWS SDK v3 S3 client configured for Cloudflare R2.
 *
 * All public methods return `{ data }` or `{ error }` Result objects and
 * never reject: S3 failures are caught, logged, and converted to errors.
 */
export class R2StorageClient {
  private s3Client: S3Client;
  private config: FileUploadConfig;
  constructor(config: FileUploadConfig) {
    this.config = config;
    this.s3Client = new S3Client({
      region: config.region,
      endpoint: config.endpoint,
      credentials: {
        accessKeyId: config.accessKey,
        secretAccessKey: config.secretKey,
      },
    });
  }
  /**
   * Upload a file directly to R2.
   *
   * Validates against the configured size/type policy, optionally runs the
   * image/document/video processors, uploads the (possibly transformed) bytes
   * plus an optional thumbnail, and returns the resulting file metadata.
   */
  async uploadFile(
    file: Buffer | Uint8Array,
    originalName: string,
    mimeType: string,
    userId: string,
    options?: UploadOptions,
  ): Promise<Result<UploadResult>> {
    try {
      // Validate file against size / mime-type / extension policy.
      const validationResult = validateFile(
        file,
        originalName,
        mimeType,
        this.config,
      );
      if (!validationResult.isValid) {
        return {
          error: getError({
            code: ERROR_CODES.VALIDATION_ERROR,
            message: "File validation failed",
            description: validationResult.errors.join(", "),
            detail: "File validation failed",
          }),
        };
      }
      // Generate file hash for deduplication
      const hash = generateFileHash(file);
      // Generate unique filename and object key. Fix: `split(".").pop()`
      // returns the whole name when there is no dot, which used to yield
      // keys like `${id}.report`; extensionless uploads now get a bare id.
      const fileId = crypto.randomUUID();
      const extension = originalName.includes(".")
        ? originalName.split(".").pop() || ""
        : "";
      const filename = extension ? `${fileId}.${extension}` : fileId;
      const objectKey = generateObjectKey(userId, filename);
      let processedFile = Buffer.from(file);
      let thumbnailBuffer: Buffer | undefined;
      let metadata = options?.metadata ? { ...options?.metadata } : {};
      // Process file based on type
      if (options?.processImage && isImageFile(mimeType)) {
        const processingResult = await processImage(file, {
          format: "webp",
          quality: 85,
          generateThumbnail: true,
          thumbnailSize: { width: 300, height: 300 },
          resize: {
            width: 1920,
            height: 1920,
            fit: "inside",
          },
        });
        if (processingResult.processed && processingResult.processedFile) {
          processedFile = Buffer.from(processingResult.processedFile);
          thumbnailBuffer = processingResult.thumbnail
            ? Buffer.from(processingResult.thumbnail)
            : undefined;
          metadata = { ...metadata, ...processingResult.metadata };
        }
      } else if (options?.processDocument && isDocumentFile(mimeType)) {
        const processingResult = await processDocument(file, mimeType, {
          extractText: true,
          generatePreview: true,
          extractMetadata: true,
        });
        if (processingResult.processed && processingResult.metadata) {
          metadata = { ...metadata, ...processingResult.metadata };
        }
      } else if (options?.processVideo && isVideoFile(mimeType)) {
        const processingResult = await processVideo(file, mimeType, {
          generateThumbnail: true,
          extractMetadata: true,
          thumbnailTimestamp: 1, // 1 second into video
        });
        if (processingResult.processed && processingResult.metadata) {
          metadata = { ...metadata, ...processingResult.metadata };
        }
      }
      // Upload main file to R2. S3 user metadata values must be strings,
      // so non-string metadata values are JSON-encoded.
      const uploadCommand = new PutObjectCommand({
        Bucket: this.config.bucketName,
        Key: objectKey,
        Body: processedFile,
        ContentType: mimeType,
        Metadata: {
          originalName,
          userId,
          hash,
          uploadId: fileId,
          processed: "true",
          ...Object.fromEntries(
            Object.entries(metadata).map(([key, value]) => [
              key,
              typeof value === "string" ? value : JSON.stringify(value),
            ]),
          ),
        },
      });
      await this.s3Client.send(uploadCommand);
      // Upload thumbnail if generated
      if (thumbnailBuffer) {
        const thumbnailKey = `thumbnails/${userId}/${fileId}_thumb.webp`;
        const thumbnailCommand = new PutObjectCommand({
          Bucket: this.config.bucketName,
          Key: thumbnailKey,
          Body: thumbnailBuffer,
          ContentType: "image/webp",
          Metadata: {
            originalFileId: fileId,
            type: "thumbnail",
          },
        });
        await this.s3Client.send(thumbnailCommand);
        metadata.thumbnailKey = thumbnailKey;
      }
      // Construct R2 URL (public base URL when configured, endpoint otherwise).
      const r2Url = `${this.config.publicUrl || this.config.endpoint}/${objectKey}`;
      const fileMetadata: FileMetadata = {
        id: fileId,
        filename,
        originalName,
        mimeType,
        size: processedFile.length,
        hash,
        bucketName: this.config.bucketName,
        objectKey,
        r2Url,
        visibility: options?.visibility || "private",
        userId,
        metadata,
        tags: options?.tags,
        uploadedAt: new Date(),
      };
      const result: UploadResult = {
        success: true,
        file: fileMetadata,
        uploadId: fileId,
      };
      logger.info(`Successfully uploaded file ${fileId} for user ${userId}`);
      return { data: result };
    } catch (error) {
      logger.error("File upload failed:", error);
      return {
        error: getError(
          {
            code: ERROR_CODES.STORAGE_ERROR,
            message: "Upload failed",
            description: "Failed to upload file to storage",
            detail: "S3 upload operation failed",
          },
          error,
        ),
      };
    }
  }
  /**
   * Generate presigned URL for direct upload.
   * @param expiresIn validity window in seconds (default: one hour)
   */
  async generatePresignedUploadUrl(
    objectKey: string,
    mimeType: string,
    expiresIn: number = 3600,
  ): Promise<Result<PresignedUrlResult>> {
    try {
      const command = new PutObjectCommand({
        Bucket: this.config.bucketName,
        Key: objectKey,
        ContentType: mimeType,
      });
      const uploadUrl = await getSignedUrl(this.s3Client, command, {
        expiresIn,
      });
      const result: PresignedUrlResult = {
        uploadUrl,
        expiresIn,
      };
      logger.info(`Generated presigned URL for ${objectKey}`);
      return { data: result };
    } catch (error) {
      logger.error("Failed to generate presigned URL:", error);
      return {
        error: getError(
          {
            code: ERROR_CODES.STORAGE_ERROR,
            message: "Failed to generate presigned URL",
            description: "Could not create upload URL",
            detail: "S3 presigned URL generation failed",
          },
          error,
        ),
      };
    }
  }
  /**
   * Get file from R2 as a Buffer.
   *
   * Handles the three response-body shapes the SDK can produce:
   * a Uint8Array, a body exposing `transformToByteArray`, or a Node
   * readable stream that must be drained manually.
   */
  async getFile(objectKey: string): Promise<Result<Buffer>> {
    try {
      const command = new GetObjectCommand({
        Bucket: this.config.bucketName,
        Key: objectKey,
      });
      const response = await this.s3Client.send(command);
      const body = response.Body;
      if (!body) {
        return {
          error: getError({
            code: ERROR_CODES.NOT_FOUND,
            message: "File not found",
            description: "The requested file does not exist",
            detail: "S3 response body is empty",
          }),
        };
      }
      // Handle different response body types
      if (body instanceof Uint8Array) {
        return { data: Buffer.from(body) };
      }
      // For Node.js Readable streams (AWS SDK v3)
      if (typeof body.transformToByteArray === "function") {
        const byteArray = await body.transformToByteArray();
        return { data: Buffer.from(byteArray) };
      }
      // Fallback: treat as readable stream
      const chunks: Buffer[] = [];
      // Type assertion to handle the stream properly
      const stream = body as NodeJS.ReadableStream;
      // BUGFIX: `return await` (not bare `return`) so that a stream "error"
      // event rejects *inside* this try block and is converted to an error
      // Result by the catch below, instead of leaking a rejected promise to
      // callers that expect the Result contract.
      return await new Promise<Result<Buffer>>((resolve, reject) => {
        stream.on("data", (chunk: Buffer) => {
          chunks.push(chunk);
        });
        stream.on("end", () => {
          const buffer = Buffer.concat(chunks);
          logger.info(`Successfully retrieved file ${objectKey}`);
          resolve({ data: buffer });
        });
        stream.on("error", (error) => {
          reject(error);
        });
      });
    } catch (error) {
      logger.error(`Failed to get file ${objectKey}:`, error);
      return {
        error: getError(
          {
            code: ERROR_CODES.STORAGE_ERROR,
            message: "Failed to get file",
            description: "Could not retrieve file from storage",
            detail: "S3 get operation failed",
          },
          error,
        ),
      };
    }
  }
  /**
   * Delete file from R2.
   */
  async deleteFile(objectKey: string): Promise<Result<boolean>> {
    try {
      const command = new DeleteObjectCommand({
        Bucket: this.config.bucketName,
        Key: objectKey,
      });
      await this.s3Client.send(command);
      logger.info(`Successfully deleted file ${objectKey}`);
      return { data: true };
    } catch (error) {
      logger.error(`Failed to delete file ${objectKey}:`, error);
      return {
        error: getError(
          {
            code: ERROR_CODES.STORAGE_ERROR,
            message: "Failed to delete file",
            description: "Could not delete file from storage",
            detail: "S3 delete operation failed",
          },
          error,
        ),
      };
    }
  }
  /**
   * Get object metadata (size, content type, last-modified, user metadata)
   * from R2 without downloading the body.
   */
  async getFileMetadata(
    objectKey: string,
  ): Promise<Result<Record<string, any>>> {
    try {
      const command = new HeadObjectCommand({
        Bucket: this.config.bucketName,
        Key: objectKey,
      });
      const response = await this.s3Client.send(command);
      const metadata = {
        size: response.ContentLength,
        lastModified: response.LastModified,
        contentType: response.ContentType,
        metadata: response.Metadata || {},
      };
      logger.info(`Successfully retrieved metadata for ${objectKey}`);
      return { data: metadata };
    } catch (error) {
      logger.error(`Failed to get file metadata for ${objectKey}:`, error);
      return {
        error: getError(
          {
            code: ERROR_CODES.STORAGE_ERROR,
            message: "Failed to get file metadata",
            description: "Could not retrieve file information",
            detail: "S3 head operation failed",
          },
          error,
        ),
      };
    }
  }
  /**
   * Check if a file exists in R2. A 404/NotFound from HEAD resolves to
   * `{ data: false }`; every other failure is a storage error.
   */
  async fileExists(objectKey: string): Promise<Result<boolean>> {
    try {
      const command = new HeadObjectCommand({
        Bucket: this.config.bucketName,
        Key: objectKey,
      });
      await this.s3Client.send(command);
      return { data: true };
    } catch (error) {
      // Narrow the unknown catch value instead of typing it `any`.
      const err = error as {
        name?: string;
        $metadata?: { httpStatusCode?: number };
      };
      if (err.name === "NotFound" || err.$metadata?.httpStatusCode === 404) {
        return { data: false };
      }
      logger.error(`Failed to check file existence for ${objectKey}:`, error);
      return {
        error: getError(
          {
            code: ERROR_CODES.STORAGE_ERROR,
            message: "Failed to check file existence",
            description: "Could not verify if file exists",
            detail: "S3 head operation failed",
          },
          error,
        ),
      };
    }
  }
  /**
   * Copy file within R2 (same bucket, server side).
   */
  async copyFile(
    sourceKey: string,
    destinationKey: string,
  ): Promise<Result<boolean>> {
    try {
      const command = new CopyObjectCommand({
        Bucket: this.config.bucketName,
        Key: destinationKey,
        CopySource: `${this.config.bucketName}/${sourceKey}`,
      });
      await this.s3Client.send(command);
      logger.info(
        `Successfully copied file from ${sourceKey} to ${destinationKey}`,
      );
      return { data: true };
    } catch (error) {
      logger.error(
        `Failed to copy file from ${sourceKey} to ${destinationKey}:`,
        error,
      );
      return {
        error: getError(
          {
            code: ERROR_CODES.STORAGE_ERROR,
            message: "Failed to copy file",
            description: "Could not copy file in storage",
            detail: "S3 copy operation failed",
          },
          error,
        ),
      };
    }
  }
}

View File

@@ -0,0 +1,154 @@
import * as v from "valibot";
/** Storage configuration: connection credentials plus the upload policy limits. */
export const fileUploadConfigSchema = v.object({
  bucketName: v.string(),
  region: v.string(),
  endpoint: v.string(),
  accessKey: v.string(),
  secretKey: v.string(),
  publicUrl: v.optional(v.string()),
  maxFileSize: v.pipe(v.number(), v.integer()), // in bytes
  allowedMimeTypes: v.array(v.string()),
  allowedExtensions: v.array(v.string()),
});
export type FileUploadConfig = v.InferOutput<typeof fileUploadConfigSchema>;
/** Who may see a file. */
export const fileVisibilitySchema = v.picklist([
  "public",
  "private",
  "restricted",
]);
export type FileVisibility = v.InferOutput<typeof fileVisibilitySchema>;
/** Canonical description of a stored object as recorded after upload. */
export const fileMetadataSchema = v.object({
  id: v.string(),
  filename: v.string(),
  originalName: v.string(),
  mimeType: v.string(),
  size: v.pipe(v.number(), v.integer()),
  hash: v.string(),
  bucketName: v.string(),
  objectKey: v.string(),
  r2Url: v.string(),
  visibility: fileVisibilitySchema,
  userId: v.string(),
  metadata: v.optional(v.record(v.string(), v.any())),
  tags: v.optional(v.array(v.string())),
  uploadedAt: v.date(),
  expiresAt: v.optional(v.date()),
});
export type FileMetadata = v.InferOutput<typeof fileMetadataSchema>;
/** Result of an upload attempt: file metadata on success, error text otherwise. */
export const uploadResultSchema = v.object({
  success: v.boolean(),
  file: v.optional(fileMetadataSchema),
  error: v.optional(v.string()),
  uploadId: v.optional(v.string()),
});
export type UploadResult = v.InferOutput<typeof uploadResultSchema>;
/** Presigned URL pair for direct-to-storage transfers; `expiresIn` is in seconds. */
export const presignedUrlResultSchema = v.object({
  uploadUrl: v.string(),
  downloadUrl: v.optional(v.string()),
  expiresIn: v.pipe(v.number(), v.integer()),
  fields: v.optional(v.record(v.string(), v.any())),
});
export type PresignedUrlResult = v.InferOutput<typeof presignedUrlResultSchema>;
/** Outcome of policy validation: hard errors plus non-blocking warnings. */
export const fileValidationResultSchema = v.object({
  isValid: v.boolean(),
  errors: v.array(v.string()),
  warnings: v.array(v.string()),
});
export type FileValidationResult = v.InferOutput<
  typeof fileValidationResultSchema
>;
/** Resize directive for the image processor (sharp-style `fit` modes). */
export const imageResizeOptionsSchema = v.object({
  width: v.optional(v.pipe(v.number(), v.integer())),
  height: v.optional(v.pipe(v.number(), v.integer())),
  fit: v.optional(
    v.picklist(["cover", "contain", "fill", "inside", "outside"]),
  ),
});
export type ImageResizeOptions = v.InferOutput<typeof imageResizeOptionsSchema>;
/** Thumbnail dimensions in pixels. */
export const thumbnailSizeSchema = v.object({
  width: v.pipe(v.number(), v.integer()),
  height: v.pipe(v.number(), v.integer()),
});
export type ThumbnailSize = v.InferOutput<typeof thumbnailSizeSchema>;
/** Full set of image-processing directives (resize, re-encode, thumbnail). */
export const imageProcessingOptionsSchema = v.object({
  resize: v.optional(imageResizeOptionsSchema),
  format: v.optional(v.picklist(["jpeg", "png", "webp", "avif"])),
  quality: v.optional(v.pipe(v.number(), v.integer())),
  generateThumbnail: v.optional(v.boolean()),
  thumbnailSize: v.optional(thumbnailSizeSchema),
});
export type ImageProcessingOptions = v.InferOutput<
  typeof imageProcessingOptionsSchema
>;
// File Processing Result Schema
export const fileProcessingResultSchema = v.object({
processed: v.boolean(),
originalFile: v.optional(v.instance(Uint8Array)), // Buffer equivalent
processedFile: v.optional(v.instance(Uint8Array)), // Buffer equivalent
thumbnail: v.optional(v.instance(Uint8Array)), // Buffer equivalent
metadata: v.optional(v.record(v.string(), v.any())),
error: v.optional(v.string()),
});
export type FileProcessingResult = v.InferOutput<
typeof fileProcessingResultSchema
>;
// File Security Result Schema (from utils.ts)
// NOTE(review): utils.ts also declares a structurally identical
// FileSecurityResult interface — consider consolidating on one definition.
export const fileSecurityResultSchema = v.object({
  isSecure: v.boolean(), // true when `issues` is empty
  issues: v.array(v.string()),
  warnings: v.array(v.string()),
});
export type FileSecurityResult = v.InferOutput<typeof fileSecurityResultSchema>;
// Document Processing Options Schema — consumed by processDocument().
export const documentProcessingOptionsSchema = v.object({
  extractText: v.optional(v.boolean()),
  generatePreview: v.optional(v.boolean()),
  extractMetadata: v.optional(v.boolean()),
  validateStructure: v.optional(v.boolean()),
});
export type DocumentProcessingOptions = v.InferOutput<
  typeof documentProcessingOptionsSchema
>;
// Video Processing Options Schema — consumed by processVideo().
export const videoProcessingOptionsSchema = v.object({
  generateThumbnail: v.optional(v.boolean()),
  extractMetadata: v.optional(v.boolean()),
  thumbnailTimestamp: v.optional(v.number()), // Seconds into video for thumbnail
  thumbnailSize: v.optional(thumbnailSizeSchema),
});
export type VideoProcessingOptions = v.InferOutput<
  typeof videoProcessingOptionsSchema
>;
// Upload Options Schema (used in client.ts)
export const uploadOptionsSchema = v.object({
  visibility: v.optional(fileVisibilitySchema),
  metadata: v.optional(v.record(v.string(), v.any())),
  tags: v.optional(v.array(v.string())),
  processImage: v.optional(v.boolean()), // run the image pipeline on upload
  processDocument: v.optional(v.boolean()),
  processVideo: v.optional(v.boolean()),
});
export type UploadOptions = v.InferOutput<typeof uploadOptionsSchema>;

View File

@@ -0,0 +1,132 @@
import { createHash } from "crypto";
import type { DocumentProcessingOptions, FileProcessingResult } from "../data";
/**
 * Process documents (PDF, text files, etc.)
 *
 * Dispatches to a type-specific processor based on the MIME type and merges
 * the base file info (size, MIME type, SHA-256 hash) into the processor's
 * metadata.
 *
 * @param buffer - Raw document bytes.
 * @param mimeType - Declared MIME type used to pick the processor.
 * @param options - Extraction/preview flags; defaults to all-off.
 * @returns FileProcessingResult; never throws — failures come back as
 *          `processed: false` with an `error` message.
 */
export async function processDocument(
  buffer: Buffer | Uint8Array,
  mimeType: string,
  options: DocumentProcessingOptions = {},
): Promise<FileProcessingResult> {
  try {
    const inputBuffer = Buffer.from(buffer);
    // Basic document information, shared by every document type.
    // FIX: the original computed this object and then discarded it — the
    // type-specific processors returned their metadata without it. Merge it
    // into whatever the processor reports.
    const baseMetadata: Record<string, any> = {
      original: {
        size: inputBuffer.length,
        mimeType,
        hash: createHash("sha256").update(inputBuffer).digest("hex"),
      },
    };
    // Process based on document type
    let result: FileProcessingResult;
    if (mimeType === "application/pdf") {
      result = await processPDF(inputBuffer, options);
    } else if (mimeType.startsWith("text/")) {
      result = await processTextFile(inputBuffer, options);
    } else {
      result = await processGenericDocument(inputBuffer, options);
    }
    // Processor-specific keys win on collision; `original` is only set here.
    return {
      ...result,
      metadata: { ...baseMetadata, ...result.metadata },
    };
  } catch (error) {
    return {
      processed: false,
      error: `Document processing failed: ${error instanceof Error ? error.message : String(error)}`,
    };
  }
}
/**
 * Placeholder PDF processor — a real implementation would use a PDF library
 * (e.g. pdf-parse / pdf2pic). Populates stub metadata per the option flags
 * and passes the bytes through untouched.
 */
async function processPDF(
  buffer: Buffer,
  options: DocumentProcessingOptions,
): Promise<FileProcessingResult> {
  const metadata: Record<string, any> = { type: "pdf", processed: true };
  if (options.extractMetadata) {
    // Would be read from the PDF itself (page count, author, title, ...).
    metadata.pdf = {
      pageCount: 1, // Placeholder
      title: "Unknown",
      author: "Unknown",
      creationDate: new Date().toISOString(),
    };
  }
  if (options.extractText) {
    // Would hold real text-extraction stats.
    metadata.textContent = {
      extracted: true,
      characterCount: 0, // Placeholder
      wordCount: 0, // Placeholder
    };
  }
  if (options.generatePreview) {
    // Would render the first page to an image.
    metadata.preview = { generated: true, format: "png" };
  }
  // PDFs typically don't need re-encoding, so input and output are identical.
  return {
    processed: true,
    originalFile: buffer,
    processedFile: buffer,
    metadata,
  };
}
/**
 * Text-file processor: optionally computes simple character/word/line stats.
 * The bytes are decoded as UTF-8 and passed through unchanged.
 */
async function processTextFile(
  buffer: Buffer,
  options: DocumentProcessingOptions,
): Promise<FileProcessingResult> {
  const content = buffer.toString("utf-8");
  const metadata: Record<string, any> = { type: "text", processed: true };
  // Only run the analysis when the caller asked for it.
  if (options.extractText || options.extractMetadata) {
    const wordCount = content
      .split(/\s+/)
      .filter((token) => token.length > 0).length;
    metadata.textAnalysis = {
      characterCount: content.length,
      wordCount,
      lineCount: content.split("\n").length,
      encoding: "utf-8",
    };
  }
  return {
    processed: true,
    originalFile: buffer,
    processedFile: buffer,
    metadata,
  };
}
/**
 * Fallback for document types without a dedicated processor: passes the file
 * through untouched with minimal metadata.
 * NOTE(review): `options` is currently unused for generic documents.
 */
async function processGenericDocument(
  buffer: Buffer,
  options: DocumentProcessingOptions,
): Promise<FileProcessingResult> {
  return {
    processed: true,
    originalFile: buffer,
    processedFile: buffer,
    metadata: { type: "generic", processed: true },
  };
}

View File

@@ -0,0 +1,286 @@
import sharp from "sharp";
import type { FileProcessingResult, ImageProcessingOptions } from "../data";
/**
 * Process images with compression, resizing, format conversion, and thumbnail
 * generation.
 *
 * @param buffer - Raw image bytes.
 * @param options - Resize/format/quality/thumbnail settings (all optional;
 *                  defaults: webp output at quality 85).
 * @returns FileProcessingResult with original bytes, processed bytes, an
 *          optional thumbnail, and detailed metadata. Never throws — failures
 *          come back as `processed: false` with an `error` message.
 */
export async function processImage(
  buffer: Buffer | Uint8Array,
  options: ImageProcessingOptions = {},
): Promise<FileProcessingResult> {
  // FIX: record a start time so metadata.processing.processingTime below is a
  // real elapsed duration in ms — the original stored Date.now() (an absolute
  // timestamp) and flagged it as a placeholder.
  const startedAt = Date.now();
  try {
    const inputBuffer = Buffer.isBuffer(buffer)
      ? buffer
      : Buffer.from(buffer);
    let processedBuffer = inputBuffer;
    let thumbnailBuffer: Buffer | undefined;
    const metadata: Record<string, any> = {};
    // Initialize Sharp instance
    const image = sharp(inputBuffer);
    const originalMetadata = await image.metadata();
    // Store original metadata
    metadata.original = {
      width: originalMetadata.width,
      height: originalMetadata.height,
      format: originalMetadata.format,
      size: inputBuffer.length,
      colorSpace: originalMetadata.space,
      channels: originalMetadata.channels,
      density: originalMetadata.density,
      hasAlpha: originalMetadata.hasAlpha,
    };
    // Apply transformations
    let transformedImage = image;
    // Resize if requested
    if (options.resize) {
      const { width, height, fit = "cover" } = options.resize;
      transformedImage = transformedImage.resize(width, height, {
        fit: fit as keyof sharp.FitEnum,
        withoutEnlargement: true, // Don't enlarge smaller images
      });
      metadata.processed = {
        ...metadata.processed,
        resized: true,
        targetWidth: width,
        targetHeight: height,
        fit,
      };
    }
    // Apply format conversion and quality settings
    const outputFormat = options.format || "webp";
    const quality = options.quality || 85;
    switch (outputFormat) {
      case "jpeg":
        transformedImage = transformedImage.jpeg({
          quality,
          progressive: true,
          mozjpeg: true, // Use mozjpeg encoder for better compression
        });
        break;
      case "png":
        transformedImage = transformedImage.png({
          quality,
          compressionLevel: 9,
          progressive: true,
        });
        break;
      case "webp":
        transformedImage = transformedImage.webp({
          quality,
          effort: 6, // Max compression effort
        });
        break;
      case "avif":
        transformedImage = transformedImage.avif({
          quality,
          effort: 6,
        });
        break;
      default:
        // Unreachable while outputFormat defaults to "webp" and the options
        // type is a closed picklist; kept as a safety net for untyped callers.
        if (originalMetadata.format === "jpeg") {
          transformedImage = transformedImage.jpeg({ quality });
        } else if (originalMetadata.format === "png") {
          transformedImage = transformedImage.png({ quality });
        }
    }
    // Generate processed image
    processedBuffer = await transformedImage.toBuffer();
    // Get final metadata
    const finalMetadata = await sharp(processedBuffer).metadata();
    metadata.processed = {
      ...metadata.processed,
      width: finalMetadata.width,
      height: finalMetadata.height,
      format: outputFormat,
      size: processedBuffer.length,
      quality,
      compressionRatio: inputBuffer.length / processedBuffer.length,
    };
    // Generate thumbnail if requested
    if (options.generateThumbnail) {
      const thumbSize = options.thumbnailSize || {
        width: 300,
        height: 300,
      };
      thumbnailBuffer = await sharp(inputBuffer)
        .resize(thumbSize.width, thumbSize.height, {
          fit: "cover",
          position: "center",
        })
        .webp({ quality: 80 })
        .toBuffer();
      const thumbMetadata = await sharp(thumbnailBuffer).metadata();
      metadata.thumbnail = {
        width: thumbMetadata.width,
        height: thumbMetadata.height,
        format: "webp",
        size: thumbnailBuffer.length,
      };
    }
    // Add processing stats
    metadata.processing = {
      processedAt: new Date().toISOString(),
      sizeSaving: inputBuffer.length - processedBuffer.length,
      sizeSavingPercentage:
        ((inputBuffer.length - processedBuffer.length) /
          inputBuffer.length) *
        100,
      processingTime: Date.now() - startedAt, // elapsed milliseconds
    };
    return {
      processed: true,
      originalFile: inputBuffer,
      processedFile: processedBuffer,
      thumbnail: thumbnailBuffer,
      metadata,
    };
  } catch (error) {
    return {
      processed: false,
      error: `Image processing failed: ${error instanceof Error ? error.message : String(error)}`,
    };
  }
}
/**
 * Extract image metadata without processing.
 *
 * @param buffer - Raw image bytes.
 * @returns Width/height/format/size plus colour-space details from sharp.
 * @throws Error when the buffer cannot be read as an image.
 */
export async function extractImageMetadata(
  buffer: Buffer | Uint8Array,
): Promise<Record<string, any>> {
  try {
    const inputBuffer = Buffer.isBuffer(buffer)
      ? buffer
      : Buffer.from(buffer);
    const image = sharp(inputBuffer);
    const metadata = await image.metadata();
    return {
      width: metadata.width,
      height: metadata.height,
      format: metadata.format,
      size: inputBuffer.length,
      colorSpace: metadata.space,
      channels: metadata.channels,
      density: metadata.density,
      hasAlpha: metadata.hasAlpha,
      // FIX: always a boolean — the original `pages && pages > 1` leaked
      // `undefined` when sharp reported no page count.
      isAnimated: (metadata.pages ?? 0) > 1,
      orientation: metadata.orientation,
    };
  } catch (error) {
    throw new Error(
      `Failed to extract image metadata: ${error instanceof Error ? error.message : String(error)}`,
    );
  }
}
/**
 * Generate multiple sizes for responsive images.
 *
 * Each requested size is encoded as webp (quality 85), fitted "inside" the
 * target box and never enlarged beyond the source dimensions.
 *
 * @throws Error when any resize fails.
 */
export async function generateResponsiveSizes(
  buffer: Buffer | Uint8Array,
  sizes: Array<{ name: string; width: number; height?: number }> = [
    { name: "small", width: 400 },
    { name: "medium", width: 800 },
    { name: "large", width: 1200 },
    { name: "xlarge", width: 1920 },
  ],
): Promise<Record<string, Buffer>> {
  const input = Buffer.isBuffer(buffer) ? buffer : Buffer.from(buffer);
  const output: Record<string, Buffer> = {};
  try {
    for (const { name, width, height } of sizes) {
      output[name] = await sharp(input)
        .resize(width, height, {
          fit: "inside",
          withoutEnlargement: true,
        })
        .webp({ quality: 85 })
        .toBuffer();
    }
  } catch (error) {
    throw new Error(
      `Failed to generate responsive sizes: ${error instanceof Error ? error.message : String(error)}`,
    );
  }
  return output;
}
/**
 * Create an optimized avatar image: a square, center-cropped webp.
 *
 * @param size - Edge length in pixels (default 200).
 * @throws Error when the image cannot be processed.
 */
export async function processAvatar(
  buffer: Buffer | Uint8Array,
  size: number = 200,
): Promise<Buffer> {
  try {
    const input = Buffer.isBuffer(buffer) ? buffer : Buffer.from(buffer);
    const pipeline = sharp(input)
      .resize(size, size, { fit: "cover", position: "center" })
      .webp({ quality: 90 });
    return await pipeline.toBuffer();
  } catch (error) {
    const reason = error instanceof Error ? error.message : String(error);
    throw new Error(`Avatar processing failed: ${reason}`);
  }
}
/**
 * Remove EXIF data from images for privacy.
 *
 * sharp drops metadata (including EXIF) on re-encode unless withMetadata()
 * is requested; rotate() first applies the EXIF orientation so the image
 * still displays the right way up afterwards.
 *
 * @throws Error when the image cannot be processed.
 */
export async function stripExifData(
  buffer: Buffer | Uint8Array,
): Promise<Buffer> {
  try {
    const input = Buffer.isBuffer(buffer) ? buffer : Buffer.from(buffer);
    return await sharp(input).rotate().toBuffer();
  } catch (error) {
    const reason = error instanceof Error ? error.message : String(error);
    throw new Error(`EXIF stripping failed: ${reason}`);
  }
}
/**
 * Validate if buffer contains a valid image.
 *
 * @returns true when sharp can read dimensions and a format; false otherwise
 *          (including when sharp throws on unreadable input).
 */
export async function validateImage(
  buffer: Buffer | Uint8Array,
): Promise<boolean> {
  try {
    const input = Buffer.isBuffer(buffer) ? buffer : Buffer.from(buffer);
    const { width, height, format } = await sharp(input).metadata();
    return Boolean(width && height && format);
  } catch {
    return false;
  }
}

View File

@@ -0,0 +1,3 @@
export * from "./document-processor";
export * from "./image-processor";
export * from "./video-processor";

View File

@@ -0,0 +1,62 @@
import { createHash } from "crypto";
import type { FileProcessingResult, VideoProcessingOptions } from "../data";
/**
 * Process video files (extract metadata, generate thumbnails).
 * Note: This is a basic implementation. For production use, you'd want to
 * use FFmpeg — the video/thumbnail fields below are placeholder values.
 */
export async function processVideo(
  buffer: Buffer | Uint8Array,
  mimeType: string,
  options: VideoProcessingOptions = {},
): Promise<FileProcessingResult> {
  try {
    const input = Buffer.from(buffer);
    // Basic video information (always present).
    const metadata: Record<string, any> = {
      original: {
        size: input.length,
        mimeType,
        hash: createHash("sha256").update(input).digest("hex"),
      },
    };
    if (options.extractMetadata) {
      // Placeholder values — a real implementation would probe with FFmpeg.
      metadata.video = {
        duration: 0, // seconds
        width: 1920, // placeholder
        height: 1080, // placeholder
        framerate: 30, // placeholder
        bitrate: 5000000, // placeholder
        codec: "h264", // placeholder
      };
    }
    if (options.generateThumbnail) {
      // A real implementation would extract a frame via FFmpeg.
      metadata.thumbnail = {
        generated: true,
        timestamp: options.thumbnailTimestamp || 0,
        format: "jpeg",
        size: options.thumbnailSize || { width: 640, height: 360 },
      };
    }
    // Videos are not re-encoded during upload; pass the bytes through.
    return {
      processed: true,
      originalFile: input,
      processedFile: input,
      metadata,
    };
  } catch (error) {
    const reason = error instanceof Error ? error.message : String(error);
    return {
      processed: false,
      error: `Video processing failed: ${reason}`,
    };
  }
}

View File

@@ -0,0 +1,186 @@
import { lookup } from "mime-types";
import { createHash } from "crypto";
/**
 * Generate a secure file hash for deduplication.
 *
 * @returns Lowercase hex SHA-256 digest of the buffer.
 */
export function generateFileHash(buffer: Buffer | Uint8Array): string {
  const hasher = createHash("sha256");
  hasher.update(buffer);
  return hasher.digest("hex");
}
/**
 * Generate a unique filename with timestamp and random suffix.
 *
 * Output shape: `[user8_]<timestamp>_<sanitized-base>_<uuid>[.ext]`.
 *
 * @param originalName - The uploader-supplied filename.
 * @param userId - Optional owner id; its first 8 chars become a prefix.
 */
export function generateUniqueFilename(
  originalName: string,
  userId?: string,
): string {
  const fileId = crypto.randomUUID();
  const timestamp = Date.now();
  const extension = getFileExtension(originalName);
  // FIX: strip the extension from the END of the name by length. The original
  // used String.replace, which removed the FIRST occurrence (mangling names
  // like "a.txt.txt") and missed uppercase extensions entirely, since
  // getFileExtension lowercases.
  const withoutExtension = extension
    ? originalName.slice(0, originalName.length - extension.length - 1)
    : originalName;
  const baseName = withoutExtension.slice(0, 50); // Limit length
  const sanitizedBaseName = sanitizeFilename(baseName);
  const userPrefix = userId ? `${userId.slice(0, 8)}_` : "";
  // FIX: no trailing dot when the original name had no extension.
  const suffix = extension ? `.${extension}` : "";
  return `${userPrefix}${timestamp}_${sanitizedBaseName}_${fileId}${suffix}`;
}
/**
 * Sanitize filename for safe storage: unsafe characters become underscores,
 * runs of underscores collapse, edge underscores are trimmed, and the result
 * is lowercased. Dots, hyphens and existing underscores are preserved.
 */
export function sanitizeFilename(filename: string): string {
  let safe = filename.replace(/[^a-zA-Z0-9._-]/g, "_"); // replace unsafe chars
  safe = safe.replace(/_{2,}/g, "_"); // collapse runs of underscores
  safe = safe.replace(/^_+|_+$/g, ""); // trim leading/trailing underscores
  return safe.toLowerCase();
}
/**
 * Get file extension from filename.
 *
 * @returns Lowercase extension without the dot, or "" when there is none.
 */
export function getFileExtension(filename: string): string {
  const dot = filename.lastIndexOf(".");
  return dot === -1 ? "" : filename.slice(dot + 1).toLowerCase();
}
/**
 * Get MIME type from filename via the mime-types lookup table.
 *
 * @returns The MIME type string, or null when the extension is unknown
 *          (mime-types' `lookup` returns false in that case).
 */
export function getMimeTypeFromFilename(filename: string): string | null {
  const resolved = lookup(filename);
  return resolved === false ? null : resolved;
}
/**
 * Format file size in human readable format (e.g. "1.5 KB", rounded to two
 * decimal places).
 *
 * @param bytes - Non-negative byte count.
 */
export function formatFileSize(bytes: number): string {
  const sizes = ["Bytes", "KB", "MB", "GB", "TB"];
  if (bytes === 0) return "0 Bytes";
  // FIX: clamp the unit index — sizes above 1024 TB previously indexed past
  // the array and rendered as "... undefined".
  const exponent = Math.min(
    Math.floor(Math.log(bytes) / Math.log(1024)),
    sizes.length - 1,
  );
  return `${Math.round((bytes / Math.pow(1024, exponent)) * 100) / 100} ${sizes[exponent]}`;
}
/**
 * Check if file type is an image (any "image/*" MIME type).
 */
export function isImageFile(mimeType: string): boolean {
  return /^image\//.test(mimeType);
}
/**
 * Check if file type is a video (any "video/*" MIME type).
 */
export function isVideoFile(mimeType: string): boolean {
  return /^video\//.test(mimeType);
}
/**
 * Check if file type is a document: any "text/*" MIME type, or one of the
 * known office/PDF types.
 */
export function isDocumentFile(mimeType: string): boolean {
  if (mimeType.startsWith("text/")) {
    return true;
  }
  const documentTypes = new Set([
    "application/pdf",
    "application/msword",
    "application/vnd.openxmlformats-officedocument.wordprocessingml.document",
    "application/vnd.ms-excel",
    "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
    "text/plain",
    "text/csv",
    "application/rtf",
  ]);
  return documentTypes.has(mimeType);
}
/**
 * Generate object key for R2 storage.
 *
 * Shape: `<category>/<userId>/<YYYY-MM-DD>/<filename>`.
 *
 * @param userId - Owner of the object.
 * @param filename - Stored filename (should already be unique/sanitized).
 * @param category - Top-level folder, defaults to "uploads".
 */
export function generateObjectKey(
  userId: string,
  filename: string,
  category: string = "uploads",
): string {
  const date = new Date();
  const year = date.getFullYear();
  const month = String(date.getMonth() + 1).padStart(2, "0");
  const day = String(date.getDate()).padStart(2, "0");
  // FIX: the original emitted the literal text "$(unknown)" instead of the
  // filename, leaving the `filename` parameter unused and producing colliding
  // keys for every file uploaded on the same day.
  return `${category}/${userId}/${year}-${month}-${day}/${filename}`;
}
/**
 * Validate file against security rules
 *
 * Result of validateFileSecurity(): `issues` are findings that make the file
 * insecure (isSecure === false), `warnings` are advisory only.
 * NOTE(review): data.ts derives a structurally identical FileSecurityResult
 * from fileSecurityResultSchema — consider consolidating on one definition.
 */
export interface FileSecurityResult {
  isSecure: boolean; // true when `issues` is empty
  issues: string[]; // blocking findings
  warnings: string[]; // advisory findings
}
/**
 * Run the buffer/filename/MIME triple through a set of security checks.
 * Findings that should block the upload go into `issues`; a MIME mismatch is
 * only reported as a warning.
 */
export function validateFileSecurity(
  buffer: Buffer | Uint8Array,
  filename: string,
  mimeType: string,
): FileSecurityResult {
  const issues: string[] = [];
  const warnings: string[] = [];

  // Executable / installer extensions we refuse outright.
  const dangerousExtensions = new Set([
    "exe",
    "bat",
    "cmd",
    "com",
    "pif",
    "scr",
    "vbs",
    "js",
    "jar",
    "app",
    "deb",
    "pkg",
    "dmg",
    "rpm",
  ]);
  const extension = getFileExtension(filename);
  if (dangerousExtensions.has(extension)) {
    issues.push(`Potentially dangerous file extension: .${extension}`);
  }

  // Path separators or ".." embedded in the name (path traversal).
  if (/\.\.|[/\\]/.test(filename)) {
    issues.push("Filename contains path traversal characters");
  }

  // Embedded null bytes.
  if (filename.includes("\0")) {
    issues.push("Filename contains null bytes");
  }

  // Empty payload (basic DoS / garbage-upload protection).
  if (buffer.length === 0) {
    issues.push("File is empty");
  }

  // Cross-check the declared MIME type against the one implied by the
  // extension; a mismatch is suspicious but not conclusive, so warn only.
  const expectedMimeType = getMimeTypeFromFilename(filename);
  if (expectedMimeType && expectedMimeType !== mimeType) {
    warnings.push(
      `MIME type mismatch: expected ${expectedMimeType}, got ${mimeType}`,
    );
  }

  return {
    isSecure: issues.length === 0,
    issues,
    warnings,
  };
}

View File

@@ -0,0 +1,81 @@
import type { FileUploadConfig, FileValidationResult } from "./data";
import { lookup } from "mime-types";
/**
 * Validate an uploaded file against the configured limits: size, MIME
 * allow-list, extension allow-list, extension/MIME agreement (warning only),
 * non-emptiness, and — for images — a magic-byte signature check.
 */
export function validateFile(
  file: Buffer | Uint8Array,
  originalName: string,
  mimeType: string,
  config: FileUploadConfig,
): FileValidationResult {
  const errors: string[] = [];
  const warnings: string[] = [];

  // Enforce the configured size limit.
  if (file.length > config.maxFileSize) {
    errors.push(
      `File size ${file.length} exceeds maximum allowed size of ${config.maxFileSize} bytes`,
    );
  }

  // MIME type must be on the allow-list.
  if (!config.allowedMimeTypes.includes(mimeType)) {
    errors.push(
      `MIME type ${mimeType} is not allowed. Allowed types: ${config.allowedMimeTypes.join(", ")}`,
    );
  }

  // Extension must be on the allow-list.
  const extension = originalName.split(".").pop()?.toLowerCase();
  if (!extension || !config.allowedExtensions.includes(extension)) {
    errors.push(
      `File extension .${extension} is not allowed. Allowed extensions: ${config.allowedExtensions.join(", ")}`,
    );
  }

  // Declared MIME type should agree with the extension; warn on mismatch.
  const expectedMimeType = lookup(originalName);
  if (expectedMimeType && expectedMimeType !== mimeType) {
    warnings.push(
      `MIME type ${mimeType} doesn't match expected type ${expectedMimeType} for file ${originalName}`,
    );
  }

  // Reject empty payloads.
  if (file.length === 0) {
    errors.push("File is empty");
  }

  // Magic-byte check for the common image formats.
  if (mimeType.startsWith("image/") && !validateImageSignature(file, mimeType)) {
    errors.push("Invalid image file signature");
  }

  return {
    isValid: errors.length === 0,
    errors,
    warnings,
  };
}
/**
 * Verify the file's leading bytes against the magic number for its declared
 * image MIME type. Types without a known signature pass by default.
 */
function validateImageSignature(
  file: Buffer | Uint8Array,
  mimeType: string,
): boolean {
  const signatures: Record<string, number[]> = {
    "image/jpeg": [0xff, 0xd8, 0xff],
    "image/png": [0x89, 0x50, 0x4e, 0x47],
    "image/gif": [0x47, 0x49, 0x46],
    "image/webp": [0x52, 0x49, 0x46, 0x46],
  };
  const expected = signatures[mimeType];
  if (!expected) return true; // no signature known for this type
  const bytes = Buffer.from(file);
  for (let i = 0; i < expected.length; i++) {
    if (bytes[i] !== expected[i]) return false;
  }
  return true;
}

View File

@@ -0,0 +1,28 @@
{
"compilerOptions": {
// Environment setup & latest features
"lib": ["ESNext"],
"target": "ESNext",
"module": "ESNext",
"moduleDetection": "force",
"jsx": "react-jsx",
"allowJs": true,
// Bundler mode
"moduleResolution": "bundler",
"allowImportingTsExtensions": true,
"verbatimModuleSyntax": true,
"noEmit": true,
// Best practices
"strict": true,
"skipLibCheck": true,
"noFallthroughCasesInSwitch": true,
"noUncheckedIndexedAccess": true,
// Some stricter flags (disabled by default)
"noUnusedLocals": false,
"noUnusedParameters": false,
"noPropertyAccessFromIndexSignature": false
}
}

View File

@@ -32,6 +32,19 @@ export const settingsSchema = v.object({
otelServiceName: v.string(),
otelExporterOtlpHttpEndpoint: v.string(),
// R2/Object Storage settings
r2BucketName: v.string(),
r2Region: v.string(),
r2Endpoint: v.string(),
r2AccessKey: v.string(),
r2SecretKey: v.string(),
r2PublicUrl: v.optional(v.string()),
// File upload settings
maxFileSize: v.number(),
allowedMimeTypes: v.array(v.string()),
allowedExtensions: v.array(v.string()),
});
export type Settings = v.InferOutput<typeof settingsSchema>;
@@ -53,6 +66,16 @@ function getEnvNumber(key: string, defaultValue: number): number {
return Number.isNaN(parsed) ? defaultValue : parsed;
}
/**
 * Parse comma-separated string into array, trimming whitespace around each
 * item and dropping empty entries.
 */
function parseCommaSeparated(value: string): string[] {
  const items: string[] = [];
  for (const raw of value.split(",")) {
    const trimmed = raw.trim();
    if (trimmed.length > 0) {
      items.push(trimmed);
    }
  }
  return items;
}
/**
* Load and validate settings from environment variables
*/
@@ -91,6 +114,26 @@ function loadSettings(): Settings {
otelExporterOtlpHttpEndpoint: getEnv(
"OTEL_EXPORTER_OTLP_HTTP_ENDPOINT",
),
// R2/Object Storage settings
r2BucketName: getEnv("R2_BUCKET_NAME"),
r2Region: getEnv("R2_REGION", "auto"),
r2Endpoint: getEnv("R2_ENDPOINT"),
r2AccessKey: getEnv("R2_ACCESS_KEY"),
r2SecretKey: getEnv("R2_SECRET_KEY"),
r2PublicUrl: getEnv("R2_PUBLIC_URL") || undefined,
// File upload settings
maxFileSize: getEnvNumber("MAX_FILE_SIZE", 10485760), // 10MB default
allowedMimeTypes: parseCommaSeparated(
getEnv(
"ALLOWED_MIME_TYPES",
"image/jpeg,image/png,image/webp,image/gif,application/pdf,text/plain",
),
),
allowedExtensions: parseCommaSeparated(
getEnv("ALLOWED_EXTENSIONS", "jpg,jpeg,png,webp,gif,pdf,txt"),
),
};
try {